mirror of https://github.com/apache/lucene.git

LUCENE-6919: Make Scorer expose an iterator instead of extending DocIdSetIterator.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1719081 13f79535-47bb-0310-9956-ffa450edef68

parent d33ca97167
commit ddb3a97678
@@ -123,6 +123,10 @@ API Changes
 * LUCENE-6900: Grouping sortWithinGroup variables used to allow null to mean
   Sort.RELEVANCE. Null is no longer permitted. (David Smiley)
 
+* LUCENE-6919: The Scorer class has been refactored to expose an iterator
+  instead of extending DocIdSetIterator. asTwoPhaseIterator() has been renamed
+  to twoPhaseIterator() for consistency. (Adrien Grand)
+
 Optimizations
 
 * LUCENE-6889: BooleanQuery.rewrite now performs some query optimization, in
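The entry above is the heart of the change: Scorer no longer extends DocIdSetIterator, it exposes one. As a rough orientation before the per-file hunks below, here is a minimal consumer-side sketch (illustrative only, not code from this commit; the class and method names are made up):

    import java.io.IOException;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;

    class ScorerLoop {
      // Before: callers iterated the Scorer itself (scorer.nextDoc(), scorer.advance(...)).
      // After: callers ask the Scorer for its DocIdSetIterator and keep the Scorer for score()/freq().
      static void collect(Scorer scorer) throws IOException {
        DocIdSetIterator it = scorer.iterator();
        for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
          float score = scorer.score(); // the scorer is positioned on the same doc as its iterator
        }
      }
    }

The BufferedUpdatesStream hunk that follows is exactly this migration applied to an existing call site.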
@@ -30,6 +30,7 @@ import java.util.concurrent.atomic.AtomicLong;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.Accountable;
@@ -690,8 +691,9 @@ class BufferedUpdatesStream implements Accountable {
       final IndexSearcher searcher = new IndexSearcher(readerContext.reader());
       searcher.setQueryCache(null);
       final Weight weight = searcher.createNormalizedWeight(query, false);
-      final DocIdSetIterator it = weight.scorer(readerContext);
-      if (it != null) {
+      final Scorer scorer = weight.scorer(readerContext);
+      if (scorer != null) {
+        final DocIdSetIterator it = scorer.iterator();
         final Bits liveDocs = readerContext.reader().getLiveDocs();
         while (true) {
           int doc = it.nextDoc();
@@ -126,19 +126,22 @@ class BooleanTopLevelScorers {
 
     @Override
     public float score() throws IOException {
+      // TODO: sum into a double and cast to float if we ever send required clauses to BS1
       int curDoc = reqScorer.docID();
-      float reqScore = reqScorer.score();
-      if (optScorer == null) {
-        return reqScore * coordReq;
+      float score = reqScorer.score();
+
+      int optScorerDoc = optIterator.docID();
+      if (optScorerDoc < curDoc) {
+        optScorerDoc = optIterator.advance(curDoc);
       }
 
-      int optScorerDoc = optScorer.docID();
-      if (optScorerDoc < curDoc && (optScorerDoc = optScorer.advance(curDoc)) == NO_MORE_DOCS) {
-        optScorer = null;
-        return reqScore * coordReq;
+      if (optScorerDoc == curDoc) {
+        score = (score + optScorer.score()) * coordBoth;
+      } else {
+        score = score * coordReq;
       }
 
-      return optScorerDoc == curDoc ? (reqScore + optScorer.score()) * coordBoth : reqScore * coordReq;
+      return score;
     }
   }
 
@@ -158,19 +161,22 @@ class BooleanTopLevelScorers {
 
     @Override
     public float score() throws IOException {
+      // TODO: sum into a double and cast to float if we ever send required clauses to BS1
      int curDoc = reqScorer.docID();
-      float reqScore = reqScorer.score();
-      if (optScorer == null) {
-        return reqScore * coords[requiredCount];
+      float score = reqScorer.score();
+
+      int optScorerDoc = optIterator.docID();
+      if (optScorerDoc < curDoc) {
+        optScorerDoc = optIterator.advance(curDoc);
       }
 
-      int optScorerDoc = optScorer.docID();
-      if (optScorerDoc < curDoc && (optScorerDoc = optScorer.advance(curDoc)) == NO_MORE_DOCS) {
-        optScorer = null;
-        return reqScore * coords[requiredCount];
+      if (optScorerDoc == curDoc) {
+        score = (score + optScorer.score()) * coords[requiredCount + optScorer.freq()];
+      } else {
+        score = score * coords[requiredCount];
       }
 
-      return optScorerDoc == curDoc ? (reqScore + optScorer.score()) * coords[requiredCount + optScorer.freq()] : reqScore * coords[requiredCount];
+      return score;
     }
   }
 }
@@ -84,7 +84,7 @@ public abstract class BulkScorer {
   public abstract int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException;
 
   /**
-   * Same as {@link Scorer#cost()} for bulk scorers.
+   * Same as {@link DocIdSetIterator#cost()} for bulk scorers.
    */
   public abstract long cost();
 }
@@ -61,24 +61,22 @@ public abstract class CachingCollector extends FilterCollector {
 
     private CachedScorer() { super(null); }
 
+    @Override
+    public DocIdSetIterator iterator() {
+      throw new UnsupportedOperationException();
+    }
+
     @Override
     public final float score() { return score; }
 
     @Override
-    public final int advance(int target) { throw new UnsupportedOperationException(); }
-
-    @Override
-    public final int docID() { return doc; }
+    public int docID() {
+      return doc;
+    }
 
     @Override
     public final int freq() { throw new UnsupportedOperationException(); }
 
-    @Override
-    public final int nextDoc() { throw new UnsupportedOperationException(); }
-
-    @Override
-    public long cost() { return 1; }
-
   }
 
   private static class NoScoreCachingCollector extends CachingCollector {
@@ -23,6 +23,7 @@ import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 
+import org.apache.lucene.search.spans.Spans;
 import org.apache.lucene.util.CollectionUtil;
 
 /** A conjunction of DocIdSetIterators.
@@ -32,16 +33,16 @@ import org.apache.lucene.util.CollectionUtil;
  */
 public class ConjunctionDISI extends DocIdSetIterator {
 
-  /** Create a conjunction over the provided iterators, taking advantage of
-   * {@link TwoPhaseIterator}. */
-  public static ConjunctionDISI intersect(List<? extends DocIdSetIterator> iterators) {
-    if (iterators.size() < 2) {
+  /** Create a conjunction over the provided {@link Scorer}s, taking advantage
+   * of {@link TwoPhaseIterator}. */
+  public static ConjunctionDISI intersectScorers(List<Scorer> scorers) {
+    if (scorers.size() < 2) {
       throw new IllegalArgumentException("Cannot make a ConjunctionDISI of less than 2 iterators");
     }
     final List<DocIdSetIterator> allIterators = new ArrayList<>();
     final List<TwoPhaseIterator> twoPhaseIterators = new ArrayList<>();
-    for (DocIdSetIterator iter : iterators) {
-      addIterator(iter, allIterators, twoPhaseIterators);
+    for (Scorer scorer : scorers) {
+      addScorer(scorer, allIterators, twoPhaseIterators);
     }
 
     if (twoPhaseIterators.isEmpty()) {
@@ -51,12 +52,66 @@ public class ConjunctionDISI extends DocIdSetIterator {
     }
   }
 
-  /** Adds the iterator, possibly splitting up into two phases or collapsing if it is another conjunction */
+  /** Create a conjunction over the provided DocIdSetIterators. */
+  public static ConjunctionDISI intersectIterators(List<DocIdSetIterator> iterators) {
+    if (iterators.size() < 2) {
+      throw new IllegalArgumentException("Cannot make a ConjunctionDISI of less than 2 iterators");
+    }
+    final List<DocIdSetIterator> allIterators = new ArrayList<>();
+    final List<TwoPhaseIterator> twoPhaseIterators = new ArrayList<>();
+    for (DocIdSetIterator iterator : iterators) {
+      addIterator(iterator, allIterators, twoPhaseIterators);
+    }
+
+    if (twoPhaseIterators.isEmpty()) {
+      return new ConjunctionDISI(allIterators);
+    } else {
+      return new TwoPhase(allIterators, twoPhaseIterators);
+    }
+  }
+
+  /** Create a conjunction over the provided {@link Scorer}s, taking advantage
+   * of {@link TwoPhaseIterator}. */
+  public static ConjunctionDISI intersectSpans(List<Spans> spanList) {
+    if (spanList.size() < 2) {
+      throw new IllegalArgumentException("Cannot make a ConjunctionDISI of less than 2 iterators");
+    }
+    final List<DocIdSetIterator> allIterators = new ArrayList<>();
+    final List<TwoPhaseIterator> twoPhaseIterators = new ArrayList<>();
+    for (Spans spans : spanList) {
+      addSpans(spans, allIterators, twoPhaseIterators);
+    }
+
+    if (twoPhaseIterators.isEmpty()) {
+      return new ConjunctionDISI(allIterators);
+    } else {
+      return new TwoPhase(allIterators, twoPhaseIterators);
+    }
+  }
+
+  /** Adds the scorer, possibly splitting up into two phases or collapsing if it is another conjunction */
+  private static void addScorer(Scorer scorer, List<DocIdSetIterator> allIterators, List<TwoPhaseIterator> twoPhaseIterators) {
+    TwoPhaseIterator twoPhaseIter = scorer.twoPhaseIterator();
+    if (twoPhaseIter != null) {
+      addTwoPhaseIterator(twoPhaseIter, allIterators, twoPhaseIterators);
+    } else { // no approximation support, use the iterator as-is
+      addIterator(scorer.iterator(), allIterators, twoPhaseIterators);
+    }
+  }
+
+  /** Adds the Spans. */
+  private static void addSpans(Spans spans, List<DocIdSetIterator> allIterators, List<TwoPhaseIterator> twoPhaseIterators) {
+    TwoPhaseIterator twoPhaseIter = spans.asTwoPhaseIterator();
+    if (twoPhaseIter != null) {
+      addTwoPhaseIterator(twoPhaseIter, allIterators, twoPhaseIterators);
+    } else { // no approximation support, use the iterator as-is
+      addIterator(spans, allIterators, twoPhaseIterators);
+    }
+  }
+
   private static void addIterator(DocIdSetIterator disi, List<DocIdSetIterator> allIterators, List<TwoPhaseIterator> twoPhaseIterators) {
     // Check for exactly this class for collapsing. Subclasses can do their own optimizations.
-    if (disi.getClass() == ConjunctionScorer.class) {
-      addIterator(((ConjunctionScorer) disi).disi, allIterators, twoPhaseIterators);
-    } else if (disi.getClass() == ConjunctionDISI.class || disi.getClass() == TwoPhase.class) {
+    if (disi.getClass() == ConjunctionDISI.class || disi.getClass() == TwoPhase.class) {
       ConjunctionDISI conjunction = (ConjunctionDISI) disi;
       // subconjuctions have already split themselves into two phase iterators and others, so we can take those
       // iterators as they are and move them up to this conjunction
@@ -67,14 +122,13 @@ public class ConjunctionDISI extends DocIdSetIterator {
         Collections.addAll(twoPhaseIterators, twoPhase.twoPhaseView.twoPhaseIterators);
       }
     } else {
-      TwoPhaseIterator twoPhaseIter = TwoPhaseIterator.asTwoPhaseIterator(disi);
-      if (twoPhaseIter != null) {
-        allIterators.add(twoPhaseIter.approximation());
-        twoPhaseIterators.add(twoPhaseIter);
-      } else { // no approximation support, use the iterator as-is
       allIterators.add(disi);
     }
   }
+
+  private static void addTwoPhaseIterator(TwoPhaseIterator twoPhaseIter, List<DocIdSetIterator> allIterators, List<TwoPhaseIterator> twoPhaseIterators) {
+    addIterator(twoPhaseIter.approximation(), allIterators, twoPhaseIterators);
+    twoPhaseIterators.add(twoPhaseIter);
   }
 
   final DocIdSetIterator lead;
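The two factory methods introduced above replace the old single intersect(...) entry point, so call sites now pick the variant that matches what they hold. A short usage sketch (illustrative, not part of the commit; the wrapper class and method names are made up):

    import java.util.Arrays;
    import org.apache.lucene.search.ConjunctionDISI;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;

    class ConjunctionExamples {
      // Conjunction over whole Scorers: two-phase iterators are exploited when the scorers provide them.
      static DocIdSetIterator conjunctionOfScorers(Scorer a, Scorer b) {
        return ConjunctionDISI.intersectScorers(Arrays.asList(a, b));
      }

      // Conjunction over plain DocIdSetIterators (e.g. postings lists), as ExactPhraseScorer does further below.
      static DocIdSetIterator conjunctionOfIterators(DocIdSetIterator a, DocIdSetIterator b) {
        return ConjunctionDISI.intersectIterators(Arrays.asList(a, b));
      }
    }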
@@ -29,27 +29,27 @@ class ConjunctionScorer extends Scorer {
   final Scorer[] scorers;
   final float coord;
 
-  ConjunctionScorer(Weight weight, List<? extends DocIdSetIterator> required, List<Scorer> scorers) {
+  ConjunctionScorer(Weight weight, List<Scorer> required, List<Scorer> scorers) {
     this(weight, required, scorers, 1f);
   }
 
   /** Create a new {@link ConjunctionScorer}, note that {@code scorers} must be a subset of {@code required}. */
-  ConjunctionScorer(Weight weight, List<? extends DocIdSetIterator> required, List<Scorer> scorers, float coord) {
+  ConjunctionScorer(Weight weight, List<Scorer> required, List<Scorer> scorers, float coord) {
     super(weight);
     assert required.containsAll(scorers);
     this.coord = coord;
-    this.disi = ConjunctionDISI.intersect(required);
+    this.disi = ConjunctionDISI.intersectScorers(required);
     this.scorers = scorers.toArray(new Scorer[scorers.size()]);
   }
 
   @Override
-  public TwoPhaseIterator asTwoPhaseIterator() {
+  public TwoPhaseIterator twoPhaseIterator() {
     return disi.asTwoPhaseIterator();
   }
 
   @Override
-  public int advance(int target) throws IOException {
-    return disi.advance(target);
+  public DocIdSetIterator iterator() {
+    return disi;
   }
 
   @Override
@@ -57,11 +57,6 @@ class ConjunctionScorer extends Scorer {
     return disi.docID();
   }
 
-  @Override
-  public int nextDoc() throws IOException {
-    return disi.nextDoc();
-  }
-
   @Override
   public float score() throws IOException {
     double sum = 0.0d;
@@ -76,11 +71,6 @@ class ConjunctionScorer extends Scorer {
     return scorers.length;
   }
 
-  @Override
-  public long cost() {
-    return disi.cost();
-  }
-
   @Override
   public Collection<ChildScorer> getChildren() {
     ArrayList<ChildScorer> children = new ArrayList<>();
@@ -54,10 +54,20 @@ public final class ConstantScoreScorer extends Scorer {
   }
 
   @Override
-  public TwoPhaseIterator asTwoPhaseIterator() {
+  public DocIdSetIterator iterator() {
+    return disi;
+  }
+
+  @Override
+  public TwoPhaseIterator twoPhaseIterator() {
     return twoPhaseIterator;
   }
 
+  @Override
+  public int docID() {
+    return disi.docID();
+  }
+
   @Override
   public float score() throws IOException {
     return score;
@@ -68,24 +78,5 @@ public final class ConstantScoreScorer extends Scorer {
     return 1;
   }
 
-  @Override
-  public int docID() {
-    return disi.docID();
-  }
-
-  @Override
-  public int nextDoc() throws IOException {
-    return disi.nextDoc();
-  }
-
-  @Override
-  public int advance(int target) throws IOException {
-    return disi.advance(target);
-  }
-
-  @Override
-  public long cost() {
-    return disi.cost();
-  }
 }
 
@@ -82,9 +82,9 @@ public abstract class ConstantScoreWeight extends Weight {
     if (s == null) {
       exists = false;
     } else {
-      final TwoPhaseIterator twoPhase = s.asTwoPhaseIterator();
+      final TwoPhaseIterator twoPhase = s.twoPhaseIterator();
       if (twoPhase == null) {
-        exists = s.advance(doc) == doc;
+        exists = s.iterator().advance(doc) == doc;
       } else {
         exists = twoPhase.approximation().advance(doc) == doc && twoPhase.matches();
       }
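The ConstantScoreWeight hunk above also captures the general post-change idiom for testing whether a scorer matches one specific document: prefer the two-phase approximation plus confirmation when it exists, otherwise advance the plain iterator. As a standalone sketch (a hypothetical helper, not part of the commit):

    import java.io.IOException;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.TwoPhaseIterator;

    class MatchCheck {
      // Returns true if the scorer matches `doc`, assuming the scorer has not yet advanced past `doc`.
      static boolean matches(Scorer s, int doc) throws IOException {
        TwoPhaseIterator twoPhase = s.twoPhaseIterator();
        if (twoPhase == null) {
          return s.iterator().advance(doc) == doc;
        }
        return twoPhase.approximation().advance(doc) == doc && twoPhase.matches();
      }
    }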
@@ -28,8 +28,7 @@ import org.apache.lucene.util.PriorityQueue;
 * pluggable comparison function makes the rebalancing quite slow.
 * @lucene.internal
 */
-public final class DisiPriorityQueue<Iter extends DocIdSetIterator>
-implements Iterable<DisiWrapper<Iter>> {
+public final class DisiPriorityQueue implements Iterable<DisiWrapper> {
 
   static int leftNode(int node) {
     return ((node + 1) << 1) - 1;
@@ -43,10 +42,9 @@ implements Iterable<DisiWrapper<Iter>> {
     return ((node + 1) >>> 1) - 1;
   }
 
-  private final DisiWrapper<Iter>[] heap;
+  private final DisiWrapper[] heap;
   private int size;
 
-  @SuppressWarnings({"unchecked","rawtypes"})
   public DisiPriorityQueue(int maxSize) {
     heap = new DisiWrapper[maxSize];
     size = 0;
@@ -56,15 +54,15 @@ implements Iterable<DisiWrapper<Iter>> {
     return size;
   }
 
-  public DisiWrapper<Iter> top() {
+  public DisiWrapper top() {
     return heap[0];
   }
 
   /** Get the list of scorers which are on the current doc. */
-  public DisiWrapper<Iter> topList() {
-    final DisiWrapper<Iter>[] heap = this.heap;
+  public DisiWrapper topList() {
+    final DisiWrapper[] heap = this.heap;
     final int size = this.size;
-    DisiWrapper<Iter> list = heap[0];
+    DisiWrapper list = heap[0];
     list.next = null;
     if (size >= 3) {
       list = topList(list, heap, size, 1);
@@ -76,14 +74,14 @@ implements Iterable<DisiWrapper<Iter>> {
   }
 
   // prepend w1 (iterator) to w2 (list)
-  private DisiWrapper<Iter> prepend(DisiWrapper<Iter> w1, DisiWrapper<Iter> w2) {
+  private DisiWrapper prepend(DisiWrapper w1, DisiWrapper w2) {
     w1.next = w2;
     return w1;
   }
 
-  private DisiWrapper<Iter> topList(DisiWrapper<Iter> list, DisiWrapper<Iter>[] heap,
+  private DisiWrapper topList(DisiWrapper list, DisiWrapper[] heap,
       int size, int i) {
-    final DisiWrapper<Iter> w = heap[i];
+    final DisiWrapper w = heap[i];
     if (w.doc == list.doc) {
       list = prepend(w, list);
       final int left = leftNode(i);
@@ -98,8 +96,8 @@ implements Iterable<DisiWrapper<Iter>> {
     return list;
   }
 
-  public DisiWrapper<Iter> add(DisiWrapper<Iter> entry) {
-    final DisiWrapper<Iter>[] heap = this.heap;
+  public DisiWrapper add(DisiWrapper entry) {
+    final DisiWrapper[] heap = this.heap;
     final int size = this.size;
     heap[size] = entry;
     upHeap(size);
@@ -107,9 +105,9 @@ implements Iterable<DisiWrapper<Iter>> {
     return heap[0];
   }
 
-  public DisiWrapper<Iter> pop() {
-    final DisiWrapper<Iter>[] heap = this.heap;
-    final DisiWrapper<Iter> result = heap[0];
+  public DisiWrapper pop() {
+    final DisiWrapper[] heap = this.heap;
+    final DisiWrapper result = heap[0];
     final int i = --size;
     heap[0] = heap[i];
     heap[i] = null;
@@ -117,18 +115,18 @@ implements Iterable<DisiWrapper<Iter>> {
     return result;
   }
 
-  public DisiWrapper<Iter> updateTop() {
+  public DisiWrapper updateTop() {
     downHeap(size);
     return heap[0];
   }
 
-  DisiWrapper<Iter> updateTop(DisiWrapper<Iter> topReplacement) {
+  DisiWrapper updateTop(DisiWrapper topReplacement) {
     heap[0] = topReplacement;
     return updateTop();
   }
 
   void upHeap(int i) {
-    final DisiWrapper<Iter> node = heap[i];
+    final DisiWrapper node = heap[i];
     final int nodeDoc = node.doc;
     int j = parentNode(i);
     while (j >= 0 && nodeDoc < heap[j].doc) {
@@ -141,7 +139,7 @@ implements Iterable<DisiWrapper<Iter>> {
 
   void downHeap(int size) {
     int i = 0;
-    final DisiWrapper<Iter> node = heap[0];
+    final DisiWrapper node = heap[0];
     int j = leftNode(i);
     if (j < size) {
       int k = rightNode(j);
@@ -164,7 +162,7 @@ implements Iterable<DisiWrapper<Iter>> {
   }
 
   @Override
-  public Iterator<DisiWrapper<Iter>> iterator() {
+  public Iterator<DisiWrapper> iterator() {
     return Arrays.asList(heap).subList(0, size).iterator();
   }
 
@@ -17,15 +17,18 @@ package org.apache.lucene.search;
 * limitations under the License.
 */
 
+import org.apache.lucene.search.spans.Spans;
+
 /**
 * Wrapper used in {@link DisiPriorityQueue}.
 * @lucene.internal
 */
-public class DisiWrapper<Iter extends DocIdSetIterator> {
-  public final Iter iterator;
+public class DisiWrapper {
+  public final DocIdSetIterator iterator;
+  public final Scorer scorer;
   public final long cost;
   public int doc; // the current doc, used for comparison
-  public DisiWrapper<Iter> next; // reference to a next element, see #topList
+  public DisiWrapper next; // reference to a next element, see #topList
 
   // An approximation of the iterator, or the iterator itself if it does not
   // support two-phase iteration
@@ -34,14 +37,33 @@ public class DisiWrapper<Iter extends DocIdSetIterator> {
   // two-phase iteration
   public final TwoPhaseIterator twoPhaseView;
 
+  // FOR SPANS
+  public final Spans spans;
   public int lastApproxMatchDoc; // last doc of approximation that did match
   public int lastApproxNonMatchDoc; // last doc of approximation that did not match
 
-  public DisiWrapper(Iter iterator) {
-    this.iterator = iterator;
+  public DisiWrapper(Scorer scorer) {
+    this.scorer = scorer;
+    this.spans = null;
+    this.iterator = scorer.iterator();
     this.cost = iterator.cost();
     this.doc = -1;
-    this.twoPhaseView = TwoPhaseIterator.asTwoPhaseIterator(iterator);
+    this.twoPhaseView = scorer.twoPhaseIterator();
 
+    if (twoPhaseView != null) {
+      approximation = twoPhaseView.approximation();
+    } else {
+      approximation = iterator;
+    }
+  }
+
+  public DisiWrapper(Spans spans) {
+    this.scorer = null;
+    this.spans = spans;
+    this.iterator = spans;
+    this.cost = iterator.cost();
+    this.doc = -1;
+    this.twoPhaseView = spans.asTwoPhaseIterator();
+
     if (twoPhaseView != null) {
       approximation = twoPhaseView.approximation();
@@ -23,16 +23,15 @@ import java.io.IOException;
 * the provided iterators.
 * @lucene.internal
 */
-public class DisjunctionDISIApproximation<Iter extends DocIdSetIterator>
-extends DocIdSetIterator {
+public class DisjunctionDISIApproximation extends DocIdSetIterator {
 
-  final DisiPriorityQueue<Iter> subIterators;
+  final DisiPriorityQueue subIterators;
   final long cost;
 
-  public DisjunctionDISIApproximation(DisiPriorityQueue<Iter> subIterators) {
+  public DisjunctionDISIApproximation(DisiPriorityQueue subIterators) {
     this.subIterators = subIterators;
     long cost = 0;
-    for (DisiWrapper<Iter> w : subIterators) {
+    for (DisiWrapper w : subIterators) {
       cost += w.cost;
     }
     this.cost = cost;
@@ -50,7 +49,7 @@ extends DocIdSetIterator {
 
   @Override
   public int nextDoc() throws IOException {
-    DisiWrapper<Iter> top = subIterators.top();
+    DisiWrapper top = subIterators.top();
     final int doc = top.doc;
     do {
       top.doc = top.approximation.nextDoc();
@@ -62,7 +61,7 @@ extends DocIdSetIterator {
 
   @Override
   public int advance(int target) throws IOException {
-    DisiWrapper<Iter> top = subIterators.top();
+    DisiWrapper top = subIterators.top();
     do {
       top.doc = top.approximation.advance(target);
       top = subIterators.updateTop();
@@ -46,11 +46,11 @@ final class DisjunctionMaxScorer extends DisjunctionScorer {
   }
 
   @Override
-  protected float score(DisiWrapper<Scorer> topList) throws IOException {
+  protected float score(DisiWrapper topList) throws IOException {
     float scoreSum = 0;
     float scoreMax = 0;
-    for (DisiWrapper<Scorer> w = topList; w != null; w = w.next) {
-      final float subScore = w.iterator.score();
+    for (DisiWrapper w = topList; w != null; w = w.next) {
+      final float subScore = w.scorer.score();
       scoreSum += subScore;
       if (subScore > scoreMax) {
         scoreMax = subScore;
@@ -28,21 +28,21 @@ import java.util.List;
 abstract class DisjunctionScorer extends Scorer {
 
   private final boolean needsScores;
-  final DisiPriorityQueue<Scorer> subScorers;
+  final DisiPriorityQueue subScorers;
   private final long cost;
 
   /** Linked list of scorers which are on the current doc */
-  private DisiWrapper<Scorer> topScorers;
+  private DisiWrapper topScorers;
 
   protected DisjunctionScorer(Weight weight, List<Scorer> subScorers, boolean needsScores) {
     super(weight);
     if (subScorers.size() <= 1) {
       throw new IllegalArgumentException("There must be at least 2 subScorers");
     }
-    this.subScorers = new DisiPriorityQueue<Scorer>(subScorers.size());
+    this.subScorers = new DisiPriorityQueue(subScorers.size());
     long cost = 0;
     for (Scorer scorer : subScorers) {
-      final DisiWrapper<Scorer> w = new DisiWrapper<>(scorer);
+      final DisiWrapper w = new DisiWrapper(scorer);
       cost += w.cost;
       this.subScorers.add(w);
     }
@@ -51,13 +51,55 @@ abstract class DisjunctionScorer extends Scorer {
   }
 
   @Override
-  public TwoPhaseIterator asTwoPhaseIterator() {
+  public DocIdSetIterator iterator() {
+    return new DocIdSetIterator() {
+
+      @Override
+      public int docID() {
+        return subScorers.top().doc;
+      }
+
+      @Override
+      public final int nextDoc() throws IOException {
+        topScorers = null;
+        DisiWrapper top = subScorers.top();
+        final int doc = top.doc;
+        do {
+          top.doc = top.iterator.nextDoc();
+          top = subScorers.updateTop();
+        } while (top.doc == doc);
+
+        return top.doc;
+      }
+
+      @Override
+      public final int advance(int target) throws IOException {
+        topScorers = null;
+        DisiWrapper top = subScorers.top();
+        do {
+          top.doc = top.iterator.advance(target);
+          top = subScorers.updateTop();
+        } while (top.doc < target);
+
+        return top.doc;
+      }
+
+      @Override
+      public final long cost() {
+        return cost;
+      }
+
+    };
+  }
+
+  @Override
+  public TwoPhaseIterator twoPhaseIterator() {
     float sumMatchCost = 0;
     long sumApproxCost = 0;
 
     // Compute matchCost as the avarage over the matchCost of the subScorers.
     // This is weighted by the cost, which is an expected number of matching documents.
-    for (DisiWrapper<Scorer> w : subScorers) {
+    for (DisiWrapper w : subScorers) {
       if (w.twoPhaseView != null) {
         long costWeight = (w.cost <= 1) ? 1 : w.cost;
         sumMatchCost += w.twoPhaseView.matchCost() * costWeight;
@@ -74,11 +116,11 @@ abstract class DisjunctionScorer extends Scorer {
     // note it is important to share the same pq as this scorer so that
     // rebalancing the pq through the approximation will also rebalance
     // the pq in this scorer.
-    return new TwoPhaseIterator(new DisjunctionDISIApproximation<Scorer>(subScorers)) {
+    return new TwoPhaseIterator(new DisjunctionDISIApproximation(subScorers)) {
 
       @Override
       public boolean matches() throws IOException {
-        DisiWrapper<Scorer> topScorers = subScorers.topList();
+        DisiWrapper topScorers = subScorers.topList();
         // remove the head of the list as long as it does not match
         while (topScorers.twoPhaseView != null && ! topScorers.twoPhaseView.matches()) {
          topScorers = topScorers.next;
@@ -90,8 +132,8 @@ abstract class DisjunctionScorer extends Scorer {
         if (needsScores) {
           // if scores or freqs are needed, we also need to remove scorers
           // from the top list that do not actually match
-          DisiWrapper<Scorer> previous = topScorers;
-          for (DisiWrapper<Scorer> w = topScorers.next; w != null; w = w.next) {
+          DisiWrapper previous = topScorers;
+          for (DisiWrapper w = topScorers.next; w != null; w = w.next) {
            if (w.twoPhaseView != null && ! w.twoPhaseView.matches()) {
              // w does not match, remove it
              previous.next = w.next;
@@ -119,48 +161,18 @@ abstract class DisjunctionScorer extends Scorer {
     };
   }
 
-  @Override
-  public final long cost() {
-    return cost;
-  }
-
   @Override
   public final int docID() {
    return subScorers.top().doc;
   }
 
-  @Override
-  public final int nextDoc() throws IOException {
-    topScorers = null;
-    DisiWrapper<Scorer> top = subScorers.top();
-    final int doc = top.doc;
-    do {
-      top.doc = top.iterator.nextDoc();
-      top = subScorers.updateTop();
-    } while (top.doc == doc);
-
-    return top.doc;
-  }
-
-  @Override
-  public final int advance(int target) throws IOException {
-    topScorers = null;
-    DisiWrapper<Scorer> top = subScorers.top();
-    do {
-      top.doc = top.iterator.advance(target);
-      top = subScorers.updateTop();
-    } while (top.doc < target);
-
-    return top.doc;
-  }
-
   @Override
   public final int freq() throws IOException {
     if (topScorers == null) {
       topScorers = subScorers.topList();
     }
     int freq = 1;
-    for (DisiWrapper<Scorer> w = topScorers.next; w != null; w = w.next) {
+    for (DisiWrapper w = topScorers.next; w != null; w = w.next) {
       freq += 1;
     }
     return freq;
@@ -175,13 +187,13 @@ abstract class DisjunctionScorer extends Scorer {
   }
 
   /** Compute the score for the given linked list of scorers. */
-  protected abstract float score(DisiWrapper<Scorer> topList) throws IOException;
+  protected abstract float score(DisiWrapper topList) throws IOException;
 
   @Override
   public final Collection<ChildScorer> getChildren() {
     ArrayList<ChildScorer> children = new ArrayList<>();
-    for (DisiWrapper<Scorer> scorer : subScorers) {
-      children.add(new ChildScorer(scorer.iterator, "SHOULD"));
+    for (DisiWrapper scorer : subScorers) {
+      children.add(new ChildScorer(scorer.scorer, "SHOULD"));
     }
     return children;
   }
@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.util.List;
 
 /** A Scorer for OR like queries, counterpart of <code>ConjunctionScorer</code>.
- * This Scorer implements {@link Scorer#advance(int)} and uses advance() on the given Scorers.
 */
 final class DisjunctionSumScorer extends DisjunctionScorer {
   private final float[] coord;
@@ -37,11 +36,11 @@ final class DisjunctionSumScorer extends DisjunctionScorer {
   }
 
   @Override
-  protected float score(DisiWrapper<Scorer> topList) throws IOException {
+  protected float score(DisiWrapper topList) throws IOException {
     double score = 0;
     int freq = 0;
-    for (DisiWrapper<Scorer> w = topList; w != null; w = w.next) {
-      score += w.iterator.score();
+    for (DisiWrapper w = topList; w != null; w = w.next) {
+      score += w.scorer.score();
       freq += 1;
     }
     return (float)score * coord[freq];
@@ -59,13 +59,13 @@ final class ExactPhraseScorer extends Scorer {
       iterators.add(posting.postings);
       postingsAndPositions.add(new PostingsAndPosition(posting.postings, posting.position));
     }
-    conjunction = ConjunctionDISI.intersect(iterators);
+    conjunction = ConjunctionDISI.intersectIterators(iterators);
     this.postings = postingsAndPositions.toArray(new PostingsAndPosition[postingsAndPositions.size()]);
     this.matchCost = matchCost;
   }
 
   @Override
-  public TwoPhaseIterator asTwoPhaseIterator() {
+  public TwoPhaseIterator twoPhaseIterator() {
     return new TwoPhaseIterator(conjunction) {
       @Override
       public boolean matches() throws IOException {
@@ -79,22 +79,9 @@ final class ExactPhraseScorer extends Scorer {
     };
   }
 
-  private int doNext(int doc) throws IOException {
-    for (;; doc = conjunction.nextDoc()) {
-      if (doc == NO_MORE_DOCS || phraseFreq() > 0) {
-        return doc;
-      }
-    }
-  }
-
   @Override
-  public int nextDoc() throws IOException {
-    return doNext(conjunction.nextDoc());
-  }
-
-  @Override
-  public int advance(int target) throws IOException {
-    return doNext(conjunction.advance(target));
+  public DocIdSetIterator iterator() {
+    return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator());
   }
 
   @Override
@@ -180,8 +167,4 @@ final class ExactPhraseScorer extends Scorer {
     return this.freq = freq;
   }
 
-  @Override
-  public long cost() {
-    return conjunction.cost();
-  }
 }
@@ -30,11 +30,6 @@ final class FakeScorer extends Scorer {
     super(null);
   }
 
-  @Override
-  public int advance(int target) {
-    throw new UnsupportedOperationException("FakeScorer doesn't support advance(int)");
-  }
-
   @Override
   public int docID() {
     return doc;
@@ -45,19 +40,14 @@ final class FakeScorer extends Scorer {
     return freq;
   }
 
-  @Override
-  public int nextDoc() {
-    throw new UnsupportedOperationException("FakeScorer doesn't support nextDoc()");
-  }
-
   @Override
   public float score() {
     return score;
   }
 
   @Override
-  public long cost() {
-    return 1;
+  public DocIdSetIterator iterator() {
+    throw new UnsupportedOperationException();
   }
 
   @Override
@@ -70,22 +70,12 @@ public abstract class FilterScorer extends Scorer {
   }
 
   @Override
-  public final int nextDoc() throws IOException {
-    return in.nextDoc();
+  public final DocIdSetIterator iterator() {
+    return in.iterator();
   }
 
   @Override
-  public final int advance(int target) throws IOException {
-    return in.advance(target);
-  }
-
-  @Override
-  public long cost() {
-    return in.cost();
-  }
-
-  @Override
-  public final TwoPhaseIterator asTwoPhaseIterator() {
-    return in.asTwoPhaseIterator();
+  public final TwoPhaseIterator twoPhaseIterator() {
+    return in.twoPhaseIterator();
   }
 }
@@ -64,7 +64,7 @@ final class MinShouldMatchSumScorer extends Scorer {
     final PriorityQueue<Scorer> pq = new PriorityQueue<Scorer>(scorers.size() - minShouldMatch + 1) {
       @Override
       protected boolean lessThan(Scorer a, Scorer b) {
-        return a.cost() > b.cost();
+        return a.iterator().cost() > b.iterator().cost();
       }
     };
     for (Scorer scorer : scorers) {
@@ -72,7 +72,7 @@ final class MinShouldMatchSumScorer extends Scorer {
     }
     long cost = 0;
     for (Scorer scorer = pq.pop(); scorer != null; scorer = pq.pop()) {
-      cost += scorer.cost();
+      cost += scorer.iterator().cost();
     }
     return cost;
   }
@@ -82,23 +82,22 @@ final class MinShouldMatchSumScorer extends Scorer {
 
   // list of scorers which 'lead' the iteration and are currently
   // positioned on 'doc'
-  DisiWrapper<Scorer> lead;
+  DisiWrapper lead;
   int doc; // current doc ID of the leads
   int freq; // number of scorers on the desired doc ID
 
   // priority queue of scorers that are too advanced compared to the current
   // doc. Ordered by doc ID.
-  final DisiPriorityQueue<Scorer> head;
+  final DisiPriorityQueue head;
 
   // priority queue of scorers which are behind the current doc.
   // Ordered by cost.
-  final DisiWrapper<Scorer>[] tail;
+  final DisiWrapper[] tail;
   int tailSize;
 
   final Collection<ChildScorer> childScorers;
   final long cost;
 
-  @SuppressWarnings({"unchecked","rawtypes"})
   MinShouldMatchSumScorer(Weight weight, Collection<Scorer> scorers, int minShouldMatch, float[] coord) {
     super(weight);
 
@@ -113,13 +112,13 @@ final class MinShouldMatchSumScorer extends Scorer {
     this.coord = coord;
     this.doc = -1;
 
-    head = new DisiPriorityQueue<Scorer>(scorers.size() - minShouldMatch + 1);
+    head = new DisiPriorityQueue(scorers.size() - minShouldMatch + 1);
     // there can be at most minShouldMatch - 1 scorers beyond the current position
     // otherwise we might be skipping over matching documents
     tail = new DisiWrapper[minShouldMatch - 1];
 
     for (Scorer scorer : scorers) {
-      addLead(new DisiWrapper<Scorer>(scorer));
+      addLead(new DisiWrapper(scorer));
     }
 
     List<ChildScorer> children = new ArrayList<>();
@@ -131,13 +130,18 @@ final class MinShouldMatchSumScorer extends Scorer {
   }
 
   @Override
-  public long cost() {
-    return cost;
+  public final Collection<ChildScorer> getChildren() {
+    return childScorers;
   }
 
   @Override
-  public final Collection<ChildScorer> getChildren() {
-    return childScorers;
+  public DocIdSetIterator iterator() {
+    return new DocIdSetIterator() {
+
+      @Override
+      public int docID() {
+        assert doc == lead.doc;
+        return doc;
       }
 
       @Override
@@ -145,8 +149,8 @@ final class MinShouldMatchSumScorer extends Scorer {
         // We are moving to the next doc ID, so scorers in 'lead' need to go in
        // 'tail'. If there is not enough space in 'tail', then we take the least
        // costly scorers and advance them.
-        for (DisiWrapper<Scorer> s = lead; s != null; s = s.next) {
-          final DisiWrapper<Scorer> evicted = insertTailWithOverFlow(s);
+        for (DisiWrapper s = lead; s != null; s = s.next) {
+          final DisiWrapper evicted = insertTailWithOverFlow(s);
           if (evicted != null) {
             if (evicted.doc == doc) {
               evicted.doc = evicted.iterator.nextDoc();
@@ -164,8 +168,8 @@ final class MinShouldMatchSumScorer extends Scorer {
       @Override
       public int advance(int target) throws IOException {
         // Same logic as in nextDoc
-        for (DisiWrapper<Scorer> s = lead; s != null; s = s.next) {
-          final DisiWrapper<Scorer> evicted = insertTailWithOverFlow(s);
+        for (DisiWrapper s = lead; s != null; s = s.next) {
+          final DisiWrapper evicted = insertTailWithOverFlow(s);
           if (evicted != null) {
             evicted.doc = evicted.iterator.advance(target);
             head.add(evicted);
@@ -174,9 +178,9 @@ final class MinShouldMatchSumScorer extends Scorer {
 
         // But this time there might also be scorers in 'head' behind the desired
         // target so we need to do the same thing that we did on 'lead' on 'head'
-        DisiWrapper<Scorer> headTop = head.top();
+        DisiWrapper headTop = head.top();
         while (headTop.doc < target) {
-          final DisiWrapper<Scorer> evicted = insertTailWithOverFlow(headTop);
+          final DisiWrapper evicted = insertTailWithOverFlow(headTop);
           // We know that the tail is full since it contains at most
           // minShouldMatch - 1 entries and we just moved at least minShouldMatch
           // entries to it, so evicted is not null
@@ -188,19 +192,26 @@ final class MinShouldMatchSumScorer extends Scorer {
         return doNext();
       }
 
-  private void addLead(DisiWrapper<Scorer> lead) {
+      @Override
+      public long cost() {
+        return cost;
+      }
+    };
+  }
+
+  private void addLead(DisiWrapper lead) {
     lead.next = this.lead;
     this.lead = lead;
     freq += 1;
   }
 
   private void pushBackLeads() throws IOException {
-    for (DisiWrapper<Scorer> s = lead; s != null; s = s.next) {
+    for (DisiWrapper s = lead; s != null; s = s.next) {
      addTail(s);
     }
   }
 
-  private void advanceTail(DisiWrapper<Scorer> top) throws IOException {
+  private void advanceTail(DisiWrapper top) throws IOException {
     top.doc = top.iterator.advance(doc);
     if (top.doc == doc) {
       addLead(top);
@@ -210,7 +221,7 @@ final class MinShouldMatchSumScorer extends Scorer {
   }
 
   private void advanceTail() throws IOException {
-    final DisiWrapper<Scorer> top = popTail();
+    final DisiWrapper top = popTail();
     advanceTail(top);
   }
 
@@ -276,8 +287,8 @@ final class MinShouldMatchSumScorer extends Scorer {
     // we need to know about all matches
     updateFreq();
     double score = 0;
-    for (DisiWrapper<Scorer> s = lead; s != null; s = s.next) {
-      score += s.iterator.score();
+    for (DisiWrapper s = lead; s != null; s = s.next) {
+      score += s.scorer.score();
     }
     return coord[freq] * (float) score;
   }
@@ -289,12 +300,12 @@ final class MinShouldMatchSumScorer extends Scorer {
   }
 
   /** Insert an entry in 'tail' and evict the least-costly scorer if full. */
-  private DisiWrapper<Scorer> insertTailWithOverFlow(DisiWrapper<Scorer> s) {
+  private DisiWrapper insertTailWithOverFlow(DisiWrapper s) {
     if (tailSize < tail.length) {
       addTail(s);
       return null;
     } else if (tail.length >= 1) {
-      final DisiWrapper<Scorer> top = tail[0];
+      final DisiWrapper top = tail[0];
       if (top.cost < s.cost) {
         tail[0] = s;
         downHeapCost(tail, tailSize);
@@ -305,16 +316,16 @@ final class MinShouldMatchSumScorer extends Scorer {
   }
 
   /** Add an entry to 'tail'. Fails if over capacity. */
-  private void addTail(DisiWrapper<Scorer> s) {
+  private void addTail(DisiWrapper s) {
     tail[tailSize] = s;
     upHeapCost(tail, tailSize);
     tailSize += 1;
   }
 
   /** Pop the least-costly scorer from 'tail'. */
-  private DisiWrapper<Scorer> popTail() {
+  private DisiWrapper popTail() {
     assert tailSize > 0;
-    final DisiWrapper<Scorer> result = tail[0];
+    final DisiWrapper result = tail[0];
     tail[0] = tail[--tailSize];
     downHeapCost(tail, tailSize);
     return result;
@@ -322,8 +333,8 @@ final class MinShouldMatchSumScorer extends Scorer {
 
   /** Heap helpers */
 
-  private static void upHeapCost(DisiWrapper<Scorer>[] heap, int i) {
-    final DisiWrapper<Scorer> node = heap[i];
+  private static void upHeapCost(DisiWrapper[] heap, int i) {
+    final DisiWrapper node = heap[i];
     final long nodeCost = node.cost;
     int j = parentNode(i);
     while (j >= 0 && nodeCost < heap[j].cost) {
@@ -334,9 +345,9 @@ final class MinShouldMatchSumScorer extends Scorer {
     heap[i] = node;
   }
 
-  private static void downHeapCost(DisiWrapper<Scorer>[] heap, int size) {
+  private static void downHeapCost(DisiWrapper[] heap, int size) {
     int i = 0;
-    final DisiWrapper<Scorer> node = heap[0];
+    final DisiWrapper node = heap[0];
     int j = leftNode(i);
     if (j < size) {
       int k = rightNode(j);
@@ -241,7 +241,7 @@ public class MultiPhraseQuery extends Query {
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       Scorer scorer = scorer(context);
       if (scorer != null) {
-        int newDoc = scorer.advance(doc);
+        int newDoc = scorer.iterator().advance(doc);
         if (newDoc == doc) {
           float freq = slop == 0 ? scorer.freq() : ((SloppyPhraseScorer)scorer).sloppyFreq();
           SimScorer docScorer = similarity.simScorer(stats, context);

@@ -453,7 +453,7 @@ public class PhraseQuery extends Query {
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       Scorer scorer = scorer(context);
       if (scorer != null) {
-        int newDoc = scorer.advance(doc);
+        int newDoc = scorer.iterator().advance(doc);
         if (newDoc == doc) {
           float freq = slop == 0 ? scorer.freq() : ((SloppyPhraseScorer)scorer).sloppyFreq();
           SimScorer docScorer = similarity.simScorer(stats, context);

@@ -89,7 +89,7 @@ public abstract class QueryRescorer extends Rescorer {
       int targetDoc = docID - docBase;
       int actualDoc = scorer.docID();
       if (actualDoc < targetDoc) {
-        actualDoc = scorer.advance(targetDoc);
+        actualDoc = scorer.iterator().advance(targetDoc);
       }
 
       if (actualDoc == targetDoc) {
@@ -23,9 +23,6 @@ import java.util.Collections;
 
 /** A Scorer for queries with a required subscorer
  * and an excluding (prohibited) sub {@link Scorer}.
- * <br>
- * This <code>Scorer</code> implements {@link Scorer#advance(int)},
- * and it uses the advance() on the given scorers.
  */
 class ReqExclScorer extends Scorer {
 
@@ -44,25 +41,20 @@ class ReqExclScorer extends Scorer {
   public ReqExclScorer(Scorer reqScorer, Scorer exclScorer) {
     super(reqScorer.weight);
     this.reqScorer = reqScorer;
-    reqTwoPhaseIterator = reqScorer.asTwoPhaseIterator();
+    reqTwoPhaseIterator = reqScorer.twoPhaseIterator();
     if (reqTwoPhaseIterator == null) {
-      reqApproximation = reqScorer;
+      reqApproximation = reqScorer.iterator();
     } else {
       reqApproximation = reqTwoPhaseIterator.approximation();
     }
-    exclTwoPhaseIterator = exclScorer.asTwoPhaseIterator();
+    exclTwoPhaseIterator = exclScorer.twoPhaseIterator();
     if (exclTwoPhaseIterator == null) {
-      exclApproximation = exclScorer;
+      exclApproximation = exclScorer.iterator();
     } else {
       exclApproximation = exclTwoPhaseIterator.approximation();
     }
   }
 
-  @Override
-  public int nextDoc() throws IOException {
-    return toNonExcluded(reqApproximation.nextDoc());
-  }
-
   /** Confirms whether or not the given {@link TwoPhaseIterator}
    * matches on the current document. */
   private static boolean matches(TwoPhaseIterator it) throws IOException {
@@ -85,6 +77,10 @@ class ReqExclScorer extends Scorer {
     return matches(reqTwoPhaseIterator);
   }
 
+  @Override
+  public DocIdSetIterator iterator() {
+    return new DocIdSetIterator() {
+
       /** Advance to the next non-excluded doc. */
       private int toNonExcluded(int doc) throws IOException {
         int exclDoc = exclApproximation.docID();
@@ -101,9 +97,32 @@ class ReqExclScorer extends Scorer {
         }
       }
 
+      @Override
+      public int nextDoc() throws IOException {
+        return toNonExcluded(reqApproximation.nextDoc());
+      }
+
+      @Override
+      public int advance(int target) throws IOException {
+        return toNonExcluded(reqApproximation.advance(target));
+      }
+
       @Override
       public int docID() {
-        return reqScorer.docID();
+        return reqApproximation.docID();
+      }
+
+      @Override
+      public long cost() {
+        return reqApproximation.cost();
+      }
+
+    };
+  }
+
+  @Override
+  public int docID() {
+    return reqApproximation.docID();
   }
 
   @Override
@@ -111,11 +130,6 @@ class ReqExclScorer extends Scorer {
     return reqScorer.freq();
   }
 
-  @Override
-  public long cost() {
-    return reqScorer.cost();
-  }
-
   @Override
   public float score() throws IOException {
     return reqScorer.score(); // reqScorer may be null when next() or skipTo() already return false
@@ -127,12 +141,7 @@ class ReqExclScorer extends Scorer {
   }
 
   @Override
-  public int advance(int target) throws IOException {
-    return toNonExcluded(reqApproximation.advance(target));
-  }
-
-  @Override
-  public TwoPhaseIterator asTwoPhaseIterator() {
+  public TwoPhaseIterator twoPhaseIterator() {
     if (reqTwoPhaseIterator == null) {
       return null;
     }
@@ -22,15 +22,14 @@ import java.util.Collection;
 
 /** A Scorer for queries with a required part and an optional part.
  * Delays skipTo() on the optional part until a score() is needed.
- * <br>
- * This <code>Scorer</code> implements {@link Scorer#advance(int)}.
  */
 class ReqOptSumScorer extends Scorer {
   /** The scorers passed from the constructor.
    * These are set to null as soon as their next() or skipTo() returns false.
    */
   protected final Scorer reqScorer;
-  protected Scorer optScorer;
+  protected final Scorer optScorer;
+  protected final DocIdSetIterator optIterator;
 
   /** Construct a <code>ReqOptScorer</code>.
    * @param reqScorer The required scorer. This must match.
@@ -45,21 +44,17 @@ class ReqOptSumScorer extends Scorer {
     assert optScorer != null;
     this.reqScorer = reqScorer;
     this.optScorer = optScorer;
+    this.optIterator = optScorer.iterator();
   }
 
   @Override
-  public TwoPhaseIterator asTwoPhaseIterator() {
-    return reqScorer.asTwoPhaseIterator();
+  public TwoPhaseIterator twoPhaseIterator() {
+    return reqScorer.twoPhaseIterator();
   }
 
   @Override
-  public int nextDoc() throws IOException {
-    return reqScorer.nextDoc();
-  }
-
-  @Override
-  public int advance(int target) throws IOException {
-    return reqScorer.advance(target);
+  public DocIdSetIterator iterator() {
+    return reqScorer.iterator();
   }
 
   @Override
@@ -68,7 +63,7 @@ class ReqOptSumScorer extends Scorer {
   }
 
   /** Returns the score of the current document matching the query.
-   * Initially invalid, until {@link #nextDoc()} is called the first time.
+   * Initially invalid, until the {@link #iterator()} is advanced the first time.
    * @return The score of the required scorer, eventually increased by the score
    * of the optional scorer when it also matches the current document.
    */
@@ -76,25 +71,25 @@ class ReqOptSumScorer extends Scorer {
   public float score() throws IOException {
     // TODO: sum into a double and cast to float if we ever send required clauses to BS1
     int curDoc = reqScorer.docID();
-    float reqScore = reqScorer.score();
-    if (optScorer == null) {
-      return reqScore;
+    float score = reqScorer.score();
+
+    int optScorerDoc = optIterator.docID();
+    if (optScorerDoc < curDoc) {
+      optScorerDoc = optIterator.advance(curDoc);
     }
 
-    int optScorerDoc = optScorer.docID();
-    if (optScorerDoc < curDoc && (optScorerDoc = optScorer.advance(curDoc)) == NO_MORE_DOCS) {
-      optScorer = null;
-      return reqScore;
+    if (optScorerDoc == curDoc) {
+      score += optScorer.score();
     }
 
-    return optScorerDoc == curDoc ? reqScore + optScorer.score() : reqScore;
+    return score;
   }
 
   @Override
   public int freq() throws IOException {
     // we might have deferred advance()
     score();
-    return (optScorer != null && optScorer.docID() == reqScorer.docID()) ? 2 : 1;
+    return optIterator.docID() == reqScorer.docID() ? 2 : 1;
   }
 
   @Override
@@ -105,9 +100,5 @@ class ReqOptSumScorer extends Scorer {
     return children;
   }
 
-  @Override
-  public long cost() {
-    return reqScorer.cost();
-  }
-
 }
@@ -25,8 +25,8 @@ import java.util.Collections;
  * Expert: Common scoring functionality for different types of queries.
  *
  * <p>
- * A <code>Scorer</code> iterates over documents matching a
- * query in increasing order of doc Id.
+ * A <code>Scorer</code> exposes an {@link #iterator()} over documents
+ * matching a query in increasing order of doc Id.
  * </p>
  * <p>
  * Document scores are computed using a given <code>Similarity</code>
@@ -39,7 +39,7 @@ import java.util.Collections;
  * TopScoreDocCollector}) will not properly collect hits
  * with these scores.
  */
-public abstract class Scorer extends DocIdSetIterator {
+public abstract class Scorer {
   /** the Scorer's parent Weight. in some cases this may be null */
   // TODO can we clean this up?
   protected final Weight weight;
@@ -52,10 +52,18 @@ public abstract class Scorer extends DocIdSetIterator {
     this.weight = weight;
   }
 
+  /**
+   * Returns the doc ID that is currently being scored.
+   * This will return {@code -1} if the {@link #iterator()} is not positioned
+   * or {@link DocIdSetIterator#NO_MORE_DOCS} if it has been entirely consumed.
+   * @see DocIdSetIterator#docID()
+   */
+  public abstract int docID();
+
   /** Returns the score of the current document matching the query.
-   * Initially invalid, until {@link #nextDoc()} or {@link #advance(int)}
-   * is called the first time, or when called from within
-   * {@link LeafCollector#collect}.
+   * Initially invalid, until {@link DocIdSetIterator#nextDoc()} or
+   * {@link DocIdSetIterator#advance(int)} is called on the {@link #iterator()}
+   * the first time, or when called from within {@link LeafCollector#collect}.
    */
   public abstract float score() throws IOException;
 
@@ -101,6 +109,19 @@ public abstract class Scorer extends DocIdSetIterator {
     }
   }
 
+  /**
+   * Return a {@link DocIdSetIterator} over matching documents.
+   *
+   * The returned iterator will either be positioned on {@code -1} if no
+   * documents have been scored yet, {@link DocIdSetIterator#NO_MORE_DOCS}
+   * if all documents have been scored already, or the last document id that
+   * has been scored otherwise.
+   *
+   * The returned iterator is a view: calling this method several times will
+   * return iterators that have the same state.
+   */
+  public abstract DocIdSetIterator iterator();
+
   /**
    * Optional method: Return a {@link TwoPhaseIterator} view of this
    * {@link Scorer}. A return value of {@code null} indicates that
@@ -108,15 +129,15 @@ public abstract class Scorer extends DocIdSetIterator {
    *
    * Note that the returned {@link TwoPhaseIterator}'s
    * {@link TwoPhaseIterator#approximation() approximation} must
-   * advance synchronously with this iterator: advancing the approximation must
-   * advance this iterator and vice-versa.
+   * advance synchronously with the {@link #iterator()}: advancing the
+   * approximation must advance the iterator and vice-versa.
    *
    * Implementing this method is typically useful on {@link Scorer}s
    * that have a high per-document overhead in order to confirm matches.
   *
   * The default implementation returns {@code null}.
   */
-  public TwoPhaseIterator asTwoPhaseIterator() {
+  public TwoPhaseIterator twoPhaseIterator() {
    return null;
  }
 }
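Note (not part of the patch): to make the refactored contract above concrete, here is a minimal sketch of a Scorer implementation under the new API, assuming a wrapped DocIdSetIterator that already enumerates the matching documents; the class and field names are made up for illustration.

    import java.io.IOException;

    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    // Hypothetical example: a Scorer that gives every match the same score.
    // It no longer extends DocIdSetIterator; iteration is delegated to iterator().
    final class ConstantScoringScorer extends Scorer {
      private final DocIdSetIterator disi; // enumerates the matching documents
      private final float constantScore;

      ConstantScoringScorer(Weight weight, float constantScore, DocIdSetIterator disi) {
        super(weight);
        this.constantScore = constantScore;
        this.disi = disi;
      }

      @Override
      public DocIdSetIterator iterator() {
        return disi; // a view over the matching documents
      }

      @Override
      public int docID() {
        return disi.docID(); // -1 before iteration, NO_MORE_DOCS when exhausted
      }

      @Override
      public float score() throws IOException {
        return constantScore;
      }

      @Override
      public int freq() throws IOException {
        return 1;
      }
    }

Callers drive matching through iterator().nextDoc() or advance() and read score() and freq() at the iterator's current position, which is essentially what the bulk scorers further down do.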
@@ -69,7 +69,7 @@ final class SloppyPhraseScorer extends Scorer {
       iterators[i] = postings[i].postings;
       phrasePositions[i] = new PhrasePositions(postings[i].postings, postings[i].position, i, postings[i].terms);
     }
-    conjunction = ConjunctionDISI.intersect(Arrays.asList(iterators));
+    conjunction = ConjunctionDISI.intersectIterators(Arrays.asList(iterators));
     this.matchCost = matchCost;
   }
 
@@ -551,48 +551,16 @@ final class SloppyPhraseScorer extends Scorer {
     return conjunction.docID();
   }
 
-  @Override
-  public int nextDoc() throws IOException {
-    int doc;
-    for (doc = conjunction.nextDoc(); doc != NO_MORE_DOCS; doc = conjunction.nextDoc()) {
-      sloppyFreq = phraseFreq(); // check for phrase
-      if (sloppyFreq != 0f) {
-        break;
-      }
-    }
-
-    return doc;
-  }
-
   @Override
   public float score() {
     return docScorer.score(docID(), sloppyFreq);
   }
 
-  @Override
-  public int advance(int target) throws IOException {
-    assert target > docID();
-    int doc;
-    for (doc = conjunction.advance(target); doc != NO_MORE_DOCS; doc = conjunction.nextDoc()) {
-      sloppyFreq = phraseFreq(); // check for phrase
-      if (sloppyFreq != 0f) {
-        break;
-      }
-    }
-
-    return doc;
-  }
-
-  @Override
-  public long cost() {
-    return conjunction.cost();
-  }
-
   @Override
   public String toString() { return "scorer(" + weight + ")"; }
 
   @Override
-  public TwoPhaseIterator asTwoPhaseIterator() {
+  public TwoPhaseIterator twoPhaseIterator() {
     return new TwoPhaseIterator(conjunction) {
       @Override
       public boolean matches() throws IOException {
@@ -611,4 +579,9 @@ final class SloppyPhraseScorer extends Scorer {
       }
     };
   }
 
+  @Override
+  public DocIdSetIterator iterator() {
+    return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator());
+  }
 }

@@ -162,7 +162,7 @@ public final class SynonymQuery extends Query {
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       Scorer scorer = scorer(context);
       if (scorer != null) {
-        int newDoc = scorer.advance(doc);
+        int newDoc = scorer.iterator().advance(doc);
         if (newDoc == doc) {
           final float freq;
           if (scorer instanceof SynonymScorer) {
@@ -229,15 +229,15 @@ public final class SynonymQuery extends Query {
     }
 
     @Override
-    protected float score(DisiWrapper<Scorer> topList) throws IOException {
+    protected float score(DisiWrapper topList) throws IOException {
       return similarity.score(topList.doc, tf(topList));
     }
 
     /** combines TF of all subs. */
-    final int tf(DisiWrapper<Scorer> topList) throws IOException {
+    final int tf(DisiWrapper topList) throws IOException {
       int tf = 0;
-      for (DisiWrapper<Scorer> w = topList; w != null; w = w.next) {
-        tf += w.iterator.freq();
+      for (DisiWrapper w = topList; w != null; w = w.next) {
+        tf += w.scorer.freq();
       }
       return tf;
     }

@@ -136,7 +136,7 @@ public class TermQuery extends Query {
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       Scorer scorer = scorer(context);
       if (scorer != null) {
-        int newDoc = scorer.advance(doc);
+        int newDoc = scorer.iterator().advance(doc);
         if (newDoc == doc) {
           float freq = scorer.freq();
           SimScorer docScorer = similarity.simScorer(stats, context);

@@ -55,41 +55,17 @@ final class TermScorer extends Scorer {
     return postingsEnum.freq();
   }
 
-  /**
-   * Advances to the next document matching the query. <br>
-   *
-   * @return the document matching the query or NO_MORE_DOCS if there are no more documents.
-   */
   @Override
-  public int nextDoc() throws IOException {
-    return postingsEnum.nextDoc();
+  public DocIdSetIterator iterator() {
+    return postingsEnum;
   }
 
   @Override
   public float score() throws IOException {
-    assert docID() != NO_MORE_DOCS;
+    assert docID() != DocIdSetIterator.NO_MORE_DOCS;
     return docScorer.score(postingsEnum.docID(), postingsEnum.freq());
   }
 
-  /**
-   * Advances to the first match beyond the current whose document number is
-   * greater than or equal to a given target. <br>
-   * The implementation uses {@link org.apache.lucene.index.PostingsEnum#advance(int)}.
-   *
-   * @param target
-   *          The target document number.
-   * @return the matching document or NO_MORE_DOCS if none exist.
-   */
-  @Override
-  public int advance(int target) throws IOException {
-    return postingsEnum.advance(target);
-  }
-
-  @Override
-  public long cost() {
-    return postingsEnum.cost();
-  }
-
   /** Returns a string representation of this <code>TermScorer</code>. */
   @Override
   public String toString() { return "scorer(" + weight + ")[" + super.toString() + "]"; }
@@ -21,7 +21,7 @@ import java.io.IOException;
 import java.util.Objects;
 
 /**
- * Returned by {@link Scorer#asTwoPhaseIterator()}
+ * Returned by {@link Scorer#twoPhaseIterator()}
  * to expose an approximation of a {@link DocIdSetIterator}.
  * When the {@link #approximation()}'s
 * {@link DocIdSetIterator#nextDoc()} or {@link DocIdSetIterator#advance(int)}
@@ -98,14 +98,4 @@ public abstract class TwoPhaseIterator {
   */
  public abstract float matchCost();
 
-  /**
-   * Returns a {@link TwoPhaseIterator} for this {@link DocIdSetIterator}
-   * when available, otherwise returns null.
-   */
-  public static TwoPhaseIterator asTwoPhaseIterator(DocIdSetIterator iter) {
-    return (iter instanceof Scorer)
-        ? ((Scorer) iter).asTwoPhaseIterator()
-        : null;
-  }
-
 }
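Note (not part of the patch): with the static asTwoPhaseIterator(DocIdSetIterator) helper removed above, consumers ask the Scorer directly for its optional two-phase view. A rough sketch of the consumption pattern, under the assumption that the caller only needs the matching doc ids; the helper class and interface below are hypothetical.

    import java.io.IOException;

    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.TwoPhaseIterator;

    final class TwoPhaseConsumption {
      // Visit every matching doc of a scorer, preferring the two-phase view
      // (cheap approximation plus matches() confirmation) when it is available.
      static void forEachMatch(Scorer scorer, MatchConsumer consumer) throws IOException {
        TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
        if (twoPhase == null) {
          DocIdSetIterator it = scorer.iterator();
          for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
            consumer.accept(doc);
          }
        } else {
          DocIdSetIterator approximation = twoPhase.approximation();
          for (int doc = approximation.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = approximation.nextDoc()) {
            if (twoPhase.matches()) { // run the costly second phase only on candidates
              consumer.accept(doc);
            }
          }
        }
      }

      interface MatchConsumer {
        void accept(int doc) throws IOException;
      }
    }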
@@ -147,6 +147,8 @@ public abstract class Weight {
    * @lucene.internal */
   protected static class DefaultBulkScorer extends BulkScorer {
     private final Scorer scorer;
+    private final DocIdSetIterator iterator;
+    private final TwoPhaseIterator twoPhase;
 
     /** Sole constructor. */
     public DefaultBulkScorer(Scorer scorer) {
@@ -154,30 +156,31 @@ public abstract class Weight {
         throw new NullPointerException();
       }
       this.scorer = scorer;
+      this.iterator = scorer.iterator();
+      this.twoPhase = scorer.twoPhaseIterator();
     }
 
     @Override
     public long cost() {
-      return scorer.cost();
+      return iterator.cost();
     }
 
     @Override
     public int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException {
       collector.setScorer(scorer);
-      final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
       if (scorer.docID() == -1 && min == 0 && max == DocIdSetIterator.NO_MORE_DOCS) {
-        scoreAll(collector, scorer, twoPhase, acceptDocs);
+        scoreAll(collector, iterator, twoPhase, acceptDocs);
         return DocIdSetIterator.NO_MORE_DOCS;
       } else {
         int doc = scorer.docID();
         if (doc < min) {
           if (twoPhase == null) {
-            doc = scorer.advance(min);
+            doc = iterator.advance(min);
           } else {
             doc = twoPhase.approximation().advance(min);
           }
         }
-        return scoreRange(collector, scorer, twoPhase, acceptDocs, doc, max);
+        return scoreRange(collector, iterator, twoPhase, acceptDocs, doc, max);
       }
     }
 
@@ -185,14 +188,14 @@ public abstract class Weight {
    * separate this from {@link #scoreAll} to help out
    * hotspot.
    * See <a href="https://issues.apache.org/jira/browse/LUCENE-5487">LUCENE-5487</a> */
-  static int scoreRange(LeafCollector collector, Scorer scorer, TwoPhaseIterator twoPhase,
+  static int scoreRange(LeafCollector collector, DocIdSetIterator iterator, TwoPhaseIterator twoPhase,
       Bits acceptDocs, int currentDoc, int end) throws IOException {
     if (twoPhase == null) {
       while (currentDoc < end) {
         if (acceptDocs == null || acceptDocs.get(currentDoc)) {
           collector.collect(currentDoc);
         }
-        currentDoc = scorer.nextDoc();
+        currentDoc = iterator.nextDoc();
       }
       return currentDoc;
     } else {
@@ -211,9 +214,9 @@ public abstract class Weight {
    * separate this from {@link #scoreRange} to help out
    * hotspot.
    * See <a href="https://issues.apache.org/jira/browse/LUCENE-5487">LUCENE-5487</a> */
-  static void scoreAll(LeafCollector collector, Scorer scorer, TwoPhaseIterator twoPhase, Bits acceptDocs) throws IOException {
+  static void scoreAll(LeafCollector collector, DocIdSetIterator iterator, TwoPhaseIterator twoPhase, Bits acceptDocs) throws IOException {
     if (twoPhase == null) {
-      for (int doc = scorer.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.nextDoc()) {
+      for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) {
         if (acceptDocs == null || acceptDocs.get(doc)) {
           collector.collect(doc);
         }
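Note (not part of the patch): a sketch of the caller side of bulk scoring after this change. Iteration now happens on the iterator cached inside DefaultBulkScorer, while LeafCollector.setScorer() still receives the Scorer itself. The driver method below is hypothetical and assumes a query and a single segment are already at hand.

    import java.io.IOException;

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.BulkScorer;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.LeafCollector;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    final class BulkScoringSketch {
      // Score one segment with whatever BulkScorer the Weight provides.
      static void scoreLeaf(IndexSearcher searcher, Query query, LeafReaderContext leaf) throws IOException {
        Weight weight = searcher.createNormalizedWeight(query, true); // needsScores
        BulkScorer bulk = weight.bulkScorer(leaf);
        if (bulk == null) {
          return; // no matches in this segment
        }
        bulk.score(new LeafCollector() {
          private Scorer scorer;

          @Override
          public void setScorer(Scorer scorer) throws IOException {
            this.scorer = scorer; // collectors still see the Scorer, not its iterator
          }

          @Override
          public void collect(int doc) throws IOException {
            System.out.println(doc + " scored " + scorer.score());
          }
        }, leaf.reader().getLiveDocs());
      }
    }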
@@ -427,13 +427,14 @@
  * <p>The
  * {@link org.apache.lucene.search.Scorer Scorer}
  * abstract class provides common scoring functionality for all Scorer implementations and
- * is the heart of the Lucene scoring process. The Scorer defines the following abstract (some of them are not
- * yet abstract, but will be in future versions and should be considered as such now) methods which
- * must be implemented (some of them inherited from {@link org.apache.lucene.search.DocIdSetIterator DocIdSetIterator}):
+ * is the heart of the Lucene scoring process. The Scorer defines the following methods which
+ * must be implemented:
  * <ol>
  *    <li>
- *       {@link org.apache.lucene.search.Scorer#nextDoc nextDoc()} — Advances to the next
- *       document that matches this Query, returning true if and only if there is another document that matches.</li>
+ *       {@link org.apache.lucene.search.Scorer#iterator iterator()} — Return a
+ *       {@link org.apache.lucene.search.DocIdSetIterator DocIdSetIterator} that can iterate over all
+ *       document that matches this Query.
+ *    </li>
  *    <li>
  *       {@link org.apache.lucene.search.Scorer#docID docID()} — Returns the id of the
  *       {@link org.apache.lucene.document.Document Document} that contains the match.
@@ -451,13 +452,6 @@
  *       {@link org.apache.lucene.index.PostingsEnum#freq PostingsEnum.freq()}.
  *    </li>
  *    <li>
- *       {@link org.apache.lucene.search.Scorer#advance advance()} — Skip ahead in
- *       the document matches to the document whose id is greater than
- *       or equal to the passed in value. In many instances, advance can be
- *       implemented more efficiently than simply looping through all the matching documents until
- *       the target document is identified.
- *    </li>
- *    <li>
  *       {@link org.apache.lucene.search.Scorer#getChildren getChildren()} — Returns any child subscorers
  *       underneath this scorer. This allows for users to navigate the scorer hierarchy and receive more fine-grained
  *       details on the scoring process.
@@ -531,7 +525,7 @@
  * <p>Assuming a BooleanScorer2, we first initialize the Coordinator, which is used to apply the coord()
  * factor. We then get a internal Scorer based on the required, optional and prohibited parts of the query.
  * Using this internal Scorer, the BooleanScorer2 then proceeds into a while loop based on the
- * {@link org.apache.lucene.search.Scorer#nextDoc Scorer.nextDoc()} method. The nextDoc() method advances
+ * {@link org.apache.lucene.search.DocIdSetIterator#nextDoc DocIdSetIterator.nextDoc()} method. The nextDoc() method advances
  * to the next document matching the query. This is an abstract method in the Scorer class and is thus
  * overridden by all derived implementations. If you have a simple OR query your internal Scorer is most
  * likely a DisjunctionSumScorer, which essentially combines the scorers from the sub scorers of the OR'd terms.
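Note (not part of the patch): a small hypothetical utility illustrating the getChildren() navigation mentioned in the package documentation above; it simply prints the scorer hierarchy with each child's relationship.

    import java.io.IOException;

    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Scorer.ChildScorer;

    final class ScorerTreeWalk {
      // Recursively print a scorer and its subscorers, one level of indentation per depth.
      static void dump(Scorer scorer, int depth) throws IOException {
        StringBuilder indent = new StringBuilder();
        for (int i = 0; i < depth; i++) {
          indent.append("  ");
        }
        System.out.println(indent + scorer.getClass().getSimpleName() + " at doc " + scorer.docID());
        for (ChildScorer child : scorer.getChildren()) {
          System.out.println(indent + "  (" + child.relationship + ")");
          dump(child.child, depth + 1);
        }
      }
    }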
@@ -23,7 +23,6 @@ import java.util.List;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.ConjunctionDISI;
 import org.apache.lucene.search.TwoPhaseIterator;
-import org.apache.lucene.search.similarities.Similarity;
 
 /**
  * Common super class for multiple sub spans required in a document.
@@ -34,13 +33,12 @@ abstract class ConjunctionSpans extends Spans {
   boolean atFirstInCurrentDoc; // a first start position is available in current doc for nextStartPosition
   boolean oneExhaustedInCurrentDoc; // one subspans exhausted in current doc
 
-  ConjunctionSpans(List<Spans> subSpans, SpanWeight weight, Similarity.SimScorer docScorer) {
-    super(weight, docScorer);
+  ConjunctionSpans(List<Spans> subSpans) {
     if (subSpans.size() < 2) {
       throw new IllegalArgumentException("Less than 2 subSpans.size():" + subSpans.size());
     }
     this.subSpans = subSpans.toArray(new Spans[subSpans.size()]);
-    this.conjunction = ConjunctionDISI.intersect(subSpans);
+    this.conjunction = ConjunctionDISI.intersectSpans(subSpans);
     this.atFirstInCurrentDoc = true; // ensure for doc -1 that start/end positions are -1
   }
 
@@ -21,15 +21,13 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Objects;
 
-import org.apache.lucene.search.similarities.Similarity;
-
 abstract class ContainSpans extends ConjunctionSpans {
   Spans sourceSpans;
   Spans bigSpans;
   Spans littleSpans;
 
-  ContainSpans(SpanWeight weight, Similarity.SimScorer simScorer, Spans bigSpans, Spans littleSpans, Spans sourceSpans) {
-    super(Arrays.asList(bigSpans, littleSpans), weight, simScorer);
+  ContainSpans(Spans bigSpans, Spans littleSpans, Spans sourceSpans) {
+    super(Arrays.asList(bigSpans, littleSpans));
     this.bigSpans = Objects.requireNonNull(bigSpans);
     this.littleSpans = Objects.requireNonNull(littleSpans);
     this.sourceSpans = Objects.requireNonNull(sourceSpans);

@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.util.Objects;
 
 import org.apache.lucene.search.TwoPhaseIterator;
-import org.apache.lucene.search.similarities.Similarity;
 
 /**
  * A {@link Spans} implementation wrapping another spans instance,
@@ -36,8 +35,7 @@ public abstract class FilterSpans extends Spans {
   private int startPos = -1;
 
   /** Wrap the given {@link Spans}. */
-  protected FilterSpans(Spans in, Similarity.SimScorer docScorer) {
-    super((SpanWeight)in.getWeight(), docScorer);
+  protected FilterSpans(Spans in) {
     this.in = Objects.requireNonNull(in);
   }
 
@@ -20,8 +20,6 @@ package org.apache.lucene.search.spans;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.lucene.search.similarities.Similarity;
-
 /**
  * A Spans that is formed from the ordered subspans of a SpanNearQuery
  * where the subspans do not overlap and have a maximum slop between them.
@@ -52,8 +50,8 @@ public class NearSpansOrdered extends ConjunctionSpans {
 
   private final int allowedSlop;
 
-  public NearSpansOrdered(SpanWeight weight, int allowedSlop, List<Spans> subSpans, Similarity.SimScorer simScorer) throws IOException {
-    super(subSpans, weight, simScorer);
+  public NearSpansOrdered(int allowedSlop, List<Spans> subSpans) throws IOException {
+    super(subSpans);
     this.atFirstInCurrentDoc = true; // -1 startPosition/endPosition also at doc -1
     this.allowedSlop = allowedSlop;
   }
@@ -152,10 +150,5 @@ public class NearSpansOrdered extends ConjunctionSpans {
     }
   }
 
-  @Override
-  public String toString() {
-    return "NearSpansOrdered("+weight.getQuery().toString()+")@"+docID()+": "+startPosition()+" - "+endPosition();
-  }
-
 }

@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.lucene.search.TwoPhaseIterator;
-import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.util.PriorityQueue;
 
 /**
@@ -38,9 +37,9 @@ public class NearSpansUnordered extends ConjunctionSpans {
 
   private SpanPositionQueue spanPositionQueue;
 
-  public NearSpansUnordered(SpanWeight weight, int allowedSlop, List<Spans> subSpans, Similarity.SimScorer simScorer)
+  public NearSpansUnordered(int allowedSlop, List<Spans> subSpans)
   throws IOException {
-    super(subSpans, weight, simScorer);
+    super(subSpans);
 
     this.subSpanCells = new ArrayList<>(subSpans.size());
     for (Spans subSpan : subSpans) { // sub spans in query order
@@ -77,7 +76,6 @@ public class NearSpansUnordered extends ConjunctionSpans {
     final Spans in;
 
     public SpansCell(Spans spans) {
-      super((SpanWeight) NearSpansUnordered.this.weight, NearSpansUnordered.this.docScorer);
       this.in = spans;
     }
 
@@ -267,13 +265,4 @@ public class NearSpansUnordered extends ConjunctionSpans {
     }
   }
 
-  @Override
-  public String toString() {
-    if (minPositionCell() != null) {
-      return getClass().getName() + "("+weight.getQuery().toString()+")@"+
-        (docID()+":"+startPosition()+"-"+endPosition());
-    } else {
-      return getClass().getName() + "("+weight.getQuery().toString()+")@ ?START?";
-    }
-  }
 }

@@ -35,7 +35,6 @@ public class ScoringWrapperSpans extends Spans {
    * @param simScorer the SimScorer to use for scoring
    */
   public ScoringWrapperSpans(Spans spans, Similarity.SimScorer simScorer) {
-    super((SpanWeight) spans.getWeight(), simScorer);
     this.in = spans;
   }
 
@@ -151,7 +151,7 @@ public final class SpanBoostQuery extends SpanQuery {
     }
 
     @Override
-    public Scorer scorer(LeafReaderContext context) throws IOException {
+    public SpanScorer scorer(LeafReaderContext context) throws IOException {
       return weight.scorer(context);
     }
 
@@ -71,7 +71,7 @@ public final class SpanContainingQuery extends SpanContainQuery {
       Spans big = containerContained.get(0);
       Spans little = containerContained.get(1);
 
-      return new ContainSpans(this, getSimScorer(context), big, little, big) {
+      return new ContainSpans(big, little, big) {
 
         @Override
         boolean twoPhaseCurrentDocMatches() throws IOException {

@@ -219,8 +219,8 @@ public class SpanNearQuery extends SpanQuery implements Cloneable {
     }
 
     // all NearSpans require at least two subSpans
-    return (!inOrder) ? new NearSpansUnordered(this, slop, subSpans, getSimScorer(context))
-                      : new NearSpansOrdered(this, slop, subSpans, getSimScorer(context));
+    return (!inOrder) ? new NearSpansUnordered(slop, subSpans)
+                      : new NearSpansOrdered(slop, subSpans);
   }
 
   @Override
@@ -330,7 +330,6 @@ public class SpanNearQuery extends SpanQuery implements Cloneable {
     final int width;
 
     GapSpans(int width) {
-      super(null, null);
       this.width = width;
     }
 
@@ -132,7 +132,7 @@ public final class SpanNotQuery extends SpanQuery {
     TwoPhaseIterator excludeTwoPhase = excludeSpans.asTwoPhaseIterator();
     DocIdSetIterator excludeApproximation = excludeTwoPhase == null ? null : excludeTwoPhase.approximation();
 
-    return new FilterSpans(includeSpans, getSimScorer(context)) {
+    return new FilterSpans(includeSpans) {
       // last document we have checked matches() against for the exclusion, and failed
       // when using approximations, so we don't call it again, and pass thru all inclusions.
       int lastApproxDoc = -1;

@@ -169,20 +169,20 @@ public final class SpanOrQuery extends SpanQuery {
       return new ScoringWrapperSpans(subSpans.get(0), getSimScorer(context));
     }
 
-    DisiPriorityQueue<Spans> byDocQueue = new DisiPriorityQueue<>(subSpans.size());
+    DisiPriorityQueue byDocQueue = new DisiPriorityQueue(subSpans.size());
     for (Spans spans : subSpans) {
-      byDocQueue.add(new DisiWrapper<>(spans));
+      byDocQueue.add(new DisiWrapper(spans));
     }
 
     SpanPositionQueue byPositionQueue = new SpanPositionQueue(subSpans.size()); // when empty use -1
 
-    return new Spans(this, getSimScorer(context)) {
+    return new Spans() {
       Spans topPositionSpans = null;
 
       @Override
       public int nextDoc() throws IOException {
         topPositionSpans = null;
-        DisiWrapper<Spans> topDocSpans = byDocQueue.top();
+        DisiWrapper topDocSpans = byDocQueue.top();
         int currentDoc = topDocSpans.doc;
         do {
           topDocSpans.doc = topDocSpans.iterator.nextDoc();
@@ -194,7 +194,7 @@ public final class SpanOrQuery extends SpanQuery {
       @Override
       public int advance(int target) throws IOException {
         topPositionSpans = null;
-        DisiWrapper<Spans> topDocSpans = byDocQueue.top();
+        DisiWrapper topDocSpans = byDocQueue.top();
         do {
           topDocSpans.doc = topDocSpans.iterator.advance(target);
           topDocSpans = byDocQueue.updateTop();
@@ -204,7 +204,7 @@ public final class SpanOrQuery extends SpanQuery {
 
       @Override
       public int docID() {
-        DisiWrapper<Spans> topDocSpans = byDocQueue.top();
+        DisiWrapper topDocSpans = byDocQueue.top();
         return topDocSpans.doc;
       }
 
@@ -213,7 +213,7 @@ public final class SpanOrQuery extends SpanQuery {
         float sumMatchCost = 0; // See also DisjunctionScorer.asTwoPhaseIterator()
         long sumApproxCost = 0;
 
-        for (DisiWrapper<Spans> w : byDocQueue) {
+        for (DisiWrapper w : byDocQueue) {
           if (w.twoPhaseView != null) {
             long costWeight = (w.cost <= 1) ? 1 : w.cost;
             sumMatchCost += w.twoPhaseView.matchCost() * costWeight;
@@ -228,7 +228,7 @@ public final class SpanOrQuery extends SpanQuery {
 
         final float matchCost = sumMatchCost / sumApproxCost;
 
-        return new TwoPhaseIterator(new DisjunctionDISIApproximation<Spans>(byDocQueue)) {
+        return new TwoPhaseIterator(new DisjunctionDISIApproximation(byDocQueue)) {
           @Override
           public boolean matches() throws IOException {
             return twoPhaseCurrentDocMatches();
@@ -246,9 +246,9 @@ public final class SpanOrQuery extends SpanQuery {
       void computePositionsCost() {
         float sumPositionsCost = 0;
         long sumCost = 0;
-        for (DisiWrapper<Spans> w : byDocQueue) {
+        for (DisiWrapper w : byDocQueue) {
           long costWeight = (w.cost <= 1) ? 1 : w.cost;
-          sumPositionsCost += w.iterator.positionsCost() * costWeight;
+          sumPositionsCost += w.spans.positionsCost() * costWeight;
           sumCost += costWeight;
         }
         positionsCost = sumPositionsCost / sumCost;
@@ -265,7 +265,7 @@ public final class SpanOrQuery extends SpanQuery {
       int lastDocTwoPhaseMatched = -1;
 
       boolean twoPhaseCurrentDocMatches() throws IOException {
-        DisiWrapper<Spans> listAtCurrentDoc = byDocQueue.topList();
+        DisiWrapper listAtCurrentDoc = byDocQueue.topList();
         // remove the head of the list as long as it does not match
         final int currentDoc = listAtCurrentDoc.doc;
         while (listAtCurrentDoc.twoPhaseView != null) {
@@ -289,9 +289,9 @@ public final class SpanOrQuery extends SpanQuery {
       void fillPositionQueue() throws IOException { // called at first nextStartPosition
         assert byPositionQueue.size() == 0;
         // add all matching Spans at current doc to byPositionQueue
-        DisiWrapper<Spans> listAtCurrentDoc = byDocQueue.topList();
+        DisiWrapper listAtCurrentDoc = byDocQueue.topList();
         while (listAtCurrentDoc != null) {
-          Spans spansAtDoc = listAtCurrentDoc.iterator;
+          Spans spansAtDoc = listAtCurrentDoc.spans;
           if (lastDocTwoPhaseMatched == listAtCurrentDoc.doc) { // matched by DisjunctionDisiApproximation
             if (listAtCurrentDoc.twoPhaseView != null) { // matched by approximation
               if (listAtCurrentDoc.lastApproxNonMatchDoc == listAtCurrentDoc.doc) { // matches() returned false

@@ -94,7 +94,7 @@ public abstract class SpanPositionCheckQuery extends SpanQuery implements Clonea
   @Override
   public Spans getSpans(final LeafReaderContext context, Postings requiredPostings) throws IOException {
     Spans matchSpans = matchWeight.getSpans(context, requiredPostings);
-    return (matchSpans == null) ? null : new FilterSpans(matchSpans, getSimScorer(context)) {
+    return (matchSpans == null) ? null : new FilterSpans(matchSpans) {
       @Override
       protected AcceptStatus accept(Spans candidate) throws IOException {
         return acceptPosition(candidate);
@@ -0,0 +1,145 @@
+package org.apache.lucene.search.spans;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.Objects;
+
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.TwoPhaseIterator;
+import org.apache.lucene.search.similarities.Similarity;
+
+/**
+ * A basic {@link Scorer} over {@link Spans}.
+ * @lucene.experimental
+ */
+public class SpanScorer extends Scorer {
+
+  protected final Spans spans;
+  protected final Similarity.SimScorer docScorer;
+
+  /** accumulated sloppy freq (computed in setFreqCurrentDoc) */
+  private float freq;
+  /** number of matches (computed in setFreqCurrentDoc) */
+  private int numMatches;
+  private int lastScoredDoc = -1; // last doc we called setFreqCurrentDoc() for
+
+  /** Sole constructor. */
+  public SpanScorer(SpanWeight weight, Spans spans, Similarity.SimScorer docScorer) {
+    super(weight);
+    this.spans = Objects.requireNonNull(spans);
+    this.docScorer = docScorer;
+  }
+
+  @Override
+  public int docID() {
+    return spans.docID();
+  }
+
+  @Override
+  public DocIdSetIterator iterator() {
+    return spans;
+  }
+
+  @Override
+  public TwoPhaseIterator twoPhaseIterator() {
+    return spans.asTwoPhaseIterator();
+  }
+
+  /**
+   * Score the current doc. The default implementation scores the doc
+   * with the similarity using the slop-adjusted {@link #freq}.
+   */
+  protected float scoreCurrentDoc() throws IOException {
+    assert docScorer != null : getClass() + " has a null docScorer!";
+    return docScorer.score(docID(), freq);
+  }
+
+  /**
+   * Sets {@link #freq} and {@link #numMatches} for the current document.
+   * <p>
+   * This will be called at most once per document.
+   */
+  protected final void setFreqCurrentDoc() throws IOException {
+    freq = 0.0f;
+    numMatches = 0;
+
+    spans.doStartCurrentDoc();
+
+    assert spans.startPosition() == -1 : "incorrect initial start position, " + this.toString();
+    assert spans.endPosition() == -1 : "incorrect initial end position, " + this.toString();
+    int prevStartPos = -1;
+    int prevEndPos = -1;
+
+    int startPos = spans.nextStartPosition();
+    assert startPos != Spans.NO_MORE_POSITIONS : "initial startPos NO_MORE_POSITIONS, " + this.toString();
+    do {
+      assert startPos >= prevStartPos;
+      int endPos = spans.endPosition();
+      assert endPos != Spans.NO_MORE_POSITIONS;
+      // This assertion can fail for Or spans on the same term:
+      // assert (startPos != prevStartPos) || (endPos > prevEndPos) : "non increased endPos="+endPos;
+      assert (startPos != prevStartPos) || (endPos >= prevEndPos) : "decreased endPos="+endPos;
+      numMatches++;
+      if (docScorer == null) {  // scores not required, break out here
+        freq = 1;
+        return;
+      }
+      freq += docScorer.computeSlopFactor(spans.width());
+      spans.doCurrentSpans();
+      prevStartPos = startPos;
+      prevEndPos = endPos;
+      startPos = spans.nextStartPosition();
+    } while (startPos != Spans.NO_MORE_POSITIONS);
+
+    assert spans.startPosition() == Spans.NO_MORE_POSITIONS : "incorrect final start position, " + this.toString();
+    assert spans.endPosition() == Spans.NO_MORE_POSITIONS : "incorrect final end position, " + this.toString();
+  }
+
+  /**
+   * Ensure setFreqCurrentDoc is called, if not already called for the current doc.
+   */
+  private void ensureFreq() throws IOException {
+    int currentDoc = docID();
+    if (lastScoredDoc != currentDoc) {
+      setFreqCurrentDoc();
+      lastScoredDoc = currentDoc;
+    }
+  }
+
+  @Override
+  public final float score() throws IOException {
+    ensureFreq();
+    return scoreCurrentDoc();
+  }
+
+  @Override
+  public final int freq() throws IOException {
+    ensureFreq();
+    return numMatches;
+  }
+
+  /** Returns the intermediate "sloppy freq" adjusted for edit distance
+   *  @lucene.internal */
+  final float sloppyFreq() throws IOException {
+    ensureFreq();
+    return freq;
+  }
+
+}
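A minimal consumption sketch for the new class, assuming a SpanWeight `weight` and a LeafReaderContext `context` obtained elsewhere; matching is driven through the iterator while scoring stays on the Scorer:

    // Hedged sketch: iterate a SpanScorer through its DocIdSetIterator view.
    SpanScorer scorer = weight.scorer(context);
    if (scorer != null) {
      DocIdSetIterator it = scorer.iterator();
      for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
        float score = scorer.score();   // scoring stays on the Scorer
        int freq = scorer.freq();       // number of span matches in this doc
        // consume doc/score/freq here
      }
    }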
@@ -118,7 +118,7 @@ public class SpanTermQuery extends SpanQuery {
 
       final PostingsEnum postings = termsEnum.postings(null, requiredPostings.getRequiredPostings());
       float positionsCost = termPositionsCost(termsEnum) * PHRASE_TO_SPAN_TERM_POSITIONS_COST;
-      return new TermSpans(this, getSimScorer(context), postings, term, positionsCost);
+      return new TermSpans(getSimScorer(context), postings, term, positionsCost);
     }
   }

@@ -27,7 +27,6 @@ import org.apache.lucene.index.TermContext;
 import org.apache.lucene.search.CollectionStatistics;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.similarities.Similarity;

@@ -130,8 +129,13 @@ public abstract class SpanWeight extends Weight {
   }
 
   @Override
-  public Scorer scorer(LeafReaderContext context) throws IOException {
-    return getSpans(context, Postings.POSITIONS);
+  public SpanScorer scorer(LeafReaderContext context) throws IOException {
+    final Spans spans = getSpans(context, Postings.POSITIONS);
+    if (spans == null) {
+      return null;
+    }
+    final Similarity.SimScorer docScorer = getSimScorer(context);
+    return new SpanScorer(this, spans, docScorer);
   }
 
   /**

@@ -146,9 +150,9 @@ public abstract class SpanWeight extends Weight {
 
   @Override
   public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-    Spans scorer = (Spans) scorer(context);
+    SpanScorer scorer = scorer(context);
     if (scorer != null) {
-      int newDoc = scorer.advance(doc);
+      int newDoc = scorer.iterator().advance(doc);
       if (newDoc == doc) {
         float freq = scorer.sloppyFreq();
         SimScorer docScorer = similarity.simScorer(simWeight, context);

@@ -72,7 +72,7 @@ public final class SpanWithinQuery extends SpanContainQuery {
     Spans big = containerContained.get(0);
     Spans little = containerContained.get(1);
 
-    return new ContainSpans(this, getSimScorer(context), big, little, little) {
+    return new ContainSpans(big, little, little) {
 
       @Override
       boolean twoPhaseCurrentDocMatches() throws IOException {
@@ -19,8 +19,9 @@ package org.apache.lucene.search.spans;
 
 import java.io.IOException;
 
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.similarities.Similarity;
+import org.apache.lucene.search.TwoPhaseIterator;
 import org.apache.lucene.search.similarities.Similarity.SimScorer;
 
 /** Iterates through combinations of start/end positions per-doc.

@@ -28,24 +29,10 @@ import org.apache.lucene.search.similarities.Similarity.SimScorer;
  * These are enumerated in order, by increasing document number, within that by
  * increasing start position and finally by increasing end position.
  */
-public abstract class Spans extends Scorer {
+public abstract class Spans extends DocIdSetIterator {
 
   public static final int NO_MORE_POSITIONS = Integer.MAX_VALUE;
 
-  protected final Similarity.SimScorer docScorer;
-
-  protected Spans(SpanWeight weight, SimScorer docScorer) {
-    super(weight);
-    this.docScorer = docScorer;
-  }
-
-  /** accumulated sloppy freq (computed in setFreqCurrentDoc) */
-  protected float freq;
-  /** number of matches (computed in setFreqCurrentDoc) */
-  protected int numMatches;
-
-  private int lastScoredDoc = -1; // last doc we called setFreqCurrentDoc() for
-
   /**
    * Returns the next start position for the current doc.
    * There is always at least one start/end position per doc.

@@ -97,6 +84,16 @@ public abstract class Spans extends Scorer {
    */
   public abstract float positionsCost();
 
+  /**
+   * Optional method: Return a {@link TwoPhaseIterator} view of this
+   * {@link Scorer}. A return value of {@code null} indicates that
+   * two-phase iteration is not supported.
+   * @see Scorer#twoPhaseIterator()
+   */
+  public TwoPhaseIterator asTwoPhaseIterator() {
+    return null;
+  }
+
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();

@@ -109,58 +106,6 @@ public abstract class Spans extends Scorer {
     return sb.toString();
   }
 
-  /**
-   * Ensure setFreqCurrentDoc is called, if not already called for the current doc.
-   */
-  private void ensureFreq() throws IOException {
-    int currentDoc = docID();
-    if (lastScoredDoc != currentDoc) {
-      setFreqCurrentDoc();
-      lastScoredDoc = currentDoc;
-    }
-  }
-
-  /**
-   * Sets {@link #freq} and {@link #numMatches} for the current document.
-   * <p>
-   * This will be called at most once per document.
-   */
-  protected final void setFreqCurrentDoc() throws IOException {
-    freq = 0.0f;
-    numMatches = 0;
-
-    doStartCurrentDoc();
-
-    assert startPosition() == -1 : "incorrect initial start position, " + this.toString();
-    assert endPosition() == -1 : "incorrect initial end position, " + this.toString();
-    int prevStartPos = -1;
-    int prevEndPos = -1;
-
-    int startPos = nextStartPosition();
-    assert startPos != Spans.NO_MORE_POSITIONS : "initial startPos NO_MORE_POSITIONS, " + this.toString();
-    do {
-      assert startPos >= prevStartPos;
-      int endPos = endPosition();
-      assert endPos != Spans.NO_MORE_POSITIONS;
-      // This assertion can fail for Or spans on the same term:
-      // assert (startPos != prevStartPos) || (endPos > prevEndPos) : "non increased endPos="+endPos;
-      assert (startPos != prevStartPos) || (endPos >= prevEndPos) : "decreased endPos="+endPos;
-      numMatches++;
-      if (docScorer == null) {  // scores not required, break out here
-        freq = 1;
-        return;
-      }
-      freq += docScorer.computeSlopFactor(width());
-      doCurrentSpans();
-      prevStartPos = startPos;
-      prevEndPos = endPos;
-      startPos = nextStartPosition();
-    } while (startPos != Spans.NO_MORE_POSITIONS);
-
-    assert startPosition() == Spans.NO_MORE_POSITIONS : "incorrect final start position, " + this.toString();
-    assert endPosition() == Spans.NO_MORE_POSITIONS : "incorrect final end position, " + this.toString();
-  }
-
   /**
    * Called before the current doc's frequency is calculated
    */

@@ -171,32 +116,4 @@ public abstract class Spans extends Scorer {
    */
   protected void doCurrentSpans() throws IOException {}
 
-  /**
-   * Score the current doc. The default implementation scores the doc
-   * with the similarity using the slop-adjusted {@link #freq}.
-   */
-  protected float scoreCurrentDoc() throws IOException {
-    assert docScorer != null : getClass() + " has a null docScorer!";
-    return docScorer.score(docID(), freq);
-  }
-
-  @Override
-  public final float score() throws IOException {
-    ensureFreq();
-    return scoreCurrentDoc();
-  }
-
-  @Override
-  public final int freq() throws IOException {
-    ensureFreq();
-    return numMatches;
-  }
-
-  /** Returns the intermediate "sloppy freq" adjusted for edit distance
-   *  @lucene.internal */
-  final float sloppyFreq() throws IOException {
-    ensureFreq();
-    return freq;
-  }
-
 }
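A consumer-side sketch of the two-phase contract described by the javadoc added above, assuming a Scorer `scorer` obtained from Weight.scorer(context); when the two-phase view is non-null, callers drive the cheap approximation and confirm each hit with matches():

    // Hedged sketch of two-phase consumption.
    TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
    if (twoPhase == null) {
      // No two-phase view: every doc from the plain iterator is a real match.
      DocIdSetIterator it = scorer.iterator();
      for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
        // handle confirmed match
      }
    } else {
      // Two-phase view: the approximation may over-match, so confirm with matches().
      DocIdSetIterator approximation = twoPhase.approximation();
      for (int doc = approximation.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = approximation.nextDoc()) {
        if (twoPhase.matches()) {
          // handle confirmed match
        }
      }
    }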
@@ -39,9 +39,8 @@ public class TermSpans extends Spans {
   protected boolean readPayload;
   private final float positionsCost;
 
-  public TermSpans(SpanWeight weight, Similarity.SimScorer scorer,
+  public TermSpans(Similarity.SimScorer scorer,
                    PostingsEnum postings, Term term, float positionsCost) {
-    super(weight, scorer);
     this.postings = Objects.requireNonNull(postings);
     this.term = Objects.requireNonNull(term);
     this.doc = -1;

@@ -177,17 +177,7 @@ final class JustCompileSearch {
     }
 
     @Override
-    public int nextDoc() {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
-    public int advance(int target) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
-    public long cost() {
+    public DocIdSetIterator iterator() {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
   }
@@ -824,7 +824,7 @@ public class TestBooleanCoord extends LuceneTestCase {
     Weight weight = searcher.createNormalizedWeight(query, true);
     Scorer scorer = weight.scorer(reader.leaves().get(0));
     assertTrue(scorer.docID() == -1 || scorer.docID() == DocIdSetIterator.NO_MORE_DOCS);
-    assertEquals(0, scorer.nextDoc());
+    assertEquals(0, scorer.iterator().nextDoc());
     assertEquals(expected, scorer.score(), 0.0001f);
 
     // test bulk scorer

@@ -357,7 +357,7 @@ public class TestBooleanQuery extends LuceneTestCase {
 
         // First pass: just use .nextDoc() to gather all hits
         final List<ScoreDoc> hits = new ArrayList<>();
-        while(scorer.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+        while(scorer.iterator().nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
          hits.add(new ScoreDoc(scorer.docID(), scorer.score()));
        }
 

@@ -384,12 +384,12 @@ public class TestBooleanQuery extends LuceneTestCase {
           if (left == 1 || random().nextBoolean()) {
             // next
             nextUpto = 1+upto;
-            nextDoc = scorer.nextDoc();
+            nextDoc = scorer.iterator().nextDoc();
           } else {
             // advance
             int inc = TestUtil.nextInt(random(), 1, left - 1);
             nextUpto = inc + upto;
-            nextDoc = scorer.advance(hits.get(nextUpto).doc);
+            nextDoc = scorer.iterator().advance(hits.get(nextUpto).doc);
           }
 
           if (nextUpto == hits.size()) {

@@ -658,7 +658,7 @@ public class TestBooleanQuery extends LuceneTestCase {
     final Weight weight = searcher.createNormalizedWeight(q.build(), random().nextBoolean());
     final Scorer scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
     assertTrue(scorer instanceof ConjunctionScorer);
-    assertNotNull(scorer.asTwoPhaseIterator());
+    assertNotNull(scorer.twoPhaseIterator());
 
     reader.close();
     w.close();

@@ -687,7 +687,7 @@ public class TestBooleanQuery extends LuceneTestCase {
     final Weight weight = searcher.createNormalizedWeight(q.build(), random().nextBoolean());
     final Scorer scorer = weight.scorer(reader.leaves().get(0));
     assertTrue(scorer instanceof DisjunctionScorer);
-    assertNotNull(scorer.asTwoPhaseIterator());
+    assertNotNull(scorer.twoPhaseIterator());
 
     reader.close();
     w.close();

@@ -718,7 +718,7 @@ public class TestBooleanQuery extends LuceneTestCase {
     final Weight weight = searcher.createNormalizedWeight(q.build(), random().nextBoolean());
     final Scorer scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
     assertTrue(scorer instanceof BoostedScorer || scorer instanceof ExactPhraseScorer);
-    assertNotNull(scorer.asTwoPhaseIterator());
+    assertNotNull(scorer.twoPhaseIterator());
 
     reader.close();
     w.close();

@@ -747,7 +747,7 @@ public class TestBooleanQuery extends LuceneTestCase {
     final Weight weight = searcher.createNormalizedWeight(q.build(), random().nextBoolean());
     final Scorer scorer = weight.scorer(reader.leaves().get(0));
     assertTrue(scorer instanceof ReqExclScorer);
-    assertNotNull(scorer.asTwoPhaseIterator());
+    assertNotNull(scorer.twoPhaseIterator());
 
     reader.close();
     w.close();

@@ -776,7 +776,7 @@ public class TestBooleanQuery extends LuceneTestCase {
     final Weight weight = searcher.createNormalizedWeight(q.build(), true);
     final Scorer scorer = weight.scorer(reader.leaves().get(0));
     assertTrue(scorer instanceof ReqOptSumScorer);
-    assertNotNull(scorer.asTwoPhaseIterator());
+    assertNotNull(scorer.twoPhaseIterator());
 
     reader.close();
     w.close();

@@ -91,7 +91,7 @@ public class TestBooleanRewrites extends LuceneTestCase {
     query2.add(new TermQuery(new Term("field", "b")), Occur.SHOULD);
     final Weight weight = searcher.createNormalizedWeight(query2.build(), true);
     final Scorer scorer = weight.scorer(reader.leaves().get(0));
-    assertEquals(0, scorer.nextDoc());
+    assertEquals(0, scorer.iterator().nextDoc());
     assertTrue(scorer.getClass().getName(), scorer instanceof FilterScorer);
     assertEquals(0f, scorer.score(), 0f);
 
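The test updates above all follow the same mechanical migration: document iteration moves from the Scorer itself to the DocIdSetIterator it now exposes, while docID(), score() and freq() stay where they were. A sketch of the pattern, assuming `scorer` comes from Weight.scorer(context) and `consume` is a hypothetical callback:

    // Before this change, callers iterated the Scorer directly:
    //   while (scorer.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { ... }
    // After it, the iterator is obtained and driven instead:
    DocIdSetIterator it = scorer.iterator();
    for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
      consume(doc, scorer.score()); // scoring calls stay on the Scorer
    }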
@@ -41,14 +41,8 @@ public class TestCachingCollector extends LuceneTestCase {
     public int docID() { return 0; }
 
     @Override
-    public int nextDoc() throws IOException { return 0; }
-
-    @Override
-    public int advance(int target) throws IOException { return 0; }
-
-    @Override
-    public long cost() {
-      return 1;
+    public DocIdSetIterator iterator() {
+      throw new UnsupportedOperationException();
     }
   }
 
@@ -52,8 +52,8 @@ public class TestConjunctionDISI extends LuceneTestCase {
   /**
    * Create a {@link Scorer} that wraps the given {@link DocIdSetIterator}. It
    * also accepts a {@link TwoPhaseIterator} view, which is exposed in
-   * {@link Scorer#asTwoPhaseIterator()}. When the two-phase view is not null,
-   * then {@link Scorer#nextDoc()} and {@link Scorer#advance(int)} will raise
+   * {@link Scorer#twoPhaseIterator()}. When the two-phase view is not null,
+   * then {@link DocIdSetIterator#nextDoc()} and {@link DocIdSetIterator#advance(int)} will raise
    * an exception in order to make sure that {@link ConjunctionDISI} takes
    * advantage of the {@link TwoPhaseIterator} view.
    */

@@ -61,15 +61,11 @@ public class TestConjunctionDISI extends LuceneTestCase {
     return new Scorer(null) {
 
       @Override
-      public TwoPhaseIterator asTwoPhaseIterator() {
-        return twoPhaseIterator;
-      }
-
+      public DocIdSetIterator iterator() {
+        return new DocIdSetIterator() {
+
           @Override
           public int docID() {
-            if (twoPhaseIterator != null) {
-              throw new UnsupportedOperationException("ConjunctionDISI should call the two-phase iterator");
-            }
             return it.docID();
           }
 

@@ -96,6 +92,21 @@ public class TestConjunctionDISI extends LuceneTestCase {
             }
             return it.cost();
           }
+        };
+      }
+
+      @Override
+      public TwoPhaseIterator twoPhaseIterator() {
+        return twoPhaseIterator;
+      }
+
+      @Override
+      public int docID() {
+        if (twoPhaseIterator != null) {
+          throw new UnsupportedOperationException("ConjunctionDISI should call the two-phase iterator");
+        }
+        return it.docID();
+      }
 
       @Override
       public float score() throws IOException {

@@ -154,13 +165,13 @@ public class TestConjunctionDISI extends LuceneTestCase {
       final int maxDoc = TestUtil.nextInt(random(), 100, 10000);
       final int numIterators = TestUtil.nextInt(random(), 2, 5);
       final FixedBitSet[] sets = new FixedBitSet[numIterators];
-      final DocIdSetIterator[] iterators = new DocIdSetIterator[numIterators];
+      final Scorer[] iterators = new Scorer[numIterators];
       for (int i = 0; i < iterators.length; ++i) {
         final FixedBitSet set = randomSet(maxDoc);
         if (random().nextBoolean()) {
           // simple iterator
           sets[i] = set;
-          iterators[i] = new BitDocIdSet(set).iterator();
+          iterators[i] = new ConstantScoreScorer(null, 0f, new BitDocIdSet(set).iterator());
         } else {
           // scorer with approximation
           final FixedBitSet confirmed = clearRandomBits(set);

@@ -170,7 +181,7 @@ public class TestConjunctionDISI extends LuceneTestCase {
         }
       }
 
-      final ConjunctionDISI conjunction = ConjunctionDISI.intersect(Arrays.asList(iterators));
+      final ConjunctionDISI conjunction = ConjunctionDISI.intersectScorers(Arrays.asList(iterators));
       assertEquals(intersect(sets), toBitSet(maxDoc, conjunction));
     }
   }

@@ -182,14 +193,14 @@ public class TestConjunctionDISI extends LuceneTestCase {
       final int maxDoc = TestUtil.nextInt(random(), 100, 10000);
       final int numIterators = TestUtil.nextInt(random(), 2, 5);
       final FixedBitSet[] sets = new FixedBitSet[numIterators];
-      final DocIdSetIterator[] iterators = new DocIdSetIterator[numIterators];
+      final Scorer[] iterators = new Scorer[numIterators];
       boolean hasApproximation = false;
       for (int i = 0; i < iterators.length; ++i) {
         final FixedBitSet set = randomSet(maxDoc);
         if (random().nextBoolean()) {
           // simple iterator
           sets[i] = set;
-          iterators[i] = new BitDocIdSet(set).iterator();
+          iterators[i] = new ConstantScoreScorer(null, 0f, new BitDocIdSet(set).iterator());
         } else {
           // scorer with approximation
           final FixedBitSet confirmed = clearRandomBits(set);

@@ -200,7 +211,7 @@ public class TestConjunctionDISI extends LuceneTestCase {
         }
       }
 
-      final ConjunctionDISI conjunction = ConjunctionDISI.intersect(Arrays.asList(iterators));
+      final ConjunctionDISI conjunction = ConjunctionDISI.intersectScorers(Arrays.asList(iterators));
       TwoPhaseIterator twoPhaseIterator = conjunction.asTwoPhaseIterator();
       assertEquals(hasApproximation, twoPhaseIterator != null);
       if (hasApproximation) {

@@ -216,15 +227,15 @@ public class TestConjunctionDISI extends LuceneTestCase {
       final int maxDoc = TestUtil.nextInt(random(), 100, 10000);
       final int numIterators = TestUtil.nextInt(random(), 2, 5);
       final FixedBitSet[] sets = new FixedBitSet[numIterators];
-      DocIdSetIterator conjunction = null;
+      Scorer conjunction = null;
       boolean hasApproximation = false;
       for (int i = 0; i < numIterators; ++i) {
         final FixedBitSet set = randomSet(maxDoc);
-        final DocIdSetIterator newIterator;
+        final Scorer newIterator;
         if (random().nextBoolean()) {
           // simple iterator
           sets[i] = set;
-          newIterator = new BitDocIdSet(set).iterator();
+          newIterator = new ConstantScoreScorer(null, 0f, new BitDocIdSet(set).iterator());
         } else {
           // scorer with approximation
           final FixedBitSet confirmed = clearRandomBits(set);

@@ -237,17 +248,17 @@ public class TestConjunctionDISI extends LuceneTestCase {
         if (conjunction == null) {
           conjunction = newIterator;
         } else {
-          final ConjunctionDISI conj = ConjunctionDISI.intersect(Arrays.asList(conjunction, newIterator));
+          final ConjunctionDISI conj = ConjunctionDISI.intersectScorers(Arrays.asList(conjunction, newIterator));
           conjunction = scorer(conj, conj.asTwoPhaseIterator());
         }
       }
 
-      TwoPhaseIterator twoPhaseIterator = ((Scorer) conjunction).asTwoPhaseIterator();
+      TwoPhaseIterator twoPhaseIterator = ((Scorer) conjunction).twoPhaseIterator();
       assertEquals(hasApproximation, twoPhaseIterator != null);
       if (hasApproximation) {
         assertEquals(intersect(sets), toBitSet(maxDoc, TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator)));
       } else {
-        assertEquals(intersect(sets), toBitSet(maxDoc, conjunction));
+        assertEquals(intersect(sets), toBitSet(maxDoc, conjunction.iterator()));
       }
     }
   }

@@ -258,47 +269,47 @@ public class TestConjunctionDISI extends LuceneTestCase {
       final int maxDoc = TestUtil.nextInt(random(), 100, 10000);
       final int numIterators = TestUtil.nextInt(random(), 5, 10);
       final FixedBitSet[] sets = new FixedBitSet[numIterators];
-      final List<DocIdSetIterator> iterators = new LinkedList<>();
+      final List<Scorer> scorers = new LinkedList<>();
       for (int i = 0; i < numIterators; ++i) {
         final FixedBitSet set = randomSet(maxDoc);
         if (random().nextBoolean()) {
           // simple iterator
           sets[i] = set;
-          iterators.add(new BitDocIdSet(set).iterator());
+          scorers.add(new ConstantScoreScorer(null, 0f, new BitDocIdSet(set).iterator()));
         } else {
           // scorer with approximation
           final FixedBitSet confirmed = clearRandomBits(set);
           sets[i] = confirmed;
           final TwoPhaseIterator approximation = approximation(new BitDocIdSet(set).iterator(), confirmed);
-          iterators.add(scorer(approximation));
+          scorers.add(scorer(approximation));
         }
       }
 
       // make some sub sequences into sub conjunctions
       final int subIters = atLeast(3);
-      for (int subIter = 0; subIter < subIters && iterators.size() > 3; ++subIter) {
-        final int subSeqStart = TestUtil.nextInt(random(), 0, iterators.size() - 2);
-        final int subSeqEnd = TestUtil.nextInt(random(), subSeqStart + 2, iterators.size());
-        List<DocIdSetIterator> subIterators = iterators.subList(subSeqStart, subSeqEnd);
-        DocIdSetIterator subConjunction;
+      for (int subIter = 0; subIter < subIters && scorers.size() > 3; ++subIter) {
+        final int subSeqStart = TestUtil.nextInt(random(), 0, scorers.size() - 2);
+        final int subSeqEnd = TestUtil.nextInt(random(), subSeqStart + 2, scorers.size());
+        List<Scorer> subIterators = scorers.subList(subSeqStart, subSeqEnd);
+        Scorer subConjunction;
         if (wrapWithScorer) {
           subConjunction = new ConjunctionScorer(null, subIterators, Collections.emptyList());
         } else {
-          subConjunction = ConjunctionDISI.intersect(subIterators);
+          subConjunction = new ConstantScoreScorer(null, 0f, ConjunctionDISI.intersectScorers(subIterators));
         }
-        iterators.set(subSeqStart, subConjunction);
+        scorers.set(subSeqStart, subConjunction);
         int toRemove = subSeqEnd - subSeqStart - 1;
         while (toRemove-- > 0) {
-          iterators.remove(subSeqStart + 1);
+          scorers.remove(subSeqStart + 1);
         }
       }
-      if (iterators.size() == 1) {
+      if (scorers.size() == 1) {
         // ConjunctionDISI needs two iterators
-        iterators.add(DocIdSetIterator.all(maxDoc));
+        scorers.add(new ConstantScoreScorer(null, 0f, DocIdSetIterator.all(maxDoc)));
       }
 
 
-      final ConjunctionDISI conjunction = ConjunctionDISI.intersect(iterators);
+      final ConjunctionDISI conjunction = ConjunctionDISI.intersectScorers(scorers);
       assertEquals(intersect(sets), toBitSet(maxDoc, conjunction));
     }
   }
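The pattern repeated throughout these test updates: ConjunctionDISI now intersects Scorers rather than raw DocIdSetIterators, so a plain iterator is wrapped in a ConstantScoreScorer first. A minimal sketch, assuming two DocIdSetIterators `a` and `b` built elsewhere:

    // Hedged sketch: wrap raw iterators as Scorers before intersecting them.
    Scorer sa = new ConstantScoreScorer(null, 0f, a);   // null Weight and 0 score, as in the tests above
    Scorer sb = new ConstantScoreScorer(null, 0f, b);
    ConjunctionDISI conjunction = ConjunctionDISI.intersectScorers(Arrays.asList(sa, sb));
    for (int doc = conjunction.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = conjunction.nextDoc()) {
      // doc is present in both a and b
    }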
@@ -216,7 +216,7 @@ public class TestConstantScoreQuery extends LuceneTestCase {
 
     final Weight weight = searcher.createNormalizedWeight(q, true);
     final Scorer scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
-    assertNotNull(scorer.asTwoPhaseIterator());
+    assertNotNull(scorer.twoPhaseIterator());
 
     reader.close();
     w.close();

@@ -180,7 +180,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
     final Weight dw = s.createNormalizedWeight(dq, true);
     LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext();
     final Scorer ds = dw.scorer(context);
-    final boolean skipOk = ds.advance(3) != DocIdSetIterator.NO_MORE_DOCS;
+    final boolean skipOk = ds.iterator().advance(3) != DocIdSetIterator.NO_MORE_DOCS;
     if (skipOk) {
       fail("firsttime skipTo found a match? ... "
           + r.document(ds.docID()).get("id"));

@@ -197,7 +197,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
     LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext();
     final Scorer ds = dw.scorer(context);
     assertTrue("firsttime skipTo found no match",
-        ds.advance(3) != DocIdSetIterator.NO_MORE_DOCS);
+        ds.iterator().advance(3) != DocIdSetIterator.NO_MORE_DOCS);
     assertEquals("found wrong docid", "d4", r.document(ds.docID()).get("id"));
   }
 
@@ -147,36 +147,40 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
 
   private void assertNext(Scorer expected, Scorer actual) throws Exception {
     if (actual == null) {
-      assertEquals(DocIdSetIterator.NO_MORE_DOCS, expected.nextDoc());
+      assertEquals(DocIdSetIterator.NO_MORE_DOCS, expected.iterator().nextDoc());
       return;
     }
     int doc;
-    while ((doc = expected.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      assertEquals(doc, actual.nextDoc());
+    DocIdSetIterator expectedIt = expected.iterator();
+    DocIdSetIterator actualIt = actual.iterator();
+    while ((doc = expectedIt.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
+      assertEquals(doc, actualIt.nextDoc());
       assertEquals(expected.freq(), actual.freq());
       float expectedScore = expected.score();
       float actualScore = actual.score();
       assertEquals(expectedScore, actualScore, CheckHits.explainToleranceDelta(expectedScore, actualScore));
     }
-    assertEquals(DocIdSetIterator.NO_MORE_DOCS, actual.nextDoc());
+    assertEquals(DocIdSetIterator.NO_MORE_DOCS, actualIt.nextDoc());
   }
 
   private void assertAdvance(Scorer expected, Scorer actual, int amount) throws Exception {
     if (actual == null) {
-      assertEquals(DocIdSetIterator.NO_MORE_DOCS, expected.nextDoc());
+      assertEquals(DocIdSetIterator.NO_MORE_DOCS, expected.iterator().nextDoc());
       return;
     }
+    DocIdSetIterator expectedIt = expected.iterator();
+    DocIdSetIterator actualIt = actual.iterator();
     int prevDoc = 0;
     int doc;
-    while ((doc = expected.advance(prevDoc+amount)) != DocIdSetIterator.NO_MORE_DOCS) {
-      assertEquals(doc, actual.advance(prevDoc+amount));
+    while ((doc = expectedIt.advance(prevDoc+amount)) != DocIdSetIterator.NO_MORE_DOCS) {
+      assertEquals(doc, actualIt.advance(prevDoc+amount));
       assertEquals(expected.freq(), actual.freq());
       float expectedScore = expected.score();
       float actualScore = actual.score();
       assertEquals(expectedScore, actualScore, CheckHits.explainToleranceDelta(expectedScore, actualScore));
       prevDoc = doc;
     }
-    assertEquals(DocIdSetIterator.NO_MORE_DOCS, actual.advance(prevDoc+amount));
+    assertEquals(DocIdSetIterator.NO_MORE_DOCS, actualIt.advance(prevDoc+amount));
   }
 
   /** simple test for next(): minShouldMatch=2 on 3 terms (one common, one medium, one rare) */

@@ -360,6 +364,10 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
       return currentDoc;
     }
 
+    @Override
+    public DocIdSetIterator iterator() {
+      return new DocIdSetIterator() {
+
       @Override
       public int nextDoc() throws IOException {
         assert currentDoc != NO_MORE_DOCS;

@@ -393,5 +401,12 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
       public long cost() {
         return maxDoc;
       }
+
+      @Override
+      public int docID() {
+        return currentDoc;
+      }
+      };
+    }
   }
 }
@@ -43,6 +43,14 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
 
     @Override public int docID() { return idx; }
 
+    @Override
+    public DocIdSetIterator iterator() {
+      return new DocIdSetIterator() {
+        @Override
+        public int docID() {
+          return idx;
+        }
+
       @Override public int nextDoc() {
         return ++idx != scores.length ? idx : NO_MORE_DOCS;
       }

@@ -56,6 +64,8 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
       public long cost() {
         return scores.length;
       }
+      };
+    }
   }
 
   // The scores must have positive as well as negative values

@@ -90,7 +100,7 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
     Collector c = new PositiveScoresOnlyCollector(tdc);
     LeafCollector ac = c.getLeafCollector(ir.leaves().get(0));
     ac.setScorer(s);
-    while (s.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+    while (s.iterator().nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
       ac.collect(0);
     }
     TopDocs td = tdc.topDocs();
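For stub scorers like the ones updated above, the whole matching contract moves into the object returned by iterator(). A hedged, self-contained illustration (not part of the commit; class name and behavior are made up) of a minimal Scorer in the new API:

    import java.io.IOException;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;

    // Illustrative stub: matches only doc 0 and scores it 1.0.
    class SingleDocScorer extends Scorer {
      private int doc = -1;

      SingleDocScorer() {
        super(null); // null Weight, as in the test stubs above
      }

      @Override public int docID() { return doc; }
      @Override public float score() { return 1f; }
      @Override public int freq() { return 1; }

      @Override
      public DocIdSetIterator iterator() {
        return new DocIdSetIterator() {
          @Override public int docID() { return doc; }
          @Override public int nextDoc() throws IOException { return advance(doc + 1); }
          @Override public int advance(int target) throws IOException {
            // only doc 0 matches
            doc = (target <= 0) ? 0 : NO_MORE_DOCS;
            return doc;
          }
          @Override public long cost() { return 1; }
        };
      }
    }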
@@ -445,6 +445,15 @@ public class TestQueryRescorer extends LuceneTestCase {
         return 1;
       }
 
+      @Override
+      public DocIdSetIterator iterator() {
+        return new DocIdSetIterator() {
+
+          @Override
+          public int docID() {
+            return docID;
+          }
+
       @Override
       public long cost() {
         return 1;

@@ -464,6 +473,8 @@ public class TestQueryRescorer extends LuceneTestCase {
         docID = target;
         return docID;
       }
+        };
+      }
 
       @Override
       public float score() throws IOException {
@@ -49,6 +49,11 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase {
 
     @Override public int docID() { return doc; }
 
+    @Override
+    public DocIdSetIterator iterator() {
+      return new DocIdSetIterator() {
+        @Override public int docID() { return doc; }
+
       @Override public int nextDoc() {
         return ++doc < scores.length ? doc : NO_MORE_DOCS;
       }

@@ -62,6 +67,8 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase {
       public long cost() {
         return scores.length;
       }
+      };
+    }
   }
 
   private static final class ScoreCachingCollector extends SimpleCollector {

@@ -116,7 +123,7 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase {
 
     // We need to iterate on the scorer so that its doc() advances.
     int doc;
-    while ((doc = s.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
+    while ((doc = s.iterator().nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
       scc.collect(doc);
     }
 
@@ -133,11 +133,11 @@ public class TestTermScorer extends LuceneTestCase {
     LeafReaderContext context = (LeafReaderContext) indexSearcher.getTopReaderContext();
     Scorer ts = weight.scorer(context);
     assertTrue("next did not return a doc",
-        ts.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+        ts.iterator().nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
     assertTrue("next did not return a doc",
-        ts.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+        ts.iterator().nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
     assertTrue("next returned a doc and it should not have",
-        ts.nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
+        ts.iterator().nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
   }
 
   public void testAdvance() throws Exception {

@@ -149,7 +149,7 @@ public class TestTermScorer extends LuceneTestCase {
     assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
     LeafReaderContext context = (LeafReaderContext) indexSearcher.getTopReaderContext();
     Scorer ts = weight.scorer(context);
-    assertTrue("Didn't skip", ts.advance(3) != DocIdSetIterator.NO_MORE_DOCS);
+    assertTrue("Didn't skip", ts.iterator().advance(3) != DocIdSetIterator.NO_MORE_DOCS);
     // The next doc should be doc 5
     assertTrue("doc should be number 5", ts.docID() == 5);
   }

@@ -186,7 +186,7 @@ public class TestTermScorer extends LuceneTestCase {
 
     Weight weight = indexSearcher.createNormalizedWeight(termQuery, true);
     try {
-      weight.scorer(forbiddenNorms.getContext()).nextDoc();
+      weight.scorer(forbiddenNorms.getContext()).iterator().nextDoc();
       fail("Should load norms");
     } catch (AssertionError e) {
       // ok

@@ -194,6 +194,6 @@ public class TestTermScorer extends LuceneTestCase {
 
     weight = indexSearcher.createNormalizedWeight(termQuery, false);
     // should not fail this time since norms are not necessary
-    weight.scorer(forbiddenNorms.getContext()).nextDoc();
+    weight.scorer(forbiddenNorms.getContext()).iterator().nextDoc();
   }
 }
@@ -236,18 +236,8 @@ public class TestTopFieldCollector extends LuceneTestCase {
           }
 
           @Override
-          public int nextDoc() throws IOException {
-            return scorer.nextDoc();
-          }
-
-          @Override
-          public int advance(int target) throws IOException {
-            return scorer.advance(target);
-          }
-
-          @Override
-          public long cost() {
-            return scorer.cost();
+          public DocIdSetIterator iterator() {
+            return scorer.iterator();
           }
 
         };
@@ -34,10 +34,6 @@ final class JustCompileSearchSpans {
 
   static final class JustCompileSpans extends Spans {
 
-    JustCompileSpans() {
-      super(null, null);
-    }
-
     @Override
     public int docID() {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);

@@ -18,8 +18,6 @@ package org.apache.lucene.search.spans;
  */
 
 import java.lang.reflect.Method;
-import java.util.HashSet;
-import java.util.Set;
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;

@@ -29,13 +27,9 @@ public class TestFilterSpans extends LuceneTestCase {
   @Test
   public void testOverrides() throws Exception {
     // verify that all methods of Spans are overridden by FilterSpans,
-    // except those under the 'exclude' list
-    Set<Method> exclude = new HashSet<>();
-    exclude.add(FilterSpans.class.getMethod("freq"));
-    exclude.add(FilterSpans.class.getMethod("score"));
     for (Method m : FilterSpans.class.getMethods()) {
       if (m.getDeclaringClass() == Spans.class) {
-        assertTrue("method " + m.getName() + " not overridden!", exclude.contains(m));
+        fail("method " + m.getName() + " not overridden!");
       }
     }
   }
@@ -194,7 +194,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
     IndexReaderContext topReaderContext = searcher.getTopReaderContext();
     LeafReaderContext leave = topReaderContext.leaves().get(0);
     Scorer s = w.scorer(leave);
-    assertEquals(1, s.advance(1));
+    assertEquals(1, s.iterator().advance(1));
   }
 
   public void testOverlappedOrderedSpan() throws Exception {

@@ -303,12 +303,12 @@ public class TestSpans extends LuceneTestCase {
         searcher.setSimilarity(oldSim);
       }
       if (i == subIndex) {
-        assertTrue("first doc", spanScorer.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+        assertTrue("first doc", spanScorer.iterator().nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
         assertEquals("first doc number", spanScorer.docID() + ctx.docBase, 11);
         float score = spanScorer.score();
         assertTrue("first doc score should be zero, " + score, score == 0.0f);
       } else {
-        assertTrue("no second doc", spanScorer == null || spanScorer.nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
+        assertTrue("no second doc", spanScorer == null || spanScorer.iterator().nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
       }
     }
   }
@@ -19,6 +19,7 @@ package org.apache.lucene.expressions;
 
 import java.io.IOException;
 
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Scorer;
 
 class FakeScorer extends Scorer {

@@ -37,17 +38,7 @@ class FakeScorer extends Scorer {
   }
 
   @Override
-  public int nextDoc() throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public int advance(int target) throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public long cost() {
+  public DocIdSetIterator iterator() {
     throw new UnsupportedOperationException();
   }
 

@@ -128,17 +128,19 @@ public class TestExpressionValueSource extends LuceneTestCase {
 
     // everything
     ValueSourceScorer scorer = values.getRangeScorer(leaf.reader(), "4", "40", true, true);
-    assertEquals(-1, scorer.docID());
-    assertEquals(0, scorer.nextDoc());
-    assertEquals(1, scorer.nextDoc());
-    assertEquals(2, scorer.nextDoc());
-    assertEquals(DocIdSetIterator.NO_MORE_DOCS, scorer.nextDoc());
+    DocIdSetIterator iter = scorer.iterator();
+    assertEquals(-1, iter.docID());
+    assertEquals(0, iter.nextDoc());
+    assertEquals(1, iter.nextDoc());
+    assertEquals(2, iter.nextDoc());
+    assertEquals(DocIdSetIterator.NO_MORE_DOCS, iter.nextDoc());
 
     // just the first doc
     scorer = values.getRangeScorer(leaf.reader(), "4", "40", false, false);
+    iter = scorer.iterator();
     assertEquals(-1, scorer.docID());
-    assertEquals(0, scorer.nextDoc());
-    assertEquals(DocIdSetIterator.NO_MORE_DOCS, scorer.nextDoc());
+    assertEquals(0, iter.nextDoc());
+    assertEquals(DocIdSetIterator.NO_MORE_DOCS, iter.nextDoc());
   }
 
   public void testEquals() throws Exception {
@@ -44,6 +44,7 @@ class DrillSidewaysScorer extends BulkScorer {

   // DrillDown DocsEnums:
   private final Scorer baseScorer;
+  private final DocIdSetIterator baseIterator;

   private final LeafReaderContext context;

@@ -60,13 +61,14 @@ class DrillSidewaysScorer extends BulkScorer {
     this.dims = dims;
     this.context = context;
     this.baseScorer = baseScorer;
+    this.baseIterator = baseScorer.iterator();
     this.drillDownCollector = drillDownCollector;
     this.scoreSubDocsAtOnce = scoreSubDocsAtOnce;
   }

   @Override
   public long cost() {
-    return baseScorer.cost();
+    return baseIterator.cost();
   }

   @Override
@@ -94,13 +96,8 @@ class DrillSidewaysScorer extends BulkScorer {
       dim.sidewaysLeafCollector.setScorer(scorer);
     }

-    // TODO: if we ever allow null baseScorer ... it will
-    // mean we DO score docs out of order ... hmm, or if we
-    // change up the order of the conjuntions below
-    assert baseScorer != null;
-
     // some scorers, eg ReqExlScorer, can hit NPE if cost is called after nextDoc
-    long baseQueryCost = baseScorer.cost();
+    long baseQueryCost = baseIterator.cost();

     final int numDims = dims.length;

@@ -115,7 +112,7 @@ class DrillSidewaysScorer extends BulkScorer {
     }

     // Position all scorers to their first matching doc:
-    baseScorer.nextDoc();
+    baseIterator.nextDoc();
     for (DocsAndCost dim : dims) {
       dim.approximation.nextDoc();
     }
@@ -157,7 +154,7 @@ class DrillSidewaysScorer extends BulkScorer {

     nextDoc: while (docID != PostingsEnum.NO_MORE_DOCS) {
       if (acceptDocs != null && acceptDocs.get(docID) == false) {
-        docID = baseScorer.nextDoc();
+        docID = baseIterator.nextDoc();
         continue;
       }
       LeafCollector failedCollector = null;
@@ -182,7 +179,7 @@ class DrillSidewaysScorer extends BulkScorer {
             // More than one dim fails on this document, so
             // it's neither a hit nor a near-miss; move to
             // next doc:
-            docID = baseScorer.nextDoc();
+            docID = baseIterator.nextDoc();
             continue nextDoc;
           } else {
             failedCollector = dim.sidewaysLeafCollector;
@@ -204,7 +201,7 @@ class DrillSidewaysScorer extends BulkScorer {
         collectNearMiss(failedCollector);
       }

-      docID = baseScorer.nextDoc();
+      docID = baseIterator.nextDoc();
     }
   }

@@ -316,9 +313,9 @@ class DrillSidewaysScorer extends BulkScorer {
         int ddDocID = docIDs[slot0];
         assert ddDocID != -1;

-        int baseDocID = baseScorer.docID();
+        int baseDocID = baseIterator.docID();
         if (baseDocID < ddDocID) {
-          baseDocID = baseScorer.advance(ddDocID);
+          baseDocID = baseIterator.advance(ddDocID);
         }
         if (baseDocID == ddDocID) {
           //if (DEBUG) {
@@ -437,7 +434,7 @@ class DrillSidewaysScorer extends BulkScorer {
      // System.out.println("\ncycle nextChunkStart=" + nextChunkStart + " docIds[0]=" + docIDs[0]);
      //}
      int filledCount = 0;
-     int docID = baseScorer.docID();
+     int docID = baseIterator.docID();
      //if (DEBUG) {
      // System.out.println("  base docID=" + docID);
      //}
@@ -456,7 +453,7 @@ class DrillSidewaysScorer extends BulkScorer {
          missingDims[slot] = 0;
          counts[slot] = 1;
        }
-       docID = baseScorer.nextDoc();
+       docID = baseIterator.nextDoc();
      }

      if (filledCount == 0) {
@@ -590,11 +587,6 @@ class DrillSidewaysScorer extends BulkScorer {
       super(null);
     }

-    @Override
-    public int advance(int target) {
-      throw new UnsupportedOperationException("FakeScorer doesn't support advance(int)");
-    }
-
     @Override
     public int docID() {
       return collectDocID;
@@ -606,7 +598,7 @@ class DrillSidewaysScorer extends BulkScorer {
     }

     @Override
-    public int nextDoc() {
+    public DocIdSetIterator iterator() {
       throw new UnsupportedOperationException("FakeScorer doesn't support nextDoc()");
     }

@@ -615,11 +607,6 @@ class DrillSidewaysScorer extends BulkScorer {
       return collectScore;
     }

-    @Override
-    public long cost() {
-      return baseScorer.cost();
-    }
-
     @Override
     public Collection<ChildScorer> getChildren() {
       return Collections.singletonList(new ChildScorer(baseScorer, "MUST"));
@@ -640,9 +627,9 @@ class DrillSidewaysScorer extends BulkScorer {
     LeafCollector sidewaysLeafCollector;

     DocsAndCost(Scorer scorer, Collector sidewaysCollector) {
-      final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
+      final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
       if (twoPhase == null) {
-        this.approximation = scorer;
+        this.approximation = scorer.iterator();
         this.twoPhase = null;
       } else {
         this.approximation = twoPhase.approximation();
@@ -164,10 +164,11 @@ public final class DoubleRange extends Range {
         if (fastMatchWeight == null) {
           approximation = DocIdSetIterator.all(maxDoc);
         } else {
-          approximation = fastMatchWeight.scorer(context);
-          if (approximation == null) {
+          Scorer s = fastMatchWeight.scorer(context);
+          if (s == null) {
             return null;
           }
+          approximation = s.iterator();
         }

         final FunctionValues values = valueSource.getValues(Collections.emptyMap(), context);
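This hunk, and the three range-facet hunks that follow, land on the same consumer-side pattern: Weight.scorer(...) still returns a Scorer (or null), and callers that only need matching doc IDs unwrap it with Scorer.iterator() after the null check. A hedged sketch of that pattern is below; the class and method names are invented for illustration and do not appear in the commit.

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

final class FastMatchExample {
  // Returns an iterator over the fast-match docs of one segment, or null if
  // the weight matches nothing there. Names are illustrative only.
  static DocIdSetIterator fastMatchDocs(Weight fastMatchWeight, LeafReaderContext context, int maxDoc) throws IOException {
    if (fastMatchWeight == null) {
      // No fast-match query configured: visit every document of the segment.
      return DocIdSetIterator.all(maxDoc);
    }
    Scorer s = fastMatchWeight.scorer(context);
    if (s == null) {
      return null; // the query matches no document in this segment
    }
    return s.iterator();
  }
}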
@@ -36,6 +36,7 @@ import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.NumericUtils;

@@ -105,10 +106,11 @@ public class DoubleRangeFacetCounts extends RangeFacetCounts {
         final IndexSearcher searcher = new IndexSearcher(topLevelContext);
         searcher.setQueryCache(null);
         final Weight fastMatchWeight = searcher.createNormalizedWeight(fastMatchQuery, false);
-        fastMatchDocs = fastMatchWeight.scorer(hits.context);
-        if (fastMatchDocs == null) {
+        Scorer s = fastMatchWeight.scorer(hits.context);
+        if (s == null) {
           continue;
         }
+        fastMatchDocs = s.iterator();
       } else {
         fastMatchDocs = null;
       }
@@ -156,10 +156,11 @@ public final class LongRange extends Range {
         if (fastMatchWeight == null) {
           approximation = DocIdSetIterator.all(maxDoc);
         } else {
-          approximation = fastMatchWeight.scorer(context);
-          if (approximation == null) {
+          Scorer s = fastMatchWeight.scorer(context);
+          if (s == null) {
             return null;
           }
+          approximation = s.iterator();
         }

         final FunctionValues values = valueSource.getValues(Collections.emptyMap(), context);
@@ -33,6 +33,7 @@ import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;

 /** {@link Facets} implementation that computes counts for
@@ -86,10 +87,11 @@ public class LongRangeFacetCounts extends RangeFacetCounts {
         final IndexSearcher searcher = new IndexSearcher(topLevelContext);
         searcher.setQueryCache(null);
         final Weight fastMatchWeight = searcher.createNormalizedWeight(fastMatchQuery, false);
-        fastMatchDocs = fastMatchWeight.scorer(hits.context);
-        if (fastMatchDocs == null) {
+        Scorer s = fastMatchWeight.scorer(hits.context);
+        if (s == null) {
           continue;
         }
+        fastMatchDocs = s.iterator();
       } else {
         fastMatchDocs = null;
       }
@@ -19,6 +19,7 @@ package org.apache.lucene.facet.taxonomy;

 import java.io.IOException;

+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Scorer;

 class FakeScorer extends Scorer {
@@ -37,17 +38,7 @@ class FakeScorer extends Scorer {
   }

   @Override
-  public int nextDoc() throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public int advance(int target) throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public long cost() {
+  public DocIdSetIterator iterator() {
     throw new UnsupportedOperationException();
   }

@@ -478,7 +478,12 @@ public class BlockGroupingCollector extends SimpleCollector {
     subDocUpto = 0;
     docBase = readerContext.docBase;
     //System.out.println("setNextReader base=" + docBase + " r=" + readerContext.reader);
-    lastDocPerGroupBits = lastDocPerGroup.scorer(readerContext);
+    Scorer s = lastDocPerGroup.scorer(readerContext);
+    if (s == null) {
+      lastDocPerGroupBits = null;
+    } else {
+      lastDocPerGroupBits = s.iterator();
+    }
     groupEndDocID = -1;

     currentReaderContext = readerContext;
@@ -19,6 +19,7 @@ package org.apache.lucene.search.grouping;

 import java.io.IOException;

+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Scorer;

 class FakeScorer extends Scorer {
@@ -37,17 +38,7 @@ class FakeScorer extends Scorer {
   }

   @Override
-  public int nextDoc() throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public int advance(int target) throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public long cost() {
+  public DocIdSetIterator iterator() {
     throw new UnsupportedOperationException();
   }

@@ -22,21 +22,20 @@ import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.TwoPhaseIterator;
 import org.apache.lucene.search.Weight;
-import org.apache.lucene.util.LongBitSet;

 import java.io.IOException;

 abstract class BaseGlobalOrdinalScorer extends Scorer {

   final SortedDocValues values;
-  final Scorer approximationScorer;
+  final DocIdSetIterator approximation;

   float score;

-  public BaseGlobalOrdinalScorer(Weight weight, SortedDocValues values, Scorer approximationScorer) {
+  public BaseGlobalOrdinalScorer(Weight weight, SortedDocValues values, DocIdSetIterator approximationScorer) {
     super(weight);
     this.values = values;
-    this.approximationScorer = approximationScorer;
+    this.approximation = approximationScorer;
   }

   @Override
@@ -46,45 +45,19 @@ abstract class BaseGlobalOrdinalScorer extends Scorer {

   @Override
   public int docID() {
-    return approximationScorer.docID();
+    return approximation.docID();
   }

   @Override
-  public int nextDoc() throws IOException {
-    return advance(approximationScorer.docID() + 1);
+  public DocIdSetIterator iterator() {
+    return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator());
   }

   @Override
-  public TwoPhaseIterator asTwoPhaseIterator() {
-    final DocIdSetIterator approximation = new DocIdSetIterator() {
-      @Override
-      public int docID() {
-        return approximationScorer.docID();
-      }
-
-      @Override
-      public int nextDoc() throws IOException {
-        return approximationScorer.nextDoc();
-      }
-
-      @Override
-      public int advance(int target) throws IOException {
-        return approximationScorer.advance(target);
-      }
-
-      @Override
-      public long cost() {
-        return approximationScorer.cost();
-      }
-    };
+  public TwoPhaseIterator twoPhaseIterator() {
     return createTwoPhaseIterator(approximation);
   }

   @Override
-  public long cost() {
-    return approximationScorer.cost();
-  }
-
-  @Override
   public int freq() throws IOException {
     return 1;
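BaseGlobalOrdinalScorer now derives its iterator from its two-phase view via TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator()), and the hand-rolled delegating DocIdSetIterator disappears. The sketch below shows, under illustrative names only, how a consumer can still choose between the cheap approximation and fully confirmed matches against the renamed twoPhaseIterator() method; it is not code from the commit.

import java.io.IOException;

import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;

final class TwoPhaseConsumerExample {
  // Prefer the cheap approximation when a scorer exposes one, otherwise fall
  // back to its plain iterator.
  static DocIdSetIterator approximationOf(Scorer scorer) {
    TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
    if (twoPhase == null) {
      return scorer.iterator();
    }
    return twoPhase.approximation();
  }

  // Count matching docs, confirming approximate hits with matches().
  static long countMatches(Scorer scorer) throws IOException {
    TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
    DocIdSetIterator disi = twoPhase == null ? scorer.iterator() : twoPhase.approximation();
    long count = 0;
    for (int doc = disi.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = disi.nextDoc()) {
      if (twoPhase == null || twoPhase.matches()) {
        count++;
      }
    }
    return count;
  }
}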
@@ -19,6 +19,7 @@ package org.apache.lucene.search.join;

 import java.io.IOException;

+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Scorer;

 class FakeScorer extends Scorer {
@@ -37,17 +38,7 @@ class FakeScorer extends Scorer {
   }

   @Override
-  public int nextDoc() throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public int advance(int target) throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public long cost() {
+  public DocIdSetIterator iterator() {
     throw new UnsupportedOperationException();
   }

@@ -135,9 +135,9 @@ final class GlobalOrdinalsQuery extends Query {
         return null;
       }
       if (globalOrds != null) {
-        return new OrdinalMapScorer(this, score(), foundOrds, values, approximationScorer, globalOrds.getGlobalOrds(context.ord));
+        return new OrdinalMapScorer(this, score(), foundOrds, values, approximationScorer.iterator(), globalOrds.getGlobalOrds(context.ord));
       } {
-        return new SegmentOrdinalScorer(this, score(), foundOrds, values, approximationScorer);
+        return new SegmentOrdinalScorer(this, score(), foundOrds, values, approximationScorer.iterator());
       }
     }

@@ -148,34 +148,20 @@ final class GlobalOrdinalsQuery extends Query {
     final LongBitSet foundOrds;
     final LongValues segmentOrdToGlobalOrdLookup;

-    public OrdinalMapScorer(Weight weight, float score, LongBitSet foundOrds, SortedDocValues values, Scorer approximationScorer, LongValues segmentOrdToGlobalOrdLookup) {
+    public OrdinalMapScorer(Weight weight, float score, LongBitSet foundOrds, SortedDocValues values, DocIdSetIterator approximationScorer, LongValues segmentOrdToGlobalOrdLookup) {
       super(weight, values, approximationScorer);
       this.score = score;
       this.foundOrds = foundOrds;
       this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup;
     }

-    @Override
-    public int advance(int target) throws IOException {
-      for (int docID = approximationScorer.advance(target); docID < NO_MORE_DOCS; docID = approximationScorer.nextDoc()) {
-        final long segmentOrd = values.getOrd(docID);
-        if (segmentOrd != -1) {
-          final long globalOrd = segmentOrdToGlobalOrdLookup.get(segmentOrd);
-          if (foundOrds.get(globalOrd)) {
-            return docID;
-          }
-        }
-      }
-      return NO_MORE_DOCS;
-    }
-
     @Override
     protected TwoPhaseIterator createTwoPhaseIterator(DocIdSetIterator approximation) {
       return new TwoPhaseIterator(approximation) {

         @Override
         public boolean matches() throws IOException {
-          final long segmentOrd = values.getOrd(approximationScorer.docID());
+          final long segmentOrd = values.getOrd(approximation.docID());
           if (segmentOrd != -1) {
             final long globalOrd = segmentOrdToGlobalOrdLookup.get(segmentOrd);
             if (foundOrds.get(globalOrd)) {
@@ -197,32 +183,19 @@ final class GlobalOrdinalsQuery extends Query {

     final LongBitSet foundOrds;

-    public SegmentOrdinalScorer(Weight weight, float score, LongBitSet foundOrds, SortedDocValues values, Scorer approximationScorer) {
+    public SegmentOrdinalScorer(Weight weight, float score, LongBitSet foundOrds, SortedDocValues values, DocIdSetIterator approximationScorer) {
       super(weight, values, approximationScorer);
       this.score = score;
       this.foundOrds = foundOrds;
     }

-    @Override
-    public int advance(int target) throws IOException {
-      for (int docID = approximationScorer.advance(target); docID < NO_MORE_DOCS; docID = approximationScorer.nextDoc()) {
-        final long segmentOrd = values.getOrd(docID);
-        if (segmentOrd != -1) {
-          if (foundOrds.get(segmentOrd)) {
-            return docID;
-          }
-        }
-      }
-      return NO_MORE_DOCS;
-    }
-
     @Override
     protected TwoPhaseIterator createTwoPhaseIterator(DocIdSetIterator approximation) {
       return new TwoPhaseIterator(approximation) {

         @Override
         public boolean matches() throws IOException {
-          final long segmentOrd = values.getOrd(approximationScorer.docID());
+          final long segmentOrd = values.getOrd(approximation.docID());
           if (segmentOrd != -1) {
             if (foundOrds.get(segmentOrd)) {
               return true;
@@ -160,9 +160,9 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
       if (approximationScorer == null) {
         return null;
       } else if (globalOrds != null) {
-        return new OrdinalMapScorer(this, collector, values, approximationScorer, globalOrds.getGlobalOrds(context.ord));
+        return new OrdinalMapScorer(this, collector, values, approximationScorer.iterator(), globalOrds.getGlobalOrds(context.ord));
       } else {
-        return new SegmentOrdinalScorer(this, collector, values, approximationScorer);
+        return new SegmentOrdinalScorer(this, collector, values, approximationScorer.iterator());
       }
     }

@@ -173,34 +173,19 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
     final LongValues segmentOrdToGlobalOrdLookup;
     final GlobalOrdinalsWithScoreCollector collector;

-    public OrdinalMapScorer(Weight weight, GlobalOrdinalsWithScoreCollector collector, SortedDocValues values, Scorer approximationScorer, LongValues segmentOrdToGlobalOrdLookup) {
-      super(weight, values, approximationScorer);
+    public OrdinalMapScorer(Weight weight, GlobalOrdinalsWithScoreCollector collector, SortedDocValues values, DocIdSetIterator approximation, LongValues segmentOrdToGlobalOrdLookup) {
+      super(weight, values, approximation);
       this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup;
       this.collector = collector;
     }

-    @Override
-    public int advance(int target) throws IOException {
-      for (int docID = approximationScorer.advance(target); docID < NO_MORE_DOCS; docID = approximationScorer.nextDoc()) {
-        final long segmentOrd = values.getOrd(docID);
-        if (segmentOrd != -1) {
-          final int globalOrd = (int) segmentOrdToGlobalOrdLookup.get(segmentOrd);
-          if (collector.match(globalOrd)) {
-            score = collector.score(globalOrd);
-            return docID;
-          }
-        }
-      }
-      return NO_MORE_DOCS;
-    }
-
     @Override
     protected TwoPhaseIterator createTwoPhaseIterator(DocIdSetIterator approximation) {
       return new TwoPhaseIterator(approximation) {

         @Override
         public boolean matches() throws IOException {
-          final long segmentOrd = values.getOrd(approximationScorer.docID());
+          final long segmentOrd = values.getOrd(approximation.docID());
           if (segmentOrd != -1) {
             final int globalOrd = (int) segmentOrdToGlobalOrdLookup.get(segmentOrd);
             if (collector.match(globalOrd)) {
@@ -223,32 +208,18 @@ final class GlobalOrdinalsWithScoreQuery extends Query {

     final GlobalOrdinalsWithScoreCollector collector;

-    public SegmentOrdinalScorer(Weight weight, GlobalOrdinalsWithScoreCollector collector, SortedDocValues values, Scorer approximationScorer) {
-      super(weight, values, approximationScorer);
+    public SegmentOrdinalScorer(Weight weight, GlobalOrdinalsWithScoreCollector collector, SortedDocValues values, DocIdSetIterator approximation) {
+      super(weight, values, approximation);
       this.collector = collector;
     }

-    @Override
-    public int advance(int target) throws IOException {
-      for (int docID = approximationScorer.advance(target); docID < NO_MORE_DOCS; docID = approximationScorer.nextDoc()) {
-        final int segmentOrd = values.getOrd(docID);
-        if (segmentOrd != -1) {
-          if (collector.match(segmentOrd)) {
-            score = collector.score(segmentOrd);
-            return docID;
-          }
-        }
-      }
-      return NO_MORE_DOCS;
-    }
-
     @Override
     protected TwoPhaseIterator createTwoPhaseIterator(DocIdSetIterator approximation) {
       return new TwoPhaseIterator(approximation) {

         @Override
         public boolean matches() throws IOException {
-          final int segmentOrd = values.getOrd(approximationScorer.docID());
+          final int segmentOrd = values.getOrd(approximation.docID());
           if (segmentOrd != -1) {
             if (collector.match(segmentOrd)) {
               score = collector.score(segmentOrd);
@@ -27,9 +27,9 @@ import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
@@ -68,12 +68,12 @@ public class QueryBitSetProducer implements BitSetProducer {
       final IndexSearcher searcher = new IndexSearcher(topLevelContext);
       searcher.setQueryCache(null);
       final Weight weight = searcher.createNormalizedWeight(query, false);
-      final DocIdSetIterator it = weight.scorer(context);
+      final Scorer s = weight.scorer(context);

-      if (it == null) {
+      if (s == null) {
        docIdSet = DocIdSet.EMPTY;
       } else {
-        docIdSet = new BitDocIdSet(BitSet.of(it, context.reader().maxDoc()));
+        docIdSet = new BitDocIdSet(BitSet.of(s.iterator(), context.reader().maxDoc()));
       }
       cache.put(key, docIdSet);
     }
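QueryBitSetProducer, and BlockJoinComparatorSource further down, keep materializing matches into a BitSet; the only change is that the iterator now has to be unwrapped from the Scorer. A small sketch of that helper shape, with invented names and assumed only for illustration:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitSet;

final class BitSetFromWeightExample {
  // Collect the docs matched by a weight in one segment into a BitSet, or
  // return null when nothing matches there.
  static BitSet matchingDocs(Weight weight, LeafReaderContext context) throws IOException {
    Scorer s = weight.scorer(context);
    if (s == null) {
      return null;
    }
    return BitSet.of(s.iterator(), context.reader().maxDoc());
  }
}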
@@ -180,8 +180,6 @@ class TermsIncludingScoreQuery extends Query {
     final float[] scores;
     final long cost;

-    int currentDoc = -1;
-
     SVInOrderScorer(Weight weight, TermsEnum termsEnum, int maxDoc, long cost) throws IOException {
       super(weight);
       FixedBitSet matchingDocs = new FixedBitSet(maxDoc);
@@ -210,7 +208,7 @@ class TermsIncludingScoreQuery extends Query {

     @Override
     public float score() throws IOException {
-      return scores[currentDoc];
+      return scores[docID()];
     }

     @Override
@@ -220,23 +218,14 @@ class TermsIncludingScoreQuery extends Query {

     @Override
     public int docID() {
-      return currentDoc;
+      return matchingDocsIterator.docID();
     }

     @Override
-    public int nextDoc() throws IOException {
-      return currentDoc = matchingDocsIterator.nextDoc();
+    public DocIdSetIterator iterator() {
+      return matchingDocsIterator;
     }

-    @Override
-    public int advance(int target) throws IOException {
-      return currentDoc = matchingDocsIterator.advance(target);
-    }
-
-    @Override
-    public long cost() {
-      return cost;
-    }
   }

   // This scorer deals with the fact that a document can have more than one score from multiple related documents.
@@ -26,6 +26,7 @@ import java.util.Set;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -141,7 +142,7 @@ public class ToChildBlockJoinQuery extends Query {
     @Override
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       ToChildBlockJoinScorer scorer = (ToChildBlockJoinScorer) scorer(context);
-      if (scorer != null && scorer.advance(doc) == doc) {
+      if (scorer != null && scorer.iterator().advance(doc) == doc) {
         int parentDoc = scorer.getParentDoc();
         return Explanation.match(
           scorer.score(),
@@ -155,6 +156,7 @@ public class ToChildBlockJoinQuery extends Query {

   static class ToChildBlockJoinScorer extends Scorer {
     private final Scorer parentScorer;
+    private final DocIdSetIterator parentIt;
     private final BitSet parentBits;
     private final boolean doScores;

@@ -169,6 +171,7 @@ public class ToChildBlockJoinQuery extends Query {
       this.doScores = doScores;
       this.parentBits = parentBits;
       this.parentScorer = parentScorer;
+      this.parentIt = parentScorer.iterator();
     }

     @Override
@@ -176,6 +179,15 @@ public class ToChildBlockJoinQuery extends Query {
       return Collections.singleton(new ChildScorer(parentScorer, "BLOCK_JOIN"));
     }

+    @Override
+    public DocIdSetIterator iterator() {
+      return new DocIdSetIterator() {
+
+        @Override
+        public int docID() {
+          return childDoc;
+        }
+
     @Override
     public int nextDoc() throws IOException {
       //System.out.println("Q.nextDoc() parentDoc=" + parentDoc + " childDoc=" + childDoc);
@@ -188,7 +200,7 @@ public class ToChildBlockJoinQuery extends Query {
         // to skip over some number of parents w/ no
         // children:
         while (true) {
-          parentDoc = parentScorer.nextDoc();
+          parentDoc = parentIt.nextDoc();
           validateParentDoc();

           if (parentDoc == 0) {
@@ -197,7 +209,7 @@ public class ToChildBlockJoinQuery extends Query {
             // into ctor so we can skip this if... but it's
             // tricky because scorer must return -1 for
             // .doc() on init...
-            parentDoc = parentScorer.nextDoc();
+            parentDoc = parentIt.nextDoc();
             validateParentDoc();
           }

@@ -236,36 +248,13 @@ public class ToChildBlockJoinQuery extends Query {
       }
     }

-    /** Detect mis-use, where provided parent query in fact
-     *  sometimes returns child documents. */
-    private void validateParentDoc() {
-      if (parentDoc != NO_MORE_DOCS && !parentBits.get(parentDoc)) {
-        throw new IllegalStateException(INVALID_QUERY_MESSAGE + parentDoc);
-      }
-    }
-
-    @Override
-    public int docID() {
-      return childDoc;
-    }
-
-    @Override
-    public float score() throws IOException {
-      return parentScore;
-    }
-
-    @Override
-    public int freq() throws IOException {
-      return parentFreq;
-    }
-
     @Override
     public int advance(int childTarget) throws IOException {
       if (childTarget >= parentDoc) {
         if (childTarget == NO_MORE_DOCS) {
           return childDoc = parentDoc = NO_MORE_DOCS;
         }
-        parentDoc = parentScorer.advance(childTarget + 1);
+        parentDoc = parentIt.advance(childTarget + 1);
         validateParentDoc();

         if (parentDoc == NO_MORE_DOCS) {
@@ -281,7 +270,7 @@ public class ToChildBlockJoinQuery extends Query {
           break;
         }
         // parent with no children, move to the next one
-        parentDoc = parentScorer.nextDoc();
+        parentDoc = parentIt.nextDoc();
         validateParentDoc();
         if (parentDoc == NO_MORE_DOCS) {
           return childDoc = NO_MORE_DOCS;
@@ -303,7 +292,32 @@ public class ToChildBlockJoinQuery extends Query {

     @Override
     public long cost() {
-      return parentScorer.cost();
+      return parentIt.cost();
+        }
+      };
+    }
+
+    /** Detect mis-use, where provided parent query in fact
+     *  sometimes returns child documents. */
+    private void validateParentDoc() {
+      if (parentDoc != DocIdSetIterator.NO_MORE_DOCS && !parentBits.get(parentDoc)) {
+        throw new IllegalStateException(INVALID_QUERY_MESSAGE + parentDoc);
+      }
+    }
+
+    @Override
+    public int docID() {
+      return childDoc;
+    }
+
+    @Override
+    public float score() throws IOException {
+      return parentScore;
+    }
+
+    @Override
+    public int freq() throws IOException {
+      return parentFreq;
     }

     int getParentDoc() {
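ToChildBlockJoinScorer keeps its traversal state (parentDoc, childDoc, and so on) on the enclosing Scorer and moves the old nextDoc()/advance()/cost() bodies into an anonymous DocIdSetIterator returned from iterator(); BlockJoinScorer below does the same. A reduced, hedged sketch of that shape follows, with a hypothetical WrappingScorer standing in for the real classes; it simply delegates to another scorer's iterator and is not code from this commit.

import java.io.IOException;

import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

// Illustrative only: a scorer that drives its matching off another scorer's
// iterator and exposes the traversal through iterator() instead of extending
// DocIdSetIterator itself.
class WrappingScorer extends Scorer {
  private final Scorer in;
  private final DocIdSetIterator inIt;
  int doc = -1;

  WrappingScorer(Weight weight, Scorer in) {
    super(weight);
    this.in = in;
    this.inIt = in.iterator();
  }

  @Override
  public int docID() {
    return doc;
  }

  @Override
  public float score() throws IOException {
    return in.score();
  }

  @Override
  public int freq() throws IOException {
    return in.freq();
  }

  @Override
  public DocIdSetIterator iterator() {
    return new DocIdSetIterator() {
      @Override
      public int docID() {
        return doc;
      }

      @Override
      public int nextDoc() throws IOException {
        // State stays on the enclosing scorer so docID()/score() keep working.
        return doc = inIt.nextDoc();
      }

      @Override
      public int advance(int target) throws IOException {
        return doc = inIt.advance(target);
      }

      @Override
      public long cost() {
        return inIt.cost();
      }
    };
  }
}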
@@ -61,7 +61,8 @@ public class ToParentBlockJoinIndexSearcher extends IndexSearcher {
       final LeafCollector leafCollector = collector.getLeafCollector(ctx);
       leafCollector.setScorer(scorer);
       final Bits liveDocs = ctx.reader().getLiveDocs();
-      for (int doc = scorer.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.nextDoc()) {
+      final DocIdSetIterator it = scorer.iterator();
+      for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
         if (liveDocs == null || liveDocs.get(doc)) {
           leafCollector.collect(doc);
         }
@@ -163,7 +163,7 @@ public class ToParentBlockJoinQuery extends Query {
         return null;
       }

-      final int firstChildDoc = childScorer.nextDoc();
+      final int firstChildDoc = childScorer.iterator().nextDoc();
       if (firstChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
         // No matches
         return null;
@@ -184,7 +184,7 @@ public class ToParentBlockJoinQuery extends Query {
     @Override
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       BlockJoinScorer scorer = (BlockJoinScorer) scorer(context);
-      if (scorer != null && scorer.advance(doc) == doc) {
+      if (scorer != null && scorer.iterator().advance(doc) == doc) {
         return scorer.explain(context.docBase);
       }
       return Explanation.noMatch("Not a match");
@@ -249,6 +249,11 @@ public class ToParentBlockJoinQuery extends Query {
       return ret;
     }

+    @Override
+    public DocIdSetIterator iterator() {
+      return new DocIdSetIterator() {
+        final DocIdSetIterator childIt = childScorer.iterator();
+
     @Override
     public int nextDoc() throws IOException {
       //System.out.println("Q.nextDoc() nextChildDoc=" + nextChildDoc);
@@ -302,7 +307,7 @@ public class ToParentBlockJoinQuery extends Query {
             parentFreq += childFreq;
           }
           childDocUpto++;
-          nextChildDoc = childScorer.nextDoc();
+          nextChildDoc = childIt.nextDoc();
         } while (nextChildDoc < parentDoc);

         // Parent & child docs are supposed to be
@@ -332,21 +337,6 @@ public class ToParentBlockJoinQuery extends Query {
       return parentDoc;
     }

-    @Override
-    public int docID() {
-      return parentDoc;
-    }
-
-    @Override
-    public float score() throws IOException {
-      return parentScore;
-    }
-
-    @Override
-    public int freq() {
-      return parentFreq;
-    }
-
     @Override
     public int advance(int parentTarget) throws IOException {

@@ -370,7 +360,7 @@ public class ToParentBlockJoinQuery extends Query {
       //System.out.println("  rolled back to prevParentDoc=" + prevParentDoc + " vs parentDoc=" + parentDoc);
       assert prevParentDoc >= parentDoc;
       if (prevParentDoc > nextChildDoc) {
-        nextChildDoc = childScorer.advance(prevParentDoc);
+        nextChildDoc = childIt.advance(prevParentDoc);
         // System.out.println("  childScorer advanced to child docID=" + nextChildDoc);
       //} else {
         //System.out.println("  skip childScorer advance");
@@ -386,6 +376,33 @@ public class ToParentBlockJoinQuery extends Query {
       return nd;
     }

+        @Override
+        public int docID() {
+          return parentDoc;
+        }
+
+        @Override
+        public long cost() {
+          return childIt.cost();
+        }
+      };
+    }
+
+    @Override
+    public int docID() {
+      return parentDoc;
+    }
+
+    @Override
+    public float score() throws IOException {
+      return parentScore;
+    }
+
+    @Override
+    public int freq() {
+      return parentFreq;
+    }
+
     public Explanation explain(int docBase) throws IOException {
       int start = docBase + prevParentDoc + 1; // +1 b/c prevParentDoc is previous parent doc
       int end = docBase + parentDoc - 1; // -1 b/c parentDoc is parent doc
@@ -393,11 +410,6 @@ public class ToParentBlockJoinQuery extends Query {
       );
     }

-    @Override
-    public long cost() {
-      return childScorer.cost();
-    }
-
     /**
      * Instructs this scorer to keep track of the child docIds and score ids for retrieval purposes.
      */
@@ -63,6 +63,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryUtils;
 import org.apache.lucene.search.RandomApproximationQuery;
 import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
@@ -1143,8 +1144,8 @@ public class TestBlockJoin extends LuceneTestCase {

     ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg);
     Weight weight = s.createNormalizedWeight(q, true);
-    DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0));
-    assertEquals(1, disi.advance(1));
+    Scorer sc = weight.scorer(s.getIndexReader().leaves().get(0));
+    assertEquals(1, sc.iterator().advance(1));
     r.close();
     dir.close();
   }
@@ -1177,8 +1178,8 @@ public class TestBlockJoin extends LuceneTestCase {

     ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg);
     Weight weight = s.createNormalizedWeight(q, true);
-    DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0));
-    assertEquals(2, disi.advance(0));
+    Scorer sc = weight.scorer(s.getIndexReader().leaves().get(0));
+    assertEquals(2, sc.iterator().advance(0));
     r.close();
     dir.close();
   }
@@ -1657,12 +1658,12 @@ public class TestBlockJoin extends LuceneTestCase {
     ToChildBlockJoinQuery parentJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentFilter);

     Weight weight = s.createNormalizedWeight(parentJoinQuery, random().nextBoolean());
-    DocIdSetIterator advancingScorer = weight.scorer(s.getIndexReader().leaves().get(0));
-    DocIdSetIterator nextDocScorer = weight.scorer(s.getIndexReader().leaves().get(0));
+    Scorer advancingScorer = weight.scorer(s.getIndexReader().leaves().get(0));
+    Scorer nextDocScorer = weight.scorer(s.getIndexReader().leaves().get(0));

-    final int firstKid = nextDocScorer.nextDoc();
+    final int firstKid = nextDocScorer.iterator().nextDoc();
     assertTrue("firstKid not found", DocIdSetIterator.NO_MORE_DOCS != firstKid);
-    assertEquals(firstKid, advancingScorer.advance(0));
+    assertEquals(firstKid, advancingScorer.iterator().advance(0));

     r.close();
     dir.close();
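The test hunks above reduce to one mechanical rule: wherever a test previously used the Scorer itself as a DocIdSetIterator, it now asks for scorer.iterator() first, and still has to handle a null scorer. A small hedged helper illustrating that rule, with invented names not present in the commit:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

final class ScorerTestHelper {
  // Advance a freshly created scorer to target and return the doc it lands
  // on, or NO_MORE_DOCS if the segment has no match at all.
  static int advanceScorer(Weight weight, LeafReaderContext leaf, int target) throws IOException {
    Scorer scorer = weight.scorer(leaf);
    if (scorer == null) {
      return DocIdSetIterator.NO_MORE_DOCS;
    }
    return scorer.iterator().advance(target);
  }
}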
|
@ -140,7 +140,7 @@ public class TestBlockJoinValidation extends LuceneTestCase {
|
||||||
} while (parentDocs.get(target + 1));
|
} while (parentDocs.get(target + 1));
|
||||||
|
|
||||||
try {
|
try {
|
||||||
scorer.advance(target);
|
scorer.iterator().advance(target);
|
||||||
fail();
|
fail();
|
||||||
} catch (IllegalStateException expected) {
|
} catch (IllegalStateException expected) {
|
||||||
assertTrue(expected.getMessage() != null && expected.getMessage().contains(ToChildBlockJoinQuery.INVALID_QUERY_MESSAGE));
|
assertTrue(expected.getMessage() != null && expected.getMessage().contains(ToChildBlockJoinQuery.INVALID_QUERY_MESSAGE));
|
||||||
|
|
|
@@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
@@ -134,9 +135,9 @@ public class PKIndexSplitter {
       final int maxDoc = in.maxDoc();
       final FixedBitSet bits = new FixedBitSet(maxDoc);
       // ignore livedocs here, as we filter them later:
-      final DocIdSetIterator preserveIt = preserveWeight.scorer(context);
-      if (preserveIt != null) {
-        bits.or(preserveIt);
+      final Scorer preverveScorer = preserveWeight.scorer(context);
+      if (preverveScorer != null) {
+        bits.or(preverveScorer.iterator());
       }
       if (negateFilter) {
         bits.flip(0, maxDoc);
@@ -20,6 +20,7 @@ package org.apache.lucene.index;
 import java.io.IOException;
 import java.util.Comparator;

+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.LeafFieldComparator;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Sort;
@@ -269,18 +270,7 @@ final class Sorter {
       return doc;
     }

-    @Override
-    public int nextDoc() throws IOException {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public int advance(int target) throws IOException {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public long cost() {
+    public DocIdSetIterator iterator() {
       throw new UnsupportedOperationException();
     }

@@ -122,11 +122,11 @@ public class BlockJoinComparatorSource extends FieldComparatorSource {
         IndexSearcher searcher = new IndexSearcher(ReaderUtil.getTopLevelContext(context));
         searcher.setQueryCache(null);
         final Weight weight = searcher.createNormalizedWeight(parentsFilter, false);
-        final DocIdSetIterator parents = weight.scorer(context);
+        final Scorer parents = weight.scorer(context);
         if (parents == null) {
           throw new IllegalStateException("LeafReader " + context.reader() + " contains no parents!");
         }
-        parentBits = BitSet.of(parents, context.reader().maxDoc());
+        parentBits = BitSet.of(parents.iterator(), context.reader().maxDoc());
         parentLeafComparators = new LeafFieldComparator[parentComparators.length];
         for (int i = 0; i < parentComparators.length; i++) {
           parentLeafComparators[i] = parentComparators[i].getLeafComparator(context);
@@ -27,9 +27,9 @@ import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.search.BlockJoinComparatorSource;
-import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
@@ -37,7 +37,6 @@ import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.LuceneTestCase;
-import org.junit.Ignore;

 public class TestBlockJoinSorter extends LuceneTestCase {

@@ -74,8 +73,8 @@ public class TestBlockJoinSorter extends LuceneTestCase {
     final Query parentsFilter = new TermQuery(new Term("parent", "true"));

     final Weight weight = searcher.createNormalizedWeight(parentsFilter, false);
-    final DocIdSetIterator parents = weight.scorer(indexReader.leaves().get(0));
-    final BitSet parentBits = BitSet.of(parents, reader.maxDoc());
+    final Scorer parents = weight.scorer(indexReader.leaves().get(0));
+    final BitSet parentBits = BitSet.of(parents.iterator(), reader.maxDoc());
     final NumericDocValues parentValues = reader.getNumericDocValues("parent_val");
     final NumericDocValues childValues = reader.getNumericDocValues("child_val");

@@ -112,9 +112,9 @@ public class BoostingQuery extends Query {
         if (contextScorer == null) {
           return matchScorer;
         }
-        TwoPhaseIterator contextTwoPhase = contextScorer.asTwoPhaseIterator();
+        TwoPhaseIterator contextTwoPhase = contextScorer.twoPhaseIterator();
         DocIdSetIterator contextApproximation = contextTwoPhase == null
-            ? contextScorer
+            ? contextScorer.iterator()
             : contextTwoPhase.approximation();
         return new FilterScorer(matchScorer) {
           @Override

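The BoostingQuery hunk also illustrates the companion rename: asTwoPhaseIterator() becomes twoPhaseIterator(), and when a scorer has no two-phase view the fallback approximation is simply its plain iterator. A hedged sketch of that fallback, with an assumed `scorer` variable:

    // Sketch: pick the cheapest approximation available for a given Scorer.
    TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
    DocIdSetIterator approximation = (twoPhase == null)
        ? scorer.iterator()          // no two-phase support: iterate matches directly
        : twoPhase.approximation();  // two-phase: iterate the cheap approximation, confirm with matches()
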
@@ -294,7 +294,7 @@ public class CustomScoreQuery extends Query implements Cloneable {
       int doc = docID();
       if (doc > valSrcDocID) {
         for (Scorer valSrcScorer : valSrcScorers) {
-          valSrcScorer.advance(doc);
+          valSrcScorer.iterator().advance(doc);
         }
         valSrcDocID = doc;
       }

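In CustomScoreQuery the value-source scorers are positioned lazily on the current document; under the new API that positioning goes through each sub-scorer's iterator. A small hypothetical sketch of the same idiom (`subScorers` and `doc` are illustrative):

    // Sketch: keep several sub-scorers positioned on (or past) the current doc.
    for (Scorer sub : subScorers) {
      if (sub.docID() < doc) {
        sub.iterator().advance(doc);  // advance() now lives on the iterator, not the Scorer
      }
    }
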
@@ -24,6 +24,7 @@ import java.util.Set;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -94,7 +95,7 @@ public class FunctionQuery extends Query {
     final FunctionWeight weight;
     final int maxDoc;
     final float qWeight;
-    int doc=-1;
+    final DocIdSetIterator iterator;
     final FunctionValues vals;
 
     public AllScorer(LeafReaderContext context, FunctionWeight w, float qWeight) throws IOException {
@@ -103,35 +104,23 @@ public class FunctionQuery extends Query {
       this.qWeight = qWeight;
       this.reader = context.reader();
       this.maxDoc = reader.maxDoc();
+      iterator = DocIdSetIterator.all(context.reader().maxDoc());
       vals = func.getValues(weight.context, context);
     }
 
+    @Override
+    public DocIdSetIterator iterator() {
+      return iterator;
+    }
+
     @Override
     public int docID() {
-      return doc;
+      return iterator.docID();
     }
-
-    // instead of matching all docs, we could also embed a query.
-    // the score could either ignore the subscore, or boost it.
-    // Containment: floatline(foo:myTerm, "myFloatField", 1.0, 0.0f)
-    // Boost: foo:myTerm^floatline("myFloatField",1.0,0.0f)
-    @Override
-    public int nextDoc() throws IOException {
-      ++doc;
-      if (doc>=maxDoc) {
-        return doc=NO_MORE_DOCS;
-      }
-      return doc;
-    }
-
-    @Override
-    public int advance(int target) throws IOException {
-      return slowAdvance(target);
-    }
 
     @Override
     public float score() throws IOException {
-      float score = qWeight * vals.floatVal(doc);
+      float score = qWeight * vals.floatVal(docID());
 
       // Current Lucene priority queues can't handle NaN and -Infinity, so
       // map to -Float.MAX_VALUE. This conditional handles both -infinity
@@ -139,11 +128,6 @@ public class FunctionQuery extends Query {
       return score>Float.NEGATIVE_INFINITY ? score : -Float.MAX_VALUE;
     }
 
-    @Override
-    public long cost() {
-      return maxDoc;
-    }
-
     @Override
     public int freq() throws IOException {
       return 1;

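Taken together, the FunctionQuery hunks show how a Scorer that used to implement nextDoc()/advance()/cost() itself now just hands back a DocIdSetIterator. A stripped-down, hypothetical scorer following the same recipe (class and field names here are illustrative, not from the patch):

    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    // Sketch: a match-all scorer under the new API. DocIdSetIterator.all() supplies
    // nextDoc()/advance()/cost(); the Scorer only reports docID(), freq() and score().
    final class AllDocsScorer extends Scorer {
      private final DocIdSetIterator iterator;

      AllDocsScorer(Weight weight, int maxDoc) {
        super(weight);
        iterator = DocIdSetIterator.all(maxDoc);
      }

      @Override public DocIdSetIterator iterator() { return iterator; }
      @Override public int docID() { return iterator.docID(); }
      @Override public int freq() { return 1; }
      @Override public float score() { return 1f; }  // constant score, enough for the sketch
    }
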
@@ -132,7 +132,7 @@ public class FunctionRangeQuery extends Query {
       // which can be slow since if that doc doesn't match, it has to linearly find the next matching
       ValueSourceScorer scorer = scorer(context);
       if (scorer.matches(doc)) {
-        scorer.advance(doc);
+        scorer.iterator().advance(doc);
         return Explanation.match(scorer.score(), FunctionRangeQuery.this.toString(), functionValues.explain(doc));
       } else {
         return Explanation.noMatch(FunctionRangeQuery.this.toString(), functionValues.explain(doc));

@@ -48,10 +48,11 @@ public abstract class ValueSourceScorer extends Scorer {
   protected ValueSourceScorer(IndexReader reader, FunctionValues values) {
     super(null);//no weight
     this.values = values;
-    this.twoPhaseIterator = new TwoPhaseIterator(DocIdSetIterator.all(reader.maxDoc())) { // no approximation!
+    final DocIdSetIterator approximation = DocIdSetIterator.all(reader.maxDoc()); // no approximation!
+    this.twoPhaseIterator = new TwoPhaseIterator(approximation) {
       @Override
       public boolean matches() throws IOException {
-        return ValueSourceScorer.this.matches(docID());
+        return ValueSourceScorer.this.matches(approximation.docID());
       }
 
       @Override
@@ -66,7 +67,12 @@ public abstract class ValueSourceScorer extends Scorer {
   public abstract boolean matches(int doc);
 
   @Override
-  public TwoPhaseIterator asTwoPhaseIterator() {
+  public DocIdSetIterator iterator() {
+    return disi;
+  }
+
+  @Override
+  public TwoPhaseIterator twoPhaseIterator() {
     return twoPhaseIterator;
   }
 
@@ -75,16 +81,6 @@ public abstract class ValueSourceScorer extends Scorer {
     return disi.docID();
   }
 
-  @Override
-  public int nextDoc() throws IOException {
-    return disi.nextDoc();
-  }
-
-  @Override
-  public int advance(int target) throws IOException {
-    return disi.advance(target);
-  }
-
   @Override
   public float score() throws IOException {
     // (same as FunctionQuery, but no qWeight) TODO consider adding configurable qWeight
@@ -100,8 +96,4 @@ public abstract class ValueSourceScorer extends Scorer {
     return 1;
   }
 
-  @Override
-  public long cost() {
-    return disi.cost();
-  }
 }

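ValueSourceScorer shows the other half of the refactor: a scorer whose matching is two-phase keeps its TwoPhaseIterator and derives the plain iterator from it, instead of forwarding nextDoc()/advance()/cost() itself. A hedged sketch of wiring the two views together (names such as `maxDoc` and `expensiveCheck` are illustrative, and the final line assumes the TwoPhaseIterator.asDocIdSetIterator helper):

    // Sketch: expose both views of a two-phase scorer.
    TwoPhaseIterator twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) {
      @Override
      public boolean matches() throws IOException {
        return expensiveCheck(approximation().docID());  // hypothetical per-doc test
      }

      @Override
      public float matchCost() {
        return 100;  // rough cost estimate of matches()
      }
    };
    // The plain iterator is just the approximation filtered through matches().
    DocIdSetIterator disi = TwoPhaseIterator.asDocIdSetIterator(twoPhase);
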
@@ -25,11 +25,11 @@ import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.docvalues.FloatDocValues;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
-import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.mutable.MutableValue;
 import org.apache.lucene.util.mutable.MutableValueFloat;
 
@@ -86,6 +86,7 @@ class QueryDocValues extends FloatDocValues {
   final Query q;
 
   Scorer scorer;
+  DocIdSetIterator it;
   int scorerDoc; // the document the scorer is on
   boolean noMatches=false;
 
@@ -129,12 +130,13 @@ class QueryDocValues extends FloatDocValues {
           noMatches = true;
           return defVal;
         }
+        it = scorer.iterator();
         scorerDoc = -1;
       }
       lastDocRequested = doc;
 
       if (scorerDoc < doc) {
-        scorerDoc = scorer.advance(doc);
+        scorerDoc = it.advance(doc);
       }
 
       if (scorerDoc > doc) {
@@ -161,11 +163,12 @@ class QueryDocValues extends FloatDocValues {
           noMatches = true;
           return false;
         }
+        it = scorer.iterator();
       }
       lastDocRequested = doc;
 
       if (scorerDoc < doc) {
-        scorerDoc = scorer.advance(doc);
+        scorerDoc = it.advance(doc);
       }
 
       if (scorerDoc > doc) {
@@ -221,10 +224,11 @@ class QueryDocValues extends FloatDocValues {
           mval.exists = false;
           return;
         }
+        it = scorer.iterator();
         lastDocRequested = doc;
 
         if (scorerDoc < doc) {
-          scorerDoc = scorer.advance(doc);
+          scorerDoc = it.advance(doc);
         }
 
         if (scorerDoc > doc) {

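QueryDocValues keeps a Scorer alive across calls, so after this change it also caches the scorer's iterator and advances that instead. The pattern, sketched with illustrative names (`weight`, `context`, `doc` and the cached fields are assumptions for the sketch):

    // Sketch: cache the iterator next to the long-lived scorer and reuse it for advancing.
    if (scorer == null || lastDocRequested > doc) {
      scorer = weight.scorer(context);              // (re)create when seeking backwards
      it = (scorer == null) ? null : scorer.iterator();
      scorerDoc = -1;
    }
    if (scorer != null && scorerDoc < doc) {
      scorerDoc = it.advance(doc);                  // DocIdSetIterator.advance replaces Scorer.advance
    }
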
@@ -27,11 +27,13 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.search.similarities.Similarity;
+import org.apache.lucene.search.similarities.Similarity.SimScorer;
+import org.apache.lucene.search.spans.FilterSpans;
 import org.apache.lucene.search.spans.SpanCollector;
 import org.apache.lucene.search.spans.SpanQuery;
+import org.apache.lucene.search.spans.SpanScorer;
 import org.apache.lucene.search.spans.SpanWeight;
 import org.apache.lucene.search.spans.Spans;
 import org.apache.lucene.util.BytesRef;
@@ -132,11 +134,13 @@ public class PayloadScoreQuery extends SpanQuery {
     }
 
     @Override
-    public Scorer scorer(LeafReaderContext context) throws IOException {
+    public PayloadSpanScorer scorer(LeafReaderContext context) throws IOException {
      Spans spans = getSpans(context, Postings.PAYLOADS);
      if (spans == null)
        return null;
-      return new PayloadSpans(spans, this, innerWeight.getSimScorer(context));
+      SimScorer docScorer = innerWeight.getSimScorer(context);
+      PayloadSpans payloadSpans = new PayloadSpans(spans, docScorer);
+      return new PayloadSpanScorer(this, payloadSpans, docScorer);
     }
 
     @Override
@@ -156,8 +160,8 @@ public class PayloadScoreQuery extends SpanQuery {
 
     @Override
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-      PayloadSpans scorer = (PayloadSpans) scorer(context);
-      if (scorer == null || scorer.advance(doc) != doc)
+      PayloadSpanScorer scorer = scorer(context);
+      if (scorer == null || scorer.iterator().advance(doc) != doc)
        return Explanation.noMatch("No match");
 
      scorer.freq(); // force freq calculation
@@ -173,40 +177,20 @@ public class PayloadScoreQuery extends SpanQuery {
     }
   }
 
-  private class PayloadSpans extends Spans implements SpanCollector {
+  private class PayloadSpans extends FilterSpans implements SpanCollector {
 
-    private int payloadsSeen;
-    private float payloadScore;
-    private final Spans in;
+    private final SimScorer docScorer;
+    public int payloadsSeen;
+    public float payloadScore;
 
-    private PayloadSpans(Spans spans, SpanWeight weight, Similarity.SimScorer docScorer) throws IOException {
-      super(weight, docScorer);
-      this.in = spans;
+    private PayloadSpans(Spans in, SimScorer docScorer) {
+      super(in);
+      this.docScorer = docScorer;
     }
 
     @Override
-    public int nextStartPosition() throws IOException {
-      return in.nextStartPosition();
-    }
-
-    @Override
-    public int startPosition() {
-      return in.startPosition();
-    }
-
-    @Override
-    public int endPosition() {
-      return in.endPosition();
-    }
-
-    @Override
-    public int width() {
-      return in.width();
-    }
-
-    @Override
-    public void collect(SpanCollector collector) throws IOException {
-      in.collect(collector);
+    protected AcceptStatus accept(Spans candidate) throws IOException {
+      return AcceptStatus.YES;
     }
 
     @Override
@@ -215,11 +199,6 @@ public class PayloadScoreQuery extends SpanQuery {
      payloadsSeen = 0;
     }
 
-    @Override
-    protected void doCurrentSpans() throws IOException {
-      in.collect(this);
-    }
-
    @Override
    public void collectLeaf(PostingsEnum postings, int position, Term term) throws IOException {
      BytesRef payload = postings.getPayload();
@@ -231,12 +210,30 @@ public class PayloadScoreQuery extends SpanQuery {
      payloadsSeen++;
    }
 
+    @Override
+    public void reset() {}
+
+    @Override
+    protected void doCurrentSpans() throws IOException {
+      in.collect(this);
+    }
+  }
+
+  private class PayloadSpanScorer extends SpanScorer {
+
+    private final PayloadSpans spans;
+
+    private PayloadSpanScorer(SpanWeight weight, PayloadSpans spans, Similarity.SimScorer docScorer) throws IOException {
+      super(weight, spans, docScorer);
+      this.spans = spans;
+    }
+
    protected float getPayloadScore() {
-      return function.docScore(docID(), getField(), payloadsSeen, payloadScore);
+      return function.docScore(docID(), getField(), spans.payloadsSeen, spans.payloadScore);
    }
 
    protected Explanation getPayloadExplanation() {
-      return function.explain(docID(), getField(), payloadsSeen, payloadScore);
+      return function.explain(docID(), getField(), spans.payloadsSeen, spans.payloadScore);
    }
 
    protected float getSpanScore() throws IOException {
@@ -250,35 +247,6 @@ public class PayloadScoreQuery extends SpanQuery {
      return getPayloadScore();
    }
 
-    @Override
-    public void reset() {
-
-    }
-
-    @Override
-    public int docID() {
-      return in.docID();
-    }
-
-    @Override
-    public int nextDoc() throws IOException {
-      return in.nextDoc();
-    }
-
-    @Override
-    public int advance(int target) throws IOException {
-      return in.advance(target);
-    }
-
-    @Override
-    public long cost() {
-      return in.cost();
-    }
-
-    @Override
-    public float positionsCost() {
-      return in.positionsCost();
-    }
   }
 
 }

Some files were not shown because too many files have changed in this diff.