LUCENE-6226: Query.createWeight() takes a param indicating which postings values should be read from the index

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1658373 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Alan Woodward 2015-02-09 11:46:07 +00:00
parent 248121bb3f
commit ef2141f6bf
67 changed files with 361 additions and 154 deletions

View File

@ -88,6 +88,10 @@ API Changes
* LUCENE-6223: Move BooleanQuery.BooleanWeight to BooleanWeight.
(Robert Muir)
* LUCENE-6226: Query.createWeight() and IndexSearcher.createNormalizedWeight()
now take an integer bitmask indicating which postings values should be
retrieved from the index. (Alan Woodward, Adrien Grand)
Other
* LUCENE-6193: Collapse identical catch branches in try-catch statements.

View File

@ -92,17 +92,16 @@ public abstract class PostingsEnum extends DocIdSetIterator {
}
/**
* Returns the next position. If there are no more
* positions, or the iterator does not support positions,
* this will return DocsEnum.NO_MORE_POSITIONS */
* Returns the next position, or -1 if positions are not available.
* Should only be called up to freq() times */
public abstract int nextPosition() throws IOException;
/** Returns start offset for the current position, or -1
* if offsets were not indexed. */
* if offsets are not available. */
public abstract int startOffset() throws IOException;
/** Returns end offset for the current position, or -1 if
* offsets were not indexed. */
* offsets are not available. */
public abstract int endOffset() throws IOException;
/** Returns the payload at this position, or null if no

View File

@ -163,8 +163,8 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
public final Iterator<BooleanClause> iterator() { return clauses().iterator(); }
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new BooleanWeight(this, searcher, needsScores, disableCoord);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new BooleanWeight(this, searcher, postingsFlags, disableCoord);
}
@Override

View File

@ -24,6 +24,7 @@ import java.util.Iterator;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.Bits;
@ -43,17 +44,17 @@ public class BooleanWeight extends Weight {
private final boolean disableCoord;
private final boolean needsScores;
public BooleanWeight(BooleanQuery query, IndexSearcher searcher, boolean needsScores, boolean disableCoord) throws IOException {
public BooleanWeight(BooleanQuery query, IndexSearcher searcher, int postingsFlags, boolean disableCoord) throws IOException {
super(query);
this.query = query;
this.needsScores = needsScores;
this.needsScores = (postingsFlags & PostingsEnum.FLAG_FREQS) != 0;
this.similarity = searcher.getSimilarity();
this.disableCoord = disableCoord;
weights = new ArrayList<>(query.clauses().size());
for (int i = 0 ; i < query.clauses().size(); i++) {
BooleanClause c = query.clauses().get(i);
final boolean queryNeedsScores = needsScores && c.getOccur() != Occur.MUST_NOT;
Weight w = c.getQuery().createWeight(searcher, queryNeedsScores);
final int subQueryFlags = c.getOccur() == Occur.MUST_NOT ? PostingsEnum.FLAG_NONE : postingsFlags;
Weight w = c.getQuery().createWeight(searcher, subQueryFlags);
weights.add(w);
if (!c.isProhibited()) {
maxCoord++;

View File

@ -24,6 +24,7 @@ import java.util.Set;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
@ -111,7 +112,7 @@ public class ConstantScoreQuery extends Query {
public ConstantWeight(IndexSearcher searcher) throws IOException {
super(ConstantScoreQuery.this);
this.innerWeight = (query == null) ? null : query.createWeight(searcher, false);
this.innerWeight = (query == null) ? null : query.createWeight(searcher, PostingsEnum.FLAG_NONE);
}
@Override
@ -327,7 +328,7 @@ public class ConstantScoreQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new ConstantScoreQuery.ConstantWeight(searcher);
}

View File

@ -118,10 +118,10 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
protected ArrayList<Weight> weights = new ArrayList<>(); // The Weight's for our subqueries, in 1-1 correspondence with disjuncts
/** Construct the Weight for this Query searched by searcher. Recursively construct subquery weights. */
public DisjunctionMaxWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public DisjunctionMaxWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
super(DisjunctionMaxQuery.this);
for (Query disjunctQuery : disjuncts) {
weights.add(disjunctQuery.createWeight(searcher, needsScores));
weights.add(disjunctQuery.createWeight(searcher, postingsFlags));
}
}
@ -194,8 +194,8 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
/** Create the Weight used to score us */
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new DisjunctionMaxWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new DisjunctionMaxWeight(searcher, postingsFlags);
}
/** Optimize our representation and our subqueries representations

View File

@ -78,8 +78,8 @@ public class FilteredQuery extends Query {
* This is accomplished by overriding the Scorer returned by the Weight.
*/
@Override
public Weight createWeight(final IndexSearcher searcher, boolean needsScores) throws IOException {
final Weight weight = query.createWeight (searcher, needsScores);
public Weight createWeight(final IndexSearcher searcher, int postingsFlags) throws IOException {
final Weight weight = query.createWeight (searcher, postingsFlags);
return new Weight(FilteredQuery.this) {
@Override

View File

@ -33,6 +33,7 @@ import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.IndexWriter; // javadocs
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.index.StoredFieldVisitor;
@ -240,13 +241,14 @@ public class IndexSearcher {
return collector.topDocs();
} else {
final TopScoreDocCollector[] collectors = new TopScoreDocCollector[leafSlices.length];
boolean needsScores = false;
int postingsFlags = PostingsEnum.FLAG_NONE;
for (int i = 0; i < leafSlices.length; ++i) {
collectors[i] = TopScoreDocCollector.create(numHits, after);
needsScores |= collectors[i].needsScores();
if (collectors[i].needsScores())
postingsFlags |= PostingsEnum.FLAG_FREQS;
}
final Weight weight = createNormalizedWeight(query, needsScores);
final Weight weight = createNormalizedWeight(query, postingsFlags);
final List<Future<TopDocs>> topDocsFutures = new ArrayList<>(leafSlices.length);
for (int i = 0; i < leafSlices.length; ++i) {
final LeafReaderContext[] leaves = leafSlices[i].leaves;
@ -338,7 +340,8 @@ public class IndexSearcher {
*/
public void search(Query query, Collector results)
throws IOException {
search(leafContexts, createNormalizedWeight(query, results.needsScores()), results);
int postingsFlags = results.needsScores() ? PostingsEnum.FLAG_FREQS : PostingsEnum.FLAG_NONE;
search(leafContexts, createNormalizedWeight(query, postingsFlags), results);
}
/** Search implementation with arbitrary sorting. Finds
@ -462,13 +465,14 @@ public class IndexSearcher {
return collector.topDocs();
} else {
final TopFieldCollector[] collectors = new TopFieldCollector[leafSlices.length];
boolean needsScores = false;
int postingsFlags = PostingsEnum.FLAG_NONE;
for (int i = 0; i < leafSlices.length; ++i) {
collectors[i] = TopFieldCollector.create(sort, numHits, after, fillFields, doDocScores, doMaxScore);
needsScores |= collectors[i].needsScores();
if (collectors[i].needsScores())
postingsFlags |= PostingsEnum.FLAG_FREQS;
}
final Weight weight = createNormalizedWeight(query, needsScores);
final Weight weight = createNormalizedWeight(query, postingsFlags);
final List<Future<TopFieldDocs>> topDocsFutures = new ArrayList<>(leafSlices.length);
for (int i = 0; i < leafSlices.length; ++i) {
final LeafReaderContext[] leaves = leafSlices[i].leaves;
@ -565,7 +569,7 @@ public class IndexSearcher {
* entire index.
*/
public Explanation explain(Query query, int doc) throws IOException {
return explain(createNormalizedWeight(query, true), doc);
return explain(createNormalizedWeight(query, PostingsEnum.FLAG_FREQS), doc);
}
/** Expert: low-level implementation method
@ -595,9 +599,9 @@ public class IndexSearcher {
* can then directly be used to get a {@link Scorer}.
* @lucene.internal
*/
public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
public Weight createNormalizedWeight(Query query, int postingsFlags) throws IOException {
query = rewrite(query);
Weight weight = query.createWeight(this, needsScores);
Weight weight = query.createWeight(this, postingsFlags);
float v = weight.getValueForNormalization();
float norm = getSimilarity().queryNorm(v);
if (Float.isInfinite(norm) || Float.isNaN(norm)) {

View File

@ -150,7 +150,7 @@ public class MatchAllDocsQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) {
return new MatchAllDocsWeight(searcher);
}

View File

@ -142,11 +142,13 @@ public class MultiPhraseQuery extends Query {
private final Similarity.SimWeight stats;
private final Map<Term,TermContext> termContexts = new HashMap<>();
private final boolean needsScores;
private final int postingsFlags;
public MultiPhraseWeight(IndexSearcher searcher, boolean needsScores)
public MultiPhraseWeight(IndexSearcher searcher, int postingsFlags)
throws IOException {
super(MultiPhraseQuery.this);
this.needsScores = needsScores;
this.needsScores = (postingsFlags & PostingsEnum.FLAG_FREQS) != 0;
this.postingsFlags = postingsFlags | PostingsEnum.FLAG_POSITIONS;
this.similarity = searcher.getSimilarity();
final IndexReaderContext context = searcher.getTopReaderContext();
@ -228,7 +230,7 @@ public class MultiPhraseQuery extends Query {
return null;
}
termsEnum.seekExact(term.bytes(), termState);
postingsEnum = termsEnum.postings(liveDocs, null, PostingsEnum.FLAG_POSITIONS);
postingsEnum = termsEnum.postings(liveDocs, null, postingsFlags);
if (postingsEnum == null) {
// term does exist, but has no positions
@ -296,8 +298,8 @@ public class MultiPhraseQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new MultiPhraseWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new MultiPhraseWeight(searcher, postingsFlags);
}
/** Prints a user-readable version of this query. */

View File

@ -211,13 +211,15 @@ public class PhraseQuery extends Query {
private class PhraseWeight extends Weight {
private final Similarity similarity;
private final Similarity.SimWeight stats;
private final int postingsFlags;
private final boolean needsScores;
private transient TermContext states[];
public PhraseWeight(IndexSearcher searcher, boolean needsScores)
public PhraseWeight(IndexSearcher searcher, int postingsFlags)
throws IOException {
super(PhraseQuery.this);
this.needsScores = needsScores;
this.postingsFlags = postingsFlags | PostingsEnum.FLAG_POSITIONS;
this.needsScores = (postingsFlags & PostingsEnum.FLAG_FREQS) != 0;
this.similarity = searcher.getSimilarity();
final IndexReaderContext context = searcher.getTopReaderContext();
states = new TermContext[terms.size()];
@ -266,7 +268,7 @@ public class PhraseQuery extends Query {
return null;
}
te.seekExact(t.bytes(), state);
PostingsEnum postingsEnum = te.postings(liveDocs, null, PostingsEnum.FLAG_POSITIONS);
PostingsEnum postingsEnum = te.postings(liveDocs, null, postingsFlags);
// PhraseQuery on a field that did not index
// positions.
@ -318,8 +320,8 @@ public class PhraseQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new PhraseWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new PhraseWeight(searcher, postingsFlags);
}
/**

View File

@ -21,6 +21,7 @@ import java.io.IOException;
import java.util.Set;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
/** The abstract base class for queries.
@ -74,10 +75,10 @@ public abstract class Query implements Cloneable {
* <p>
* Only implemented by primitive queries, which re-write to themselves.
*
* @param needsScores True if document scores ({@link Scorer#score}) or match
* frequencies ({@link Scorer#freq}) are needed.
* @param postingsFlags Bitmask indicating which postings features should be returned
* by this query (see {@link PostingsEnum})
*/
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
throw new UnsupportedOperationException("Query " + this + " does not implement createWeight");
}

View File

@ -61,7 +61,7 @@ public abstract class QueryRescorer extends Rescorer {
List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
Weight weight = searcher.createNormalizedWeight(query, true);
Weight weight = searcher.createNormalizedWeight(query, PostingsEnum.FLAG_FREQS);
// Now merge sort docIDs from hits, with reader's leaves:
int hitUpto = 0;

View File

@ -53,7 +53,7 @@ public class QueryWrapperFilter extends Filter {
public DocIdSet getDocIdSet(final LeafReaderContext context, final Bits acceptDocs) throws IOException {
// get a private context that is used to rewrite, createWeight and score eventually
final LeafReaderContext privateContext = context.reader().getContext();
final Weight weight = new IndexSearcher(privateContext).createNormalizedWeight(query, false);
final Weight weight = new IndexSearcher(privateContext).createNormalizedWeight(query, PostingsEnum.FLAG_NONE);
return new DocIdSet() {
@Override
public DocIdSetIterator iterator() throws IOException {

View File

@ -73,7 +73,7 @@ public abstract class Scorer extends PostingsEnum {
public Collection<ChildScorer> getChildren() {
return Collections.emptyList();
}
/** A child Scorer and its relationship to its parent.
* The meaning of the relationship depends upon the parent query.
* @lucene.experimental */

View File

@ -47,12 +47,12 @@ public class TermQuery extends Query {
private final Similarity similarity;
private final Similarity.SimWeight stats;
private final TermContext termStates;
private final boolean needsScores;
private final int postingsFlags;
public TermWeight(IndexSearcher searcher, boolean needsScores, TermContext termStates)
public TermWeight(IndexSearcher searcher, int postingsFlags, TermContext termStates)
throws IOException {
super(TermQuery.this);
this.needsScores = needsScores;
this.postingsFlags = postingsFlags;
assert termStates != null : "TermContext must not be null";
this.termStates = termStates;
this.similarity = searcher.getSimilarity();
@ -83,7 +83,7 @@ public class TermQuery extends Query {
if (termsEnum == null) {
return null;
}
PostingsEnum docs = termsEnum.postings(acceptDocs, null, needsScores ? PostingsEnum.FLAG_FREQS : PostingsEnum.FLAG_NONE);
PostingsEnum docs = termsEnum.postings(acceptDocs, null, postingsFlags);
assert docs != null;
return new TermScorer(this, docs, similarity.simScorer(stats, context));
}
@ -168,7 +168,7 @@ public class TermQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
final IndexReaderContext context = searcher.getTopReaderContext();
final TermContext termState;
if (perReaderTermState == null
@ -184,7 +184,7 @@ public class TermQuery extends Query {
// we must not ignore the given docFreq - if set use the given value (lie)
if (docFreq != -1) termState.setDocFreq(docFreq);
return new TermWeight(searcher, needsScores, termState);
return new TermWeight(searcher, postingsFlags, termState);
}
@Override

View File

@ -26,6 +26,7 @@ import org.apache.lucene.util.BytesRef;
/** Expert: A <code>Scorer</code> for documents matching a <code>Term</code>.
*/
final class TermScorer extends Scorer {
private final PostingsEnum postingsEnum;
private final Similarity.SimScorer docScorer;

View File

@ -41,7 +41,7 @@ import org.apache.lucene.util.Bits;
* A <code>Weight</code> is used in the following way:
* <ol>
* <li>A <code>Weight</code> is constructed by a top-level query, given a
* <code>IndexSearcher</code> ({@link Query#createWeight(IndexSearcher, boolean)}).
* <code>IndexSearcher</code> ({@link Query#createWeight(IndexSearcher, int)}).
* <li>The {@link #getValueForNormalization()} method is called on the
* <code>Weight</code> to compute the query normalization factor
* {@link Similarity#queryNorm(float)} of the query clauses contained in the

View File

@ -358,7 +358,7 @@
* {@link org.apache.lucene.search.Query Query} class has several methods that are important for
* derived classes:
* <ol>
* <li>{@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher,boolean)} &mdash; A
* <li>{@link org.apache.lucene.search.Query#createWeight(IndexSearcher,int) createWeight(IndexSearcher searcher, int postingsFlags)} &mdash; A
* {@link org.apache.lucene.search.Weight Weight} is the internal representation of the
* Query, so each Query implementation must
* provide an implementation of Weight. See the subsection on <a
@ -367,7 +367,7 @@
* <li>{@link org.apache.lucene.search.Query#rewrite(org.apache.lucene.index.IndexReader) rewrite(IndexReader reader)} &mdash; Rewrites queries into primitive queries. Primitive queries are:
* {@link org.apache.lucene.search.TermQuery TermQuery},
* {@link org.apache.lucene.search.BooleanQuery BooleanQuery}, <span
* >and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher,boolean)}</span></li>
* >and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,int) createWeight(IndexSearcher searcher, int postingsFlags)}</span></li>
* </ol>
* <a name="weightClass"></a>
* <h3>The Weight Interface</h3>
@ -509,7 +509,7 @@
* <p>Assuming we are not sorting (since sorting doesn't affect the raw Lucene score),
* we call one of the search methods of the IndexSearcher, passing in the
* {@link org.apache.lucene.search.Weight Weight} object created by
* {@link org.apache.lucene.search.IndexSearcher#createNormalizedWeight(org.apache.lucene.search.Query,boolean)
* {@link org.apache.lucene.search.IndexSearcher#createNormalizedWeight(org.apache.lucene.search.Query,int)
* IndexSearcher.createNormalizedWeight(Query,int)},
* {@link org.apache.lucene.search.Filter Filter} and the number of results we want.
* This method returns a {@link org.apache.lucene.search.TopDocs TopDocs} object,

View File

@ -71,7 +71,7 @@ public class PayloadNearQuery extends SpanNearQuery {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new PayloadNearSpanWeight(this, searcher);
}

View File

@ -67,7 +67,7 @@ public class PayloadTermQuery extends SpanTermQuery {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new PayloadTermWeight(this, searcher);
}

View File

@ -106,8 +106,8 @@ public class FieldMaskingSpanQuery extends SpanQuery {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return maskedQuery.createWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return maskedQuery.createWeight(searcher, postingsFlags);
}
@Override

View File

@ -42,7 +42,7 @@ public abstract class SpanQuery extends Query {
public abstract String getField();
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new SpanWeight(this, searcher);
}

View File

@ -264,6 +264,11 @@ final class JustCompileSearch {
throw new UnsupportedOperationException( UNSUPPORTED_MSG );
}
@Override
public boolean needsScores() {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}
@Override
public TopDocs topDocs() {
throw new UnsupportedOperationException( UNSUPPORTED_MSG );
@ -278,11 +283,6 @@ final class JustCompileSearch {
public TopDocs topDocs( int start, int end ) {
throw new UnsupportedOperationException( UNSUPPORTED_MSG );
}
@Override
public boolean needsScores() {
throw new UnsupportedOperationException( UNSUPPORTED_MSG );
}
}
static final class JustCompileWeight extends Weight {

View File

@ -28,6 +28,7 @@ import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.Directory;
@ -706,7 +707,7 @@ public class TestBooleanCoord extends LuceneTestCase {
/** asserts score for our single matching good doc */
private void assertScore(final float expected, Query query) throws Exception {
// test in-order
Weight weight = searcher.createNormalizedWeight(query, true);
Weight weight = searcher.createNormalizedWeight(query, PostingsEnum.FLAG_FREQS);
Scorer scorer = weight.scorer(reader.leaves().get(0), null);
assertTrue(scorer.docID() == -1 || scorer.docID() == DocIdSetIterator.NO_MORE_DOCS);
assertEquals(0, scorer.nextDoc());

View File

@ -185,7 +185,7 @@ public class TestBooleanOr extends LuceneTestCase {
bq.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
bq.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
Weight w = s.createNormalizedWeight(bq, true);
Weight w = s.createNormalizedWeight(bq, PostingsEnum.FLAG_FREQS);
assertEquals(1, s.getIndexReader().leaves().size());
BulkScorer scorer = w.bulkScorer(s.getIndexReader().leaves().get(0), null);

View File

@ -34,6 +34,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.similarities.DefaultSimilarity;
@ -231,7 +232,7 @@ public class TestBooleanQuery extends LuceneTestCase {
q.add(new BooleanClause(new TermQuery(new Term("field", term)), BooleanClause.Occur.SHOULD));
}
Weight weight = s.createNormalizedWeight(q, true);
Weight weight = s.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
Scorer scorer = weight.scorer(s.leafContexts.get(0), null);
@ -249,7 +250,7 @@ public class TestBooleanQuery extends LuceneTestCase {
// verify exact match:
for(int iter2=0;iter2<10;iter2++) {
weight = s.createNormalizedWeight(q, true);
weight = s.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
scorer = weight.scorer(s.leafContexts.get(0), null);
if (VERBOSE) {

View File

@ -282,8 +282,8 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase {
static class BooleanQuery2 extends BooleanQuery {
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new BooleanWeight(this, searcher, needsScores, false) {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new BooleanWeight(this, searcher, postingsFlags, false) {
@Override
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
Scorer scorer = scorer(context, acceptDocs);

View File

@ -70,7 +70,7 @@ public class TestBooleanScorer extends LuceneTestCase {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new Weight(CrazyMustUseBulkScorerQuery.this) {
@Override
public Explanation explain(LeafReaderContext context, int doc) {

View File

@ -22,6 +22,7 @@ import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.similarities.DefaultSimilarity;
@ -67,7 +68,7 @@ public class TestConstantScoreQuery extends LuceneTestCase {
assertEquals("Score differs from expected", expectedScore, this.scorer.score(), 0);
count[0]++;
}
@Override
public boolean needsScores() {
return true;

View File

@ -34,6 +34,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.StoredDocument;
@ -178,7 +179,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
QueryUtils.check(random(), dq, s);
assertTrue(s.getTopReaderContext() instanceof LeafReaderContext);
final Weight dw = s.createNormalizedWeight(dq, true);
final Weight dw = s.createNormalizedWeight(dq, PostingsEnum.FLAG_FREQS);
LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext();
final Scorer ds = dw.scorer(context, context.reader().getLiveDocs());
final boolean skipOk = ds.advance(3) != DocIdSetIterator.NO_MORE_DOCS;
@ -194,7 +195,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
dq.add(tq("dek", "DOES_NOT_EXIST"));
assertTrue(s.getTopReaderContext() instanceof LeafReaderContext);
QueryUtils.check(random(), dq, s);
final Weight dw = s.createNormalizedWeight(dq, true);
final Weight dw = s.createNormalizedWeight(dq, PostingsEnum.FLAG_FREQS);
LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext();
final Scorer ds = dw.scorer(context, context.reader().getLiveDocs());
assertTrue("firsttime skipTo found no match",

View File

@ -31,6 +31,7 @@ import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.Term;
@ -124,7 +125,7 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
}
bq.setMinimumNumberShouldMatch(minShouldMatch);
BooleanWeight weight = (BooleanWeight) searcher.createNormalizedWeight(bq, true);
BooleanWeight weight = (BooleanWeight) searcher.createNormalizedWeight(bq, PostingsEnum.FLAG_FREQS);
switch (mode) {
case DOC_VALUES:

View File

@ -30,6 +30,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermsEnum;
@ -344,7 +345,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
MultiPhraseQuery query = new MultiPhraseQuery();
query.add(new Term[] { new Term("body", "this"), new Term("body", "that") });
query.add(new Term("body", "is"));
Weight weight = query.createWeight(searcher, true);
Weight weight = query.createWeight(searcher, PostingsEnum.FLAG_FREQS);
assertEquals(10f * 10f, weight.getValueForNormalization(), 0.001f);
writer.close();

View File

@ -25,6 +25,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
@ -63,15 +64,15 @@ public class TestNeedsScores extends LuceneTestCase {
Query required = new TermQuery(new Term("field", "this"));
Query prohibited = new TermQuery(new Term("field", "3"));
BooleanQuery bq = new BooleanQuery();
bq.add(new AssertNeedsScores(required, true), BooleanClause.Occur.MUST);
bq.add(new AssertNeedsScores(prohibited, false), BooleanClause.Occur.MUST_NOT);
bq.add(new AssertNeedsScores(required, PostingsEnum.FLAG_FREQS), BooleanClause.Occur.MUST);
bq.add(new AssertNeedsScores(prohibited, PostingsEnum.FLAG_NONE), BooleanClause.Occur.MUST_NOT);
assertEquals(4, searcher.search(bq, 5).totalHits); // we exclude 3
}
/** nested inside constant score query */
public void testConstantScoreQuery() throws Exception {
Query term = new TermQuery(new Term("field", "this"));
Query constantScore = new ConstantScoreQuery(new AssertNeedsScores(term, false));
Query constantScore = new ConstantScoreQuery(new AssertNeedsScores(term, PostingsEnum.FLAG_NONE));
assertEquals(5, searcher.search(constantScore, 5).totalHits);
}
@ -79,38 +80,38 @@ public class TestNeedsScores extends LuceneTestCase {
public void testQueryWrapperFilter() throws Exception {
Query query = new MatchAllDocsQuery();
Query term = new TermQuery(new Term("field", "this"));
Filter filter = new QueryWrapperFilter(new AssertNeedsScores(term, false));
Filter filter = new QueryWrapperFilter(new AssertNeedsScores(term, PostingsEnum.FLAG_NONE));
assertEquals(5, searcher.search(query, filter, 5).totalHits);
}
/** when not sorting by score */
public void testSortByField() throws Exception {
Query query = new AssertNeedsScores(new MatchAllDocsQuery(), false);
Query query = new AssertNeedsScores(new MatchAllDocsQuery(), PostingsEnum.FLAG_NONE);
assertEquals(5, searcher.search(query, 5, Sort.INDEXORDER).totalHits);
}
/** when sorting by score */
public void testSortByScore() throws Exception {
Query query = new AssertNeedsScores(new MatchAllDocsQuery(), true);
Query query = new AssertNeedsScores(new MatchAllDocsQuery(), PostingsEnum.FLAG_FREQS);
assertEquals(5, searcher.search(query, 5, Sort.RELEVANCE).totalHits);
}
/**
* Wraps a query, checking that the needsScores param
* Wraps a query, checking that the postingsFlags param
* passed to Weight.scorer is the expected value.
*/
static class AssertNeedsScores extends Query {
final Query in;
final boolean value;
final int value;
AssertNeedsScores(Query in, boolean value) {
AssertNeedsScores(Query in, int value) {
this.in = in;
this.value = value;
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final Weight w = in.createWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
final Weight w = in.createWeight(searcher, postingsFlags);
return new Weight(AssertNeedsScores.this) {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
@ -129,7 +130,7 @@ public class TestNeedsScores extends LuceneTestCase {
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
assertEquals("query=" + in, value, needsScores);
assertEquals("query=" + in, value, postingsFlags);
return w.scorer(context, acceptDocs);
}
};
@ -155,7 +156,7 @@ public class TestNeedsScores extends LuceneTestCase {
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((in == null) ? 0 : in.hashCode());
result = prime * result + (value ? 1231 : 1237);
result = prime * result + (value * 37);
return result;
}

View File

@ -21,6 +21,7 @@ import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
@ -107,7 +108,7 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true);
Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, PostingsEnum.FLAG_FREQS);
Scorer s = new SimpleScorer(fake);
TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(scores.length);
Collector c = new PositiveScoresOnlyCollector(tdc);

View File

@ -425,7 +425,7 @@ public class TestQueryRescorer extends LuceneTestCase {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new Weight(FixedScoreQuery.this) {

View File

@ -20,6 +20,7 @@ package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
@ -130,7 +131,7 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase {
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true);
Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, PostingsEnum.FLAG_FREQS);
Scorer s = new SimpleScorer(fake);
ScoreCachingCollector scc = new ScoreCachingCollector(scores.length);
scc.setScorer(s);

View File

@ -0,0 +1,172 @@
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.payloads.PayloadHelper;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
import java.io.IOException;
/**
 * Checks that a plain {@link TermQuery} scorer exposes positions, offsets and
 * payloads from the index when the corresponding postings flags are requested
 * via {@code IndexSearcher.createNormalizedWeight(query, flags)}.
 */
public class TestTermQueryPostings extends LuceneTestCase {

  private static final String FIELD = "f";

  // Postings of term "a": doc 0 -> position {0}, doc 1 -> {0,1,2,3},
  // doc 2 -> no match, doc 3 -> {2}. The assertions below follow this layout.
  private static final String[] DOC_FIELDS = new String[]{
      "a b c d",
      "a a a a",
      "c d e f",
      "b d a g"
  };

  /** Positions-only flags: the scorer must iterate positions per matching doc. */
  @Test
  public void testTermQueryPositions() throws IOException {

    Directory directory = newDirectory();
    IndexWriterConfig config = newIndexWriterConfig(new MockAnalyzer(random()));
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config);

    for (String content : DOC_FIELDS) {
      Document doc = new Document();
      doc.add(newField(FIELD, content, TextField.TYPE_NOT_STORED));
      writer.addDocument(doc);
    }

    // Wrap as a single leaf so there is exactly one LeafReaderContext to score.
    IndexReader reader = SlowCompositeReaderWrapper.wrap(writer.getReader());
    IndexSearcher searcher = new IndexSearcher(reader);
    writer.close();

    TermQuery tq = new TermQuery(new Term(FIELD, "a"));
    Weight weight = searcher.createNormalizedWeight(tq, PostingsEnum.FLAG_POSITIONS);

    LeafReaderContext ctx = (LeafReaderContext) searcher.getTopReaderContext();
    Scorer scorer = weight.scorer(ctx, null);

    // JUnit convention: expected value first, actual second.
    assertEquals(0, scorer.nextDoc());
    assertEquals(0, scorer.nextPosition());

    assertEquals(1, scorer.nextDoc());
    assertEquals(0, scorer.nextPosition());
    assertEquals(1, scorer.nextPosition());
    assertEquals(2, scorer.nextPosition());
    assertEquals(3, scorer.nextPosition());

    assertEquals(3, scorer.nextDoc());
    assertEquals(2, scorer.nextPosition());

    reader.close();
    directory.close();
  }

  /** Offsets flags: start/end character offsets must be readable per position. */
  @Test
  public void testTermQueryOffsets() throws IOException {

    Directory directory = newDirectory();
    IndexWriterConfig config = newIndexWriterConfig(new MockAnalyzer(random()));
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config);

    // Offsets are not indexed by default; request them explicitly.
    FieldType fieldType = new FieldType(TextField.TYPE_NOT_STORED);
    fieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);

    for (String content : DOC_FIELDS) {
      Document doc = new Document();
      doc.add(newField(FIELD, content, fieldType));
      writer.addDocument(doc);
    }

    IndexReader reader = SlowCompositeReaderWrapper.wrap(writer.getReader());
    IndexSearcher searcher = new IndexSearcher(reader);
    writer.close();

    TermQuery tq = new TermQuery(new Term(FIELD, "a"));
    Weight weight = searcher.createNormalizedWeight(tq, PostingsEnum.FLAG_OFFSETS);

    LeafReaderContext ctx = (LeafReaderContext) searcher.getTopReaderContext();
    Scorer scorer = weight.scorer(ctx, null);

    // Each token is a single char followed by a space, so the term at
    // position p spans characters [2p, 2p+1).
    assertEquals(0, scorer.nextDoc());
    assertEquals(0, scorer.nextPosition());
    assertEquals(0, scorer.startOffset());
    assertEquals(1, scorer.endOffset());

    assertEquals(1, scorer.nextDoc());
    assertEquals(0, scorer.nextPosition());
    assertEquals(0, scorer.startOffset());
    assertEquals(1, scorer.endOffset());
    assertEquals(1, scorer.nextPosition());
    assertEquals(2, scorer.startOffset());
    assertEquals(3, scorer.endOffset());
    assertEquals(2, scorer.nextPosition());
    assertEquals(4, scorer.startOffset());
    assertEquals(5, scorer.endOffset());
    assertEquals(3, scorer.nextPosition());
    assertEquals(6, scorer.startOffset());
    assertEquals(7, scorer.endOffset());

    assertEquals(3, scorer.nextDoc());
    assertEquals(2, scorer.nextPosition());
    assertEquals(4, scorer.startOffset());
    assertEquals(5, scorer.endOffset());

    reader.close();
    directory.close();
  }

  /** Payloads flags: the per-position payload must be retrievable. */
  @Test
  public void testTermQueryPayloads() throws Exception {

    PayloadHelper helper = new PayloadHelper();
    IndexSearcher searcher = helper.setUp(random(), new DefaultSimilarity(), 1000);

    TermQuery tq = new TermQuery(new Term(PayloadHelper.FIELD, "seventy"));
    Weight weight = searcher.createNormalizedWeight(tq, PostingsEnum.FLAG_PAYLOADS);

    for (LeafReaderContext ctx : searcher.leafContexts) {
      Scorer scorer = weight.scorer(ctx, null);
      // scorer() returns null for segments that do not contain the term.
      if (scorer == null)
        continue;
      if (scorer.nextDoc() == DocIdSetIterator.NO_MORE_DOCS)
        continue;
      scorer.nextPosition();
      BytesRef payload = scorer.getPayload();
      // PayloadHelper writes a single-byte payload on every token.
      assertEquals(1, payload.length);
    }

    helper.tearDown();
  }
}

View File

@ -26,6 +26,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.Term;
@ -75,7 +76,7 @@ public class TestTermScorer extends LuceneTestCase {
Term allTerm = new Term(FIELD, "all");
TermQuery termQuery = new TermQuery(allTerm);
Weight weight = indexSearcher.createNormalizedWeight(termQuery, true);
Weight weight = indexSearcher.createNormalizedWeight(termQuery, PostingsEnum.FLAG_FREQS);
assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
LeafReaderContext context = (LeafReaderContext)indexSearcher.getTopReaderContext();
BulkScorer ts = weight.bulkScorer(context, context.reader().getLiveDocs());
@ -137,7 +138,7 @@ public class TestTermScorer extends LuceneTestCase {
Term allTerm = new Term(FIELD, "all");
TermQuery termQuery = new TermQuery(allTerm);
Weight weight = indexSearcher.createNormalizedWeight(termQuery, true);
Weight weight = indexSearcher.createNormalizedWeight(termQuery, PostingsEnum.FLAG_FREQS);
assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
LeafReaderContext context = (LeafReaderContext) indexSearcher.getTopReaderContext();
Scorer ts = weight.scorer(context, context.reader().getLiveDocs());
@ -156,7 +157,7 @@ public class TestTermScorer extends LuceneTestCase {
Term allTerm = new Term(FIELD, "all");
TermQuery termQuery = new TermQuery(allTerm);
Weight weight = indexSearcher.createNormalizedWeight(termQuery, true);
Weight weight = indexSearcher.createNormalizedWeight(termQuery, PostingsEnum.FLAG_FREQS);
assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
LeafReaderContext context = (LeafReaderContext) indexSearcher.getTopReaderContext();
Scorer ts = weight.scorer(context, context.reader().getLiveDocs());

View File

@ -26,6 +26,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.CompositeReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.Term;
@ -251,7 +252,7 @@ public class TestTopDocsMerge extends LuceneTestCase {
}
// ... then all shards:
final Weight w = searcher.createNormalizedWeight(query, true);
final Weight w = searcher.createNormalizedWeight(query, PostingsEnum.FLAG_FREQS);
final TopDocs[] shardHits;
if (sort == null) {

View File

@ -181,7 +181,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
*/
public void testSpanNearScorerSkipTo1() throws Exception {
SpanNearQuery q = makeQuery();
Weight w = searcher.createNormalizedWeight(q, true);
Weight w = searcher.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
IndexReaderContext topReaderContext = searcher.getTopReaderContext();
LeafReaderContext leave = topReaderContext.leaves().get(0);
Scorer s = w.scorer(leave, leave.reader().getLiveDocs());

View File

@ -430,7 +430,7 @@ public class TestSpans extends LuceneTestCase {
slop,
ordered);
spanScorer = searcher.createNormalizedWeight(snq, true).scorer(ctx, ctx.reader().getLiveDocs());
spanScorer = searcher.createNormalizedWeight(snq, PostingsEnum.FLAG_FREQS).scorer(ctx, ctx.reader().getLiveDocs());
} finally {
searcher.setSimilarity(oldSim);
}

View File

@ -73,8 +73,8 @@ class DrillSidewaysQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final Weight baseWeight = baseQuery.createWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
final Weight baseWeight = baseQuery.createWeight(searcher, postingsFlags);
final Object[] drillDowns = new Object[drillDownQueries.length];
for(int dim=0;dim<drillDownQueries.length;dim++) {
Query query = drillDownQueries[dim];
@ -84,7 +84,7 @@ class DrillSidewaysQuery extends Query {
} else {
// TODO: would be nice if we could say "we will do no
// scoring" here....
drillDowns[dim] = searcher.rewrite(query).createWeight(searcher, needsScores);
drillDowns[dim] = searcher.rewrite(query).createWeight(searcher, postingsFlags);
}
}

View File

@ -44,6 +44,7 @@ import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
@ -1175,7 +1176,7 @@ public class TestGrouping extends LuceneTestCase {
System.out.println("TEST: " + subSearchers.length + " shards: " + Arrays.toString(subSearchers) + " canUseIDV=" + canUseIDV);
}
// Run 1st pass collector to get top groups per shard
final Weight w = topSearcher.createNormalizedWeight(query, true);
final Weight w = topSearcher.createNormalizedWeight(query, PostingsEnum.FLAG_FREQS);
final List<Collection<SearchGroup<BytesRef>>> shardGroups = new ArrayList<>();
List<AbstractFirstPassGroupingCollector<?>> firstPassGroupingCollectors = new ArrayList<>();
AbstractFirstPassGroupingCollector<?> firstPassCollector = null;

View File

@ -123,8 +123,8 @@ class TermsIncludingScoreQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final Weight originalWeight = originalQuery.createWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
final Weight originalWeight = originalQuery.createWeight(searcher, postingsFlags);
return new Weight(TermsIncludingScoreQuery.this) {
private TermsEnum segmentTermsEnum;

View File

@ -24,6 +24,7 @@ import java.util.Set;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@ -83,8 +84,9 @@ public class ToChildBlockJoinQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new ToChildBlockJoinWeight(this, parentQuery.createWeight(searcher, needsScores), parentsFilter, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
boolean needsScores = (postingsFlags & PostingsEnum.FLAG_FREQS) != 0;
return new ToChildBlockJoinWeight(this, parentQuery.createWeight(searcher, postingsFlags), parentsFilter, needsScores);
}
/** Return our parent query. */

View File

@ -120,8 +120,8 @@ public class ToParentBlockJoinQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new BlockJoinWeight(this, childQuery.createWeight(searcher, needsScores), parentsFilter, scoreMode);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new BlockJoinWeight(this, childQuery.createWeight(searcher, postingsFlags), parentsFilter, scoreMode);
}
/** Return our child query. */

View File

@ -1188,7 +1188,7 @@ public class TestBlockJoin extends LuceneTestCase {
new TermQuery(new Term("parent", "1"))));
ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg);
Weight weight = s.createNormalizedWeight(q, true);
Weight weight = s.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0), null);
assertEquals(1, disi.advance(1));
r.close();
@ -1222,7 +1222,7 @@ public class TestBlockJoin extends LuceneTestCase {
new TermQuery(new Term("isparent", "yes"))));
ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg);
Weight weight = s.createNormalizedWeight(q, true);
Weight weight = s.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0), null);
assertEquals(2, disi.advance(0));
r.close();

View File

@ -54,8 +54,8 @@ public class BoostingQuery extends Query {
public Query rewrite(IndexReader reader) throws IOException {
BooleanQuery result = new BooleanQuery() {
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new BooleanWeight(this, searcher, needsScores, false) {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new BooleanWeight(this, searcher, postingsFlags, false) {
@Override
public float coord(int overlap, int max) {

View File

@ -187,12 +187,12 @@ public class CustomScoreQuery extends Query {
boolean qStrict;
float queryWeight;
public CustomWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public CustomWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
super(CustomScoreQuery.this);
this.subQueryWeight = subQuery.createWeight(searcher, needsScores);
this.subQueryWeight = subQuery.createWeight(searcher, postingsFlags);
this.valSrcWeights = new Weight[scoringQueries.length];
for(int i = 0; i < scoringQueries.length; i++) {
this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher, needsScores);
this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher, postingsFlags);
}
this.qStrict = strict;
}
@ -368,8 +368,8 @@ public class CustomScoreQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new CustomWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new CustomWeight(searcher, postingsFlags);
}
/**

View File

@ -68,8 +68,8 @@ public class BoostedQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new BoostedQuery.BoostedWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new BoostedQuery.BoostedWeight(searcher, postingsFlags);
}
private class BoostedWeight extends Weight {
@ -77,10 +77,10 @@ public class BoostedQuery extends Query {
Weight qWeight;
Map fcontext;
public BoostedWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public BoostedWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
super(BoostedQuery.this);
this.searcher = searcher;
this.qWeight = q.createWeight(searcher, needsScores);
this.qWeight = q.createWeight(searcher, postingsFlags);
this.fcontext = ValueSource.newContext(searcher);
boostVal.createWeight(fcontext,searcher);
}

View File

@ -204,7 +204,7 @@ public class FunctionQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new FunctionQuery.FunctionWeight(searcher);
}

View File

@ -21,6 +21,7 @@ import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
@ -72,7 +73,7 @@ public class QueryValueSource extends ValueSource {
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
Weight w = searcher.createNormalizedWeight(q, true);
Weight w = searcher.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
context.put(this, w);
}
}

View File

@ -188,7 +188,7 @@ public class TermAutomatonQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
IndexReaderContext context = searcher.getTopReaderContext();
Map<Integer,TermContext> termStates = new HashMap<>();

View File

@ -56,8 +56,8 @@ public class AssertingIndexSearcher extends IndexSearcher {
/** Ensures, that the returned {@code Weight} is not normalized again, which may produce wrong scores. */
@Override
public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
final Weight w = super.createNormalizedWeight(query, needsScores);
public Weight createNormalizedWeight(Query query, int postingsFlags) throws IOException {
final Weight w = super.createNormalizedWeight(query, postingsFlags);
return new AssertingWeight(random, w) {
@Override

View File

@ -42,8 +42,8 @@ public class AssertingQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return AssertingWeight.wrap(new Random(random.nextLong()), in.createWeight(searcher, needsScores));
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return AssertingWeight.wrap(new Random(random.nextLong()), in.createWeight(searcher, postingsFlags));
}
@Override

View File

@ -32,6 +32,7 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
@ -261,7 +262,7 @@ public class QueryUtils {
lastDoc[0] = doc;
try {
if (scorer == null) {
Weight w = s.createNormalizedWeight(q, true);
Weight w = s.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
LeafReaderContext context = readerContextArray.get(leafPtr);
scorer = w.scorer(context, context.reader().getLiveDocs());
}
@ -313,7 +314,7 @@ public class QueryUtils {
final LeafReader previousReader = lastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
indexSearcher.setSimilarity(s.getSimilarity());
Weight w = indexSearcher.createNormalizedWeight(q, true);
Weight w = indexSearcher.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
LeafReaderContext ctx = (LeafReaderContext)indexSearcher.getTopReaderContext();
Scorer scorer = w.scorer(ctx, ctx.reader().getLiveDocs());
if (scorer != null) {
@ -335,7 +336,7 @@ public class QueryUtils {
final LeafReader previousReader = lastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader, false);
indexSearcher.setSimilarity(s.getSimilarity());
Weight w = indexSearcher.createNormalizedWeight(q, true);
Weight w = indexSearcher.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
LeafReaderContext ctx = previousReader.getContext();
Scorer scorer = w.scorer(ctx, ctx.reader().getLiveDocs());
if (scorer != null) {
@ -367,7 +368,7 @@ public class QueryUtils {
try {
long startMS = System.currentTimeMillis();
for (int i=lastDoc[0]+1; i<=doc; i++) {
Weight w = s.createNormalizedWeight(q, true);
Weight w = s.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
Scorer scorer = w.scorer(context.get(leafPtr), liveDocs);
Assert.assertTrue("query collected "+doc+" but skipTo("+i+") says no more docs!",scorer.advance(i) != DocIdSetIterator.NO_MORE_DOCS);
Assert.assertEquals("query collected "+doc+" but skipTo("+i+") got to "+scorer.docID(),doc,scorer.docID());
@ -400,7 +401,7 @@ public class QueryUtils {
final LeafReader previousReader = lastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
indexSearcher.setSimilarity(s.getSimilarity());
Weight w = indexSearcher.createNormalizedWeight(q, true);
Weight w = indexSearcher.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext(), previousReader.getLiveDocs());
if (scorer != null) {
boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
@ -421,7 +422,7 @@ public class QueryUtils {
final LeafReader previousReader = lastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
indexSearcher.setSimilarity(s.getSimilarity());
Weight w = indexSearcher.createNormalizedWeight(q, true);
Weight w = indexSearcher.createNormalizedWeight(q, PostingsEnum.FLAG_FREQS);
Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext(), previousReader.getLiveDocs());
if (scorer != null) {
boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
@ -432,7 +433,7 @@ public class QueryUtils {
/** Check that the scorer and bulk scorer advance consistently. */
public static void checkBulkScorerSkipTo(Random r, Query query, IndexSearcher searcher) throws IOException {
Weight weight = searcher.createNormalizedWeight(query, true);
Weight weight = searcher.createNormalizedWeight(query, PostingsEnum.FLAG_FREQS);
for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
final Scorer scorer = weight.scorer(context, context.reader().getLiveDocs());
final BulkScorer bulkScorer = weight.bulkScorer(context, context.reader().getLiveDocs());

View File

@ -563,7 +563,7 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
// if we were supposed to use bboxQuery, then we should have been rewritten using that query
assert bboxQuery == null;
return new SpatialWeight(searcher);

View File

@ -80,7 +80,7 @@ public class ExportQParserPlugin extends QParserPlugin {
}
public Weight createWeight(IndexSearcher searcher) throws IOException {
return mainQuery.createWeight(searcher, true);
return mainQuery.createWeight(searcher, PostingsEnum.FLAG_FREQS);
}
public Query rewrite(IndexReader reader) throws IOException {

View File

@ -210,7 +210,7 @@ class JoinQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return new JoinQueryWeight((SolrIndexSearcher)searcher);
}

View File

@ -27,6 +27,7 @@ import com.carrotsearch.hppc.IntFloatOpenHashMap;
import com.carrotsearch.hppc.IntIntOpenHashMap;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@ -171,8 +172,9 @@ public class ReRankQParserPlugin extends QParserPlugin {
}
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException{
return new ReRankWeight(mainQuery, reRankQuery, reRankWeight, searcher, needsScores);
@Override
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException{
return new ReRankWeight(mainQuery, reRankQuery, reRankWeight, searcher, postingsFlags);
}
}
@ -182,12 +184,12 @@ public class ReRankQParserPlugin extends QParserPlugin {
private Weight mainWeight;
private double reRankWeight;
public ReRankWeight(Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher, boolean needsScores) throws IOException {
public ReRankWeight(Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher, int postingsFlags) throws IOException {
super(mainQuery);
this.reRankQuery = reRankQuery;
this.searcher = searcher;
this.reRankWeight = reRankWeight;
this.mainWeight = mainQuery.createWeight(searcher, needsScores);
this.mainWeight = mainQuery.createWeight(searcher, postingsFlags);
}
public float getValueForNormalization() throws IOException {

View File

@ -229,7 +229,7 @@ public class SolrConstantScoreQuery extends ConstantScoreQuery implements Extend
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) {
try {
return new SolrConstantScoreQuery.ConstantWeight(searcher);
} catch (IOException e) {

View File

@ -1113,7 +1113,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
List<Weight> weights = new ArrayList<>(notCached.size());
for (Query q : notCached) {
Query qq = QueryUtils.makeQueryable(q);
weights.add(createNormalizedWeight(qq, true));
weights.add(createNormalizedWeight(qq, PostingsEnum.FLAG_FREQS));
}
pf.filter = new FilterImpl(answer, weights);
} else {

View File

@ -53,8 +53,8 @@ public class WrappedQuery extends ExtendedQueryBase {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return q.createWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
return q.createWeight(searcher, postingsFlags);
}
@Override

View File

@ -53,8 +53,8 @@ public class IgnoreAcceptDocsQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
Weight inner = q.createWeight(searcher, needsScores);
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
Weight inner = q.createWeight(searcher, postingsFlags);
return new IADWeight(inner);
}

View File

@ -64,10 +64,10 @@ final class DeleteByQueryWrapper extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, int postingsFlags) throws IOException {
final LeafReader wrapped = wrap((LeafReader) searcher.getIndexReader());
final IndexSearcher privateContext = new IndexSearcher(wrapped);
final Weight inner = in.createWeight(privateContext, needsScores);
final Weight inner = in.createWeight(privateContext, postingsFlags);
return new Weight(DeleteByQueryWrapper.this) {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException { throw new UnsupportedOperationException(); }

View File

@ -32,6 +32,7 @@ import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.IndexSearcher;
@ -114,8 +115,9 @@ public class TestRankQueryPlugin extends QParserPlugin {
return false;
}
public Weight createWeight(IndexSearcher indexSearcher, boolean needsScores) throws IOException{
return q.createWeight(indexSearcher, needsScores);
@Override
public Weight createWeight(IndexSearcher indexSearcher, int postingsFlags) throws IOException{
return q.createWeight(indexSearcher, postingsFlags);
}
public void setBoost(float boost) {