LUCENE-3514: also support IndexSearcher.searchAfter when Sort is used

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1339137 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael McCandless 2012-05-16 12:28:57 +00:00
parent 560a287e68
commit ad7914c5b2
18 changed files with 807 additions and 103 deletions

View File

@ -266,6 +266,11 @@ Changes in backwards compatibility policy
* LUCENE-3970: Rename Fields.getUniqueFieldCount -> .size() and * LUCENE-3970: Rename Fields.getUniqueFieldCount -> .size() and
Terms.getUniqueTermCount -> .size(). (Iulius Curt via Mike McCandless) Terms.getUniqueTermCount -> .size(). (Iulius Curt via Mike McCandless)
* LUCENE-3514: IndexSearcher.setDefaultFieldSortScoring was removed
and replaced with per-search control via new expert search methods
that take two booleans indicating whether hit scores and max
score should be computed. (Mike McCandless)
Changes in Runtime Behavior Changes in Runtime Behavior
* LUCENE-2846: omitNorms now behaves like omitTermFrequencyAndPositions, if you * LUCENE-2846: omitNorms now behaves like omitTermFrequencyAndPositions, if you
@ -857,6 +862,10 @@ New features
* LUCENE-4039: Add AddIndexesTask to benchmark, which uses IW.addIndexes. * LUCENE-4039: Add AddIndexesTask to benchmark, which uses IW.addIndexes.
(Shai Erera) (Shai Erera)
* LUCENE-3514: Added IndexSearcher.searchAfter when Sort is used,
returning results after a specified FieldDoc for deep
paging. (Mike McCandless)
Optimizations Optimizations
* LUCENE-2588: Don't store unnecessary suffixes when writing the terms * LUCENE-2588: Don't store unnecessary suffixes when writing the terms

View File

@ -190,6 +190,10 @@ public abstract class FieldComparator<T> {
} }
} }
/** Returns negative result if the doc's value is less
* than the provided value. */
public abstract int compareDocToValue(int doc, T value) throws IOException;
public static abstract class NumericComparator<T extends Number> extends FieldComparator<T> { public static abstract class NumericComparator<T extends Number> extends FieldComparator<T> {
protected final T missingValue; protected final T missingValue;
protected final String field; protected final String field;
@ -274,9 +278,19 @@ public abstract class FieldComparator<T> {
public Byte value(int slot) { public Byte value(int slot) {
return Byte.valueOf(values[slot]); return Byte.valueOf(values[slot]);
} }
@Override
public int compareDocToValue(int doc, Byte value) {
  // Value this segment stores for the doc; a raw 0 may mean "no value".
  byte segValue = currentReaderValues[doc];
  // Only pay for the Bits.get lookup when the raw value is 0, since a
  // non-zero value always means the doc really has a value.
  if (docsWithField != null && segValue == 0 && !docsWithField.get(doc)) {
    segValue = missingValue;
  }
  // Both operands promote to int, so the subtraction cannot overflow.
  return segValue - value.byteValue();
}
} }
/** Parses field's values as double (using {@link /** Parses field's values as double (using {@link
* FieldCache#getDoubles} and sorts by ascending value */ * FieldCache#getDoubles} and sorts by ascending value */
public static final class DoubleComparator extends NumericComparator<Double> { public static final class DoubleComparator extends NumericComparator<Double> {
@ -351,6 +365,24 @@ public abstract class FieldComparator<T> {
public Double value(int slot) { public Double value(int slot) {
return Double.valueOf(values[slot]); return Double.valueOf(values[slot]);
} }
@Override
public int compareDocToValue(int doc, Double valueObj) {
  final double other = valueObj.doubleValue();
  double segValue = currentReaderValues[doc];
  // A raw 0 may really mean "missing"; only then consult docsWithField,
  // keeping the common (present, non-zero) path cheap.
  if (docsWithField != null && segValue == 0 && !docsWithField.get(doc)) {
    segValue = missingValue;
  }
  // Manual three-way compare preserves the original NaN behavior
  // (any comparison involving NaN falls through to 0).
  return segValue < other ? -1 : (segValue > other ? 1 : 0);
}
} }
/** Uses float index values to sort by ascending value */ /** Uses float index values to sort by ascending value */
@ -415,6 +447,19 @@ public abstract class FieldComparator<T> {
public Double value(int slot) { public Double value(int slot) {
return Double.valueOf(values[slot]); return Double.valueOf(values[slot]);
} }
@Override
public int compareDocToValue(int doc, Double valueObj) {
  // Doc values are read as float but the comparison runs in double space.
  final double other = valueObj.doubleValue();
  final double segValue = currentReaderValues.getFloat(doc);
  // Manual three-way compare keeps the original NaN semantics.
  return segValue < other ? -1 : (segValue > other ? 1 : 0);
}
} }
/** Parses field's values as float (using {@link /** Parses field's values as float (using {@link
@ -494,6 +539,24 @@ public abstract class FieldComparator<T> {
public Float value(int slot) { public Float value(int slot) {
return Float.valueOf(values[slot]); return Float.valueOf(values[slot]);
} }
@Override
public int compareDocToValue(int doc, Float valueObj) {
  final float other = valueObj.floatValue();
  float segValue = currentReaderValues[doc];
  // A raw 0 may mean the doc has no value; only then check the bits,
  // so the common (present, non-zero) case skips the Bits.get call.
  if (docsWithField != null && segValue == 0 && !docsWithField.get(doc)) {
    segValue = missingValue;
  }
  // Manual three-way compare preserves the original NaN behavior.
  return segValue < other ? -1 : (segValue > other ? 1 : 0);
}
} }
/** Parses field's values as short (using {@link /** Parses field's values as short (using {@link
@ -556,6 +619,18 @@ public abstract class FieldComparator<T> {
public Short value(int slot) { public Short value(int slot) {
return Short.valueOf(values[slot]); return Short.valueOf(values[slot]);
} }
@Override
public int compareDocToValue(int doc, Short valueObj) {
  final short other = valueObj.shortValue();
  short segValue = currentReaderValues[doc];
  // Raw 0 may mean "missing"; consult docsWithField only in that case.
  if (docsWithField != null && segValue == 0 && !docsWithField.get(doc)) {
    segValue = missingValue;
  }
  // Shorts promote to int, so the subtraction cannot overflow.
  return segValue - other;
}
} }
/** Parses field's values as int (using {@link /** Parses field's values as int (using {@link
@ -640,6 +715,24 @@ public abstract class FieldComparator<T> {
public Integer value(int slot) { public Integer value(int slot) {
return Integer.valueOf(values[slot]); return Integer.valueOf(values[slot]);
} }
@Override
public int compareDocToValue(int doc, Integer valueObj) {
  final int other = valueObj.intValue();
  int segValue = currentReaderValues[doc];
  // Raw 0 may mean "missing"; only then is docsWithField consulted.
  if (docsWithField != null && segValue == 0 && !docsWithField.get(doc)) {
    segValue = missingValue;
  }
  // Explicit three-way compare: plain int subtraction could overflow.
  return segValue < other ? -1 : (segValue > other ? 1 : 0);
}
} }
/** Loads int index values and sorts by ascending value. */ /** Loads int index values and sorts by ascending value. */
@ -708,6 +801,19 @@ public abstract class FieldComparator<T> {
public Long value(int slot) { public Long value(int slot) {
return Long.valueOf(values[slot]); return Long.valueOf(values[slot]);
} }
@Override
public int compareDocToValue(int doc, Long valueObj) {
  final long other = valueObj.longValue();
  // The source stores ints but this comparator exposes Long values,
  // so widen the per-doc value before comparing.
  final long segValue = currentReaderValues.getInt(doc);
  return segValue < other ? -1 : (segValue > other ? 1 : 0);
}
} }
/** Parses field's values as long (using {@link /** Parses field's values as long (using {@link
@ -788,6 +894,24 @@ public abstract class FieldComparator<T> {
public Long value(int slot) { public Long value(int slot) {
return Long.valueOf(values[slot]); return Long.valueOf(values[slot]);
} }
@Override
public int compareDocToValue(int doc, Long valueObj) {
  final long other = valueObj.longValue();
  long segValue = currentReaderValues[doc];
  // A raw 0 may really mean "missing"; only then pay for the
  // docsWithField lookup.
  if (docsWithField != null && segValue == 0 && !docsWithField.get(doc)) {
    segValue = missingValue;
  }
  // Explicit three-way compare: long subtraction could overflow.
  return segValue < other ? -1 : (segValue > other ? 1 : 0);
}
} }
/** Sorts by descending relevance. NOTE: if you are /** Sorts by descending relevance. NOTE: if you are
@ -815,12 +939,14 @@ public abstract class FieldComparator<T> {
@Override @Override
public int compareBottom(int doc) throws IOException { public int compareBottom(int doc) throws IOException {
float score = scorer.score(); float score = scorer.score();
assert !Float.isNaN(score);
return bottom > score ? -1 : (bottom < score ? 1 : 0); return bottom > score ? -1 : (bottom < score ? 1 : 0);
} }
@Override @Override
public void copy(int slot, int doc) throws IOException { public void copy(int slot, int doc) throws IOException {
scores[slot] = scorer.score(); scores[slot] = scorer.score();
assert !Float.isNaN(scores[slot]);
} }
@Override @Override
@ -857,6 +983,22 @@ public abstract class FieldComparator<T> {
// sorts descending: // sorts descending:
return second.compareTo(first); return second.compareTo(first);
} }
@Override
public int compareDocToValue(int doc, Float valueObj) throws IOException {
  final float other = valueObj.floatValue();
  final float docScore = scorer.score();
  assert !Float.isNaN(docScore);
  // Relevance sorts descending, so the sign is the reverse of
  // FloatComparator's ascending order.
  return docScore < other ? 1 : (docScore > other ? -1 : 0);
}
} }
/** Sorts by ascending docID */ /** Sorts by ascending docID */
@ -904,6 +1046,19 @@ public abstract class FieldComparator<T> {
public Integer value(int slot) { public Integer value(int slot) {
return Integer.valueOf(docIDs[slot]); return Integer.valueOf(docIDs[slot]);
} }
@Override
public int compareDocToValue(int doc, Integer valueObj) {
  final int other = valueObj.intValue();
  // Compare global docIDs: segment-local doc plus this segment's base.
  final int globalDoc = docBase + doc;
  return globalDoc < other ? -1 : (globalDoc > other ? 1 : 0);
}
} }
/** Sorts by field's natural Term sort order, using /** Sorts by field's natural Term sort order, using
@ -998,6 +1153,20 @@ public abstract class FieldComparator<T> {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@Override
public int compareDocToValue(int doc, BytesRef value) {
  final BytesRef segTerm = termsIndex.getTerm(doc, tempBR);
  // A null term (doc has no value for the field) sorts before every
  // real term; two nulls compare equal.
  if (segTerm == null) {
    return value == null ? 0 : -1;
  }
  if (value == null) {
    return 1;
  }
  return segTerm.compareTo(value);
}
/** Base class for specialized (per bit width of the /** Base class for specialized (per bit width of the
* ords) per-segment comparator. NOTE: this is messy; * ords) per-segment comparator. NOTE: this is messy;
* we do this only because hotspot can't reliably inline * we do this only because hotspot can't reliably inline
@ -1038,6 +1207,11 @@ public abstract class FieldComparator<T> {
} }
return val1.compareTo(val2); return val1.compareTo(val2);
} }
// Per-segment specialization: forward to the enclosing comparator's
// null-aware compareDocToValue implementation.
@Override
public int compareDocToValue(int doc, BytesRef value) {
return TermOrdValComparator.this.compareDocToValue(doc, value);
}
} }
// Used per-segment when bit width of doc->ord is 8: // Used per-segment when bit width of doc->ord is 8:
@ -1385,6 +1559,11 @@ public abstract class FieldComparator<T> {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@Override
public int compareDocToValue(int doc, BytesRef value) {
// NOTE(review): assumes getBytes never returns null here (i.e. docs
// without a value yield an empty BytesRef) — confirm against the
// DocTerms implementation backing termsIndex.
return termsIndex.getBytes(doc, tempBR).compareTo(value);
}
// TODO: would be nice to share these specialized impls // TODO: would be nice to share these specialized impls
// w/ TermOrdValComparator // w/ TermOrdValComparator
@ -1422,6 +1601,11 @@ public abstract class FieldComparator<T> {
assert val2 != null; assert val2 != null;
return comp.compare(val1, val2); return comp.compare(val1, val2);
} }
// Per-segment specialization: forward to the enclosing comparator's
// compareDocToValue implementation.
@Override
public int compareDocToValue(int doc, BytesRef value) {
return TermOrdValDocValuesComparator.this.compareDocToValue(doc, value);
}
} }
// Used per-segment when bit width of doc->ord is 8: // Used per-segment when bit width of doc->ord is 8:
@ -1801,6 +1985,11 @@ public abstract class FieldComparator<T> {
} }
return val1.compareTo(val2); return val1.compareTo(val2);
} }
@Override
public int compareDocToValue(int doc, BytesRef value) {
// NOTE(review): assumes getTerm never returns null here — unlike
// TermOrdValComparator this path has no null handling; confirm the
// backing DocTerms always materializes a BytesRef.
return docTerms.getTerm(doc, tempBR).compareTo(value);
}
} }
/** Sorts by field's natural Term sort order. All /** Sorts by field's natural Term sort order. All
@ -1869,6 +2058,11 @@ public abstract class FieldComparator<T> {
assert val2 != null; assert val2 != null;
return val1.compareTo(val2); return val1.compareTo(val2);
} }
@Override
public int compareDocToValue(int doc, BytesRef value) {
// NOTE(review): assumes getBytes never returns null (missing values
// presumably come back as an empty BytesRef) — confirm in the source.
return docTerms.getBytes(doc, tempBR).compareTo(value);
}
} }
final protected static int binarySearch(BytesRef br, DocTermsIndex a, BytesRef key) { final protected static int binarySearch(BytesRef br, DocTermsIndex a, BytesRef key) {

View File

@ -304,14 +304,51 @@ public class IndexSearcher {
* <code>sort</code>. * <code>sort</code>.
* *
* <p>NOTE: this does not compute scores by default; use * <p>NOTE: this does not compute scores by default; use
* {@link IndexSearcher#setDefaultFieldSortScoring} to * {@link IndexSearcher#search(Query,Filter,int,Sort,boolean,boolean)} to
* enable scoring. * control scoring.
* *
* @throws BooleanQuery.TooManyClauses * @throws BooleanQuery.TooManyClauses
*/ */
public TopFieldDocs search(Query query, Filter filter, int n, public TopFieldDocs search(Query query, Filter filter, int n,
Sort sort) throws IOException { Sort sort) throws IOException {
return search(createNormalizedWeight(wrapFilter(query, filter)), n, sort); return search(createNormalizedWeight(wrapFilter(query, filter)), n, sort, false, false);
}
/** Expert: search with arbitrary sorting plus per-request control over
 *  score computation.  Finds the top <code>n</code> hits for
 *  <code>query</code>, applying <code>filter</code> if non-null, and
 *  sorting the hits by the criteria in <code>sort</code>.
 *
 *  If <code>doDocScores</code> is <code>true</code> then the score of
 *  each hit will be computed and returned.  If <code>doMaxScore</code>
 *  is <code>true</code> then the maximum score over all collected hits
 *  will be computed.
 *
 *  @throws BooleanQuery.TooManyClauses
 */
public TopFieldDocs search(Query query, Filter filter, int n,
                           Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException {
  final Weight weight = createNormalizedWeight(wrapFilter(query, filter));
  return search(weight, n, sort, doDocScores, doMaxScore);
}
/** Finds the top <code>n</code> hits for <code>query</code>, applying
 *  <code>filter</code> if non-null, where all results sort after a
 *  previous result (<code>after</code>).
 *  <p>
 *  Pass the bottom hit of the previous page as <code>after</code> to
 *  page efficiently ('deep-paging') through large result sets.
 *
 *  @throws BooleanQuery.TooManyClauses
 */
public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort) throws IOException {
  if (after != null && !(after instanceof FieldDoc)) {
    // TODO: if we fix type safety of TopFieldDocs we can
    // remove this
    throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
  }
  final Weight weight = createNormalizedWeight(wrapFilter(query, filter));
  return search(weight, (FieldDoc) after, n, sort, true, false, false);
} }
/** /**
@ -324,7 +361,52 @@ public class IndexSearcher {
*/ */
public TopFieldDocs search(Query query, int n, public TopFieldDocs search(Query query, int n,
Sort sort) throws IOException { Sort sort) throws IOException {
return search(createNormalizedWeight(query), n, sort); return search(createNormalizedWeight(query), n, sort, false, false);
}
/** Finds the top <code>n</code> hits for <code>query</code> where all
 *  results sort after a previous result (<code>after</code>).
 *  <p>
 *  Pass the bottom hit of the previous page as <code>after</code> to
 *  page efficiently ('deep-paging') through large result sets.
 *
 *  @throws BooleanQuery.TooManyClauses
 */
public TopDocs searchAfter(ScoreDoc after, Query query, int n, Sort sort) throws IOException {
  if (after != null && !(after instanceof FieldDoc)) {
    // TODO: if we fix type safety of TopFieldDocs we can
    // remove this
    throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
  }
  final Weight weight = createNormalizedWeight(query);
  return search(weight, (FieldDoc) after, n, sort, true, false, false);
}
/** Finds the top <code>n</code> hits for <code>query</code>, applying
 *  <code>filter</code> if non-null, where all results sort after a
 *  previous result (<code>after</code>), with control over whether hit
 *  scores and the max score are computed.
 *  <p>
 *  Pass the bottom hit of the previous page as <code>after</code> to
 *  page efficiently ('deep-paging') through large result sets.  If
 *  <code>doDocScores</code> is <code>true</code> then the score of each
 *  hit will be computed and returned.  If <code>doMaxScore</code> is
 *  <code>true</code> then the maximum score over all collected hits
 *  will be computed.
 *
 *  @throws BooleanQuery.TooManyClauses
 */
public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort,
                           boolean doDocScores, boolean doMaxScore) throws IOException {
  if (after != null && !(after instanceof FieldDoc)) {
    // TODO: if we fix type safety of TopFieldDocs we can
    // remove this
    throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
  }
  final Weight weight = createNormalizedWeight(wrapFilter(query, filter));
  return search(weight, (FieldDoc) after, n, sort, true, doDocScores, doMaxScore);
} }
/** Expert: Low-level search implementation. Finds the top <code>n</code> /** Expert: Low-level search implementation. Finds the top <code>n</code>
@ -383,7 +465,9 @@ public class IndexSearcher {
return collector.topDocs(); return collector.topDocs();
} }
/** Expert: Low-level search implementation with arbitrary sorting. Finds /** Expert: Low-level search implementation with arbitrary
* sorting and control over whether hit scores and max
* score should be computed. Finds
* the top <code>n</code> hits for <code>query</code> and sorting the hits * the top <code>n</code> hits for <code>query</code> and sorting the hits
* by the criteria in <code>sort</code>. * by the criteria in <code>sort</code>.
* *
@ -393,12 +477,13 @@ public class IndexSearcher {
* @throws BooleanQuery.TooManyClauses * @throws BooleanQuery.TooManyClauses
*/ */
protected TopFieldDocs search(Weight weight, protected TopFieldDocs search(Weight weight,
final int nDocs, Sort sort) throws IOException { final int nDocs, Sort sort,
return search(weight, nDocs, sort, true); boolean doDocScores, boolean doMaxScore) throws IOException {
return search(weight, null, nDocs, sort, true, doDocScores, doMaxScore);
} }
/** /**
* Just like {@link #search(Weight, int, Sort)}, but you choose * Just like {@link #search(Weight, int, Sort, boolean, boolean)}, but you choose
* whether or not the fields in the returned {@link FieldDoc} instances should * whether or not the fields in the returned {@link FieldDoc} instances should
* be set by specifying fillFields. * be set by specifying fillFields.
* *
@ -408,27 +493,29 @@ public class IndexSearcher {
* then pass that to {@link #search(AtomicReaderContext[], Weight, * then pass that to {@link #search(AtomicReaderContext[], Weight,
* Collector)}.</p> * Collector)}.</p>
*/ */
protected TopFieldDocs search(Weight weight, int nDocs, protected TopFieldDocs search(Weight weight, FieldDoc after, int nDocs,
Sort sort, boolean fillFields) Sort sort, boolean fillFields,
boolean doDocScores, boolean doMaxScore)
throws IOException { throws IOException {
if (sort == null) throw new NullPointerException(); if (sort == null) throw new NullPointerException();
if (executor == null) { if (executor == null) {
// use all leaves here! // use all leaves here!
return search (leafContexts, weight, nDocs, sort, fillFields); return search(leafContexts, weight, after, nDocs, sort, fillFields, doDocScores, doMaxScore);
} else { } else {
final TopFieldCollector topCollector = TopFieldCollector.create(sort, nDocs, final TopFieldCollector topCollector = TopFieldCollector.create(sort, nDocs,
after,
fillFields, fillFields,
fieldSortDoTrackScores, doDocScores,
fieldSortDoMaxScore, doMaxScore,
false); false);
final Lock lock = new ReentrantLock(); final Lock lock = new ReentrantLock();
final ExecutionHelper<TopFieldDocs> runner = new ExecutionHelper<TopFieldDocs>(executor); final ExecutionHelper<TopFieldDocs> runner = new ExecutionHelper<TopFieldDocs>(executor);
for (int i = 0; i < leafSlices.length; i++) { // search each leaf slice for (int i = 0; i < leafSlices.length; i++) { // search each leaf slice
runner.submit( runner.submit(
new SearcherCallableWithSort(lock, this, leafSlices[i], weight, nDocs, topCollector, sort)); new SearcherCallableWithSort(lock, this, leafSlices[i], weight, after, nDocs, topCollector, sort, doDocScores, doMaxScore));
} }
int totalHits = 0; int totalHits = 0;
float maxScore = Float.NEGATIVE_INFINITY; float maxScore = Float.NEGATIVE_INFINITY;
@ -447,18 +534,12 @@ public class IndexSearcher {
/** /**
* Just like {@link #search(Weight, int, Sort)}, but you choose * Just like {@link #search(Weight, int, Sort, boolean, boolean)}, but you choose
* whether or not the fields in the returned {@link FieldDoc} instances should * whether or not the fields in the returned {@link FieldDoc} instances should
* be set by specifying fillFields. * be set by specifying fillFields.
*
* <p>NOTE: this does not compute scores by default. If you
* need scores, create a {@link TopFieldCollector}
* instance by calling {@link TopFieldCollector#create} and
* then pass that to {@link #search(AtomicReaderContext[], Weight,
* Collector)}.</p>
*/ */
protected TopFieldDocs search(AtomicReaderContext[] leaves, Weight weight, int nDocs, protected TopFieldDocs search(AtomicReaderContext[] leaves, Weight weight, FieldDoc after, int nDocs,
Sort sort, boolean fillFields) throws IOException { Sort sort, boolean fillFields, boolean doDocScores, boolean doMaxScore) throws IOException {
// single thread // single thread
int limit = reader.maxDoc(); int limit = reader.maxDoc();
if (limit == 0) { if (limit == 0) {
@ -466,8 +547,9 @@ public class IndexSearcher {
} }
nDocs = Math.min(nDocs, limit); nDocs = Math.min(nDocs, limit);
TopFieldCollector collector = TopFieldCollector.create(sort, nDocs, TopFieldCollector collector = TopFieldCollector.create(sort, nDocs, after,
fillFields, fieldSortDoTrackScores, fieldSortDoMaxScore, !weight.scoresDocsOutOfOrder()); fillFields, doDocScores,
doMaxScore, !weight.scoresDocsOutOfOrder());
search(leaves, weight, collector); search(leaves, weight, collector);
return (TopFieldDocs) collector.topDocs(); return (TopFieldDocs) collector.topDocs();
} }
@ -553,26 +635,6 @@ public class IndexSearcher {
return weight.explain(leafContexts[n], deBasedDoc); return weight.explain(leafContexts[n], deBasedDoc);
} }
private boolean fieldSortDoTrackScores;
private boolean fieldSortDoMaxScore;
/** By default, no scores are computed when sorting by
* field (using {@link #search(Query,Filter,int,Sort)}).
* You can change that, per IndexSearcher instance, by
* calling this method. Note that this will incur a CPU
* cost.
*
* @param doTrackScores If true, then scores are
* returned for every matching document in {@link
* TopFieldDocs}.
*
* @param doMaxScore If true, then the max score for all
* matching docs is computed. */
public void setDefaultFieldSortScoring(boolean doTrackScores, boolean doMaxScore) {
fieldSortDoTrackScores = doTrackScores;
fieldSortDoMaxScore = doMaxScore;
}
/** /**
* Creates a normalized weight for a top-level {@link Query}. * Creates a normalized weight for a top-level {@link Query}.
* The query is rewritten by this method and {@link Query#createWeight} called, * The query is rewritten by this method and {@link Query#createWeight} called,
@ -626,7 +688,7 @@ public class IndexSearcher {
} }
public TopDocs call() throws IOException { public TopDocs call() throws IOException {
final TopDocs docs = searcher.search (slice.leaves, weight, after, nDocs); final TopDocs docs = searcher.search(slice.leaves, weight, after, nDocs);
final ScoreDoc[] scoreDocs = docs.scoreDocs; final ScoreDoc[] scoreDocs = docs.scoreDocs;
//it would be so nice if we had a thread-safe insert //it would be so nice if we had a thread-safe insert
lock.lock(); lock.lock();
@ -657,9 +719,13 @@ public class IndexSearcher {
private final TopFieldCollector hq; private final TopFieldCollector hq;
private final Sort sort; private final Sort sort;
private final LeafSlice slice; private final LeafSlice slice;
private final FieldDoc after;
private final boolean doDocScores;
private final boolean doMaxScore;
public SearcherCallableWithSort(Lock lock, IndexSearcher searcher, LeafSlice slice, Weight weight, public SearcherCallableWithSort(Lock lock, IndexSearcher searcher, LeafSlice slice, Weight weight,
int nDocs, TopFieldCollector hq, Sort sort) { FieldDoc after, int nDocs, TopFieldCollector hq, Sort sort,
boolean doDocScores, boolean doMaxScore) {
this.lock = lock; this.lock = lock;
this.searcher = searcher; this.searcher = searcher;
this.weight = weight; this.weight = weight;
@ -667,6 +733,9 @@ public class IndexSearcher {
this.hq = hq; this.hq = hq;
this.sort = sort; this.sort = sort;
this.slice = slice; this.slice = slice;
this.after = after;
this.doDocScores = doDocScores;
this.doMaxScore = doMaxScore;
} }
private final class FakeScorer extends Scorer { private final class FakeScorer extends Scorer {
@ -707,7 +776,7 @@ public class IndexSearcher {
public TopFieldDocs call() throws IOException { public TopFieldDocs call() throws IOException {
assert slice.leaves.length == 1; assert slice.leaves.length == 1;
final TopFieldDocs docs = searcher.search (slice.leaves, weight, nDocs, sort, true); final TopFieldDocs docs = searcher.search(slice.leaves, weight, after, nDocs, sort, true, doDocScores, doMaxScore);
lock.lock(); lock.lock();
try { try {
final int base = slice.leaves[0].docBase; final int base = slice.leaves[0].docBase;
@ -718,6 +787,11 @@ public class IndexSearcher {
fakeScorer.score = scoreDoc.score; fakeScorer.score = scoreDoc.score;
hq.collect(scoreDoc.doc-base); hq.collect(scoreDoc.doc-base);
} }
// Carry over maxScore from sub:
if (doMaxScore && docs.getMaxScore() > hq.maxScore) {
hq.maxScore = docs.getMaxScore();
}
} finally { } finally {
lock.unlock(); lock.unlock();
} }

View File

@ -45,7 +45,7 @@ public class TopDocs {
/** Sets the maximum score value encountered. */ /** Sets the maximum score value encountered. */
public void setMaxScore(float maxScore) { public void setMaxScore(float maxScore) {
this.maxScore=maxScore; this.maxScore = maxScore;
} }
/** Constructs a TopDocs with a default maxScore=Float.NaN. */ /** Constructs a TopDocs with a default maxScore=Float.NaN. */

View File

@ -843,6 +843,166 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
} }
/*
* Implements a TopFieldCollector when after != null: hits that sort at or
* before the given FieldDoc (ties broken by global docID) are skipped,
* which is what makes sorted deep-paging (IndexSearcher.searchAfter with a
* Sort) work.
*/
private final static class PagingFieldCollector extends TopFieldCollector {
// Current scorer; used to compute hit scores on demand.
Scorer scorer;
// Hits actually kept; totalHits (in the base class) also counts hits
// skipped because they sort at/before 'after'.
int collectedHits;
final FieldComparator<?>[] comparators;
final int[] reverseMul;
final FieldValueHitQueue<Entry> queue;
final boolean trackDocScores;
final boolean trackMaxScore;
// Sort values and global docID of the last hit of the previous page.
final FieldDoc after;
// after.doc translated into the current segment's local doc space.
int afterDoc;
public PagingFieldCollector(
FieldValueHitQueue<Entry> queue, FieldDoc after, int numHits, boolean fillFields,
boolean trackDocScores, boolean trackMaxScore)
throws IOException {
super(queue, numHits, fillFields);
this.queue = queue;
this.trackDocScores = trackDocScores;
this.trackMaxScore = trackMaxScore;
this.after = after;
comparators = queue.getComparators();
reverseMul = queue.getReverseMul();
// Must set maxScore to NEG_INF, or otherwise Math.max always returns NaN.
maxScore = Float.NEGATIVE_INFINITY;
}
// Replace the queue's current bottom entry with this hit and re-heapify.
void updateBottom(int doc, float score) {
bottom.doc = docBase + doc;
bottom.score = score;
bottom = pq.updateTop();
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public void collect(int doc) throws IOException {
totalHits++;
//System.out.println(" collect doc=" + doc);
// Check if this hit was already collected on a
// previous page:
boolean sameValues = true;
for(int compIDX=0;compIDX<comparators.length;compIDX++) {
final FieldComparator comp = comparators[compIDX];
// Compare this doc's sort value for field compIDX against the
// corresponding 'after' value, honoring the field's reverse flag.
final int cmp = reverseMul[compIDX] * comp.compareDocToValue(doc, after.fields[compIDX]);
if (cmp < 0) {
// Already collected on a previous page
//System.out.println(" skip: before");
return;
} else if (cmp > 0) {
// Not yet collected
sameValues = false;
//System.out.println(" keep: after");
break;
}
}
// Tie-break by docID:
if (sameValues && doc <= afterDoc) {
// Already collected on a previous page
//System.out.println(" skip: tie-break");
return;
}
collectedHits++;
float score = Float.NaN;
if (trackMaxScore) {
score = scorer.score();
if (score > maxScore) {
maxScore = score;
}
}
if (queueFull) {
// Fastmatch: return if this hit is not competitive
for (int i = 0;; i++) {
final int c = reverseMul[i] * comparators[i].compareBottom(doc);
if (c < 0) {
// Definitely not competitive.
return;
} else if (c > 0) {
// Definitely competitive.
break;
} else if (i == comparators.length - 1) {
// This is the equals case.
if (doc + docBase > bottom.doc) {
// Definitely not competitive
return;
}
break;
}
}
// This hit is competitive - replace bottom element in queue & adjustTop
for (int i = 0; i < comparators.length; i++) {
comparators[i].copy(bottom.slot, doc);
}
// Compute score only if it is competitive.
// (When trackMaxScore is set the score was already computed above.)
if (trackDocScores && !trackMaxScore) {
score = scorer.score();
}
updateBottom(doc, score);
for (int i = 0; i < comparators.length; i++) {
comparators[i].setBottom(bottom.slot);
}
} else {
// Startup transient: queue hasn't gathered numHits yet
final int slot = collectedHits - 1;
//System.out.println(" slot=" + slot);
// Copy hit into queue
for (int i = 0; i < comparators.length; i++) {
comparators[i].copy(slot, doc);
}
// Compute score only if it is competitive.
if (trackDocScores && !trackMaxScore) {
score = scorer.score();
}
bottom = pq.add(new Entry(slot, docBase + doc, score));
queueFull = collectedHits == numHits;
if (queueFull) {
for (int i = 0; i < comparators.length; i++) {
comparators[i].setBottom(bottom.slot);
}
}
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
this.scorer = scorer;
for (int i = 0; i < comparators.length; i++) {
comparators[i].setScorer(scorer);
}
}
@Override
public boolean acceptsDocsOutOfOrder() {
// Paging filters hits by value/docID, not arrival order, so
// out-of-order scoring is acceptable.
return true;
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
docBase = context.docBase;
// Translate the global 'after' doc into this segment's doc space so
// the docID tie-break in collect() compares segment-local docs.
afterDoc = after.doc - docBase;
for (int i = 0; i < comparators.length; i++) {
queue.setComparator(i, comparators[i].setNextReader(context));
}
}
}
private static final ScoreDoc[] EMPTY_SCOREDOCS = new ScoreDoc[0]; private static final ScoreDoc[] EMPTY_SCOREDOCS = new ScoreDoc[0];
private final boolean fillFields; private final boolean fillFields;
@ -909,6 +1069,52 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
boolean fillFields, boolean trackDocScores, boolean trackMaxScore, boolean fillFields, boolean trackDocScores, boolean trackMaxScore,
boolean docsScoredInOrder) boolean docsScoredInOrder)
throws IOException { throws IOException {
return create(sort, numHits, null, fillFields, trackDocScores, trackMaxScore, docsScoredInOrder);
}
/**
* Creates a new {@link TopFieldCollector} from the given
* arguments.
*
* <p><b>NOTE</b>: The instances returned by this method
* pre-allocate a full array of length
* <code>numHits</code>.
*
* @param sort
* the sort criteria (SortFields).
* @param numHits
* the number of results to collect.
* @param after
* only hits after this FieldDoc will be collected
* @param fillFields
* specifies whether the actual field values should be returned on
* the results (FieldDoc).
* @param trackDocScores
* specifies whether document scores should be tracked and set on the
* results. Note that if set to false, then the results' scores will
* be set to Float.NaN. Setting this to true affects performance, as
* it incurs the score computation on each competitive result.
* Therefore if document scores are not required by the application,
* it is recommended to set it to false.
* @param trackMaxScore
* specifies whether the query's maxScore should be tracked and set
* on the resulting {@link TopDocs}. Note that if set to false,
* {@link TopDocs#getMaxScore()} returns Float.NaN. Setting this to
* true affects performance as it incurs the score computation on
* each result. Also, setting this true automatically sets
* <code>trackDocScores</code> to true as well.
* @param docsScoredInOrder
* specifies whether documents are scored in doc Id order or not by
* the given {@link Scorer} in {@link #setScorer(Scorer)}.
* @return a {@link TopFieldCollector} instance which will sort the results by
* the sort criteria.
* @throws IOException
*/
public static TopFieldCollector create(Sort sort, int numHits, FieldDoc after,
boolean fillFields, boolean trackDocScores, boolean trackMaxScore,
boolean docsScoredInOrder)
throws IOException {
if (sort.fields.length == 0) { if (sort.fields.length == 0) {
throw new IllegalArgumentException("Sort must contain at least one field"); throw new IllegalArgumentException("Sort must contain at least one field");
} }
@ -918,43 +1124,56 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
} }
FieldValueHitQueue<Entry> queue = FieldValueHitQueue.create(sort.fields, numHits); FieldValueHitQueue<Entry> queue = FieldValueHitQueue.create(sort.fields, numHits);
if (queue.getComparators().length == 1) {
if (after == null) {
if (queue.getComparators().length == 1) {
if (docsScoredInOrder) {
if (trackMaxScore) {
return new OneComparatorScoringMaxScoreCollector(queue, numHits, fillFields);
} else if (trackDocScores) {
return new OneComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields);
} else {
return new OneComparatorNonScoringCollector(queue, numHits, fillFields);
}
} else {
if (trackMaxScore) {
return new OutOfOrderOneComparatorScoringMaxScoreCollector(queue, numHits, fillFields);
} else if (trackDocScores) {
return new OutOfOrderOneComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields);
} else {
return new OutOfOrderOneComparatorNonScoringCollector(queue, numHits, fillFields);
}
}
}
// multiple comparators.
if (docsScoredInOrder) { if (docsScoredInOrder) {
if (trackMaxScore) { if (trackMaxScore) {
return new OneComparatorScoringMaxScoreCollector(queue, numHits, fillFields); return new MultiComparatorScoringMaxScoreCollector(queue, numHits, fillFields);
} else if (trackDocScores) { } else if (trackDocScores) {
return new OneComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields); return new MultiComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields);
} else { } else {
return new OneComparatorNonScoringCollector(queue, numHits, fillFields); return new MultiComparatorNonScoringCollector(queue, numHits, fillFields);
} }
} else { } else {
if (trackMaxScore) { if (trackMaxScore) {
return new OutOfOrderOneComparatorScoringMaxScoreCollector(queue, numHits, fillFields); return new OutOfOrderMultiComparatorScoringMaxScoreCollector(queue, numHits, fillFields);
} else if (trackDocScores) { } else if (trackDocScores) {
return new OutOfOrderOneComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields); return new OutOfOrderMultiComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields);
} else { } else {
return new OutOfOrderOneComparatorNonScoringCollector(queue, numHits, fillFields); return new OutOfOrderMultiComparatorNonScoringCollector(queue, numHits, fillFields);
} }
} }
}
// multiple comparators.
if (docsScoredInOrder) {
if (trackMaxScore) {
return new MultiComparatorScoringMaxScoreCollector(queue, numHits, fillFields);
} else if (trackDocScores) {
return new MultiComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields);
} else {
return new MultiComparatorNonScoringCollector(queue, numHits, fillFields);
}
} else { } else {
if (trackMaxScore) { if (after.fields == null) {
return new OutOfOrderMultiComparatorScoringMaxScoreCollector(queue, numHits, fillFields); throw new IllegalArgumentException("after.fields wasn't set; you must pass fillFields=true for the previous search");
} else if (trackDocScores) {
return new OutOfOrderMultiComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields);
} else {
return new OutOfOrderMultiComparatorNonScoringCollector(queue, numHits, fillFields);
} }
if (after.fields.length != sort.getSort().length) {
throw new IllegalArgumentException("after.fields has " + after.fields.length + " values but sort has " + sort.getSort().length);
}
return new PagingFieldCollector(queue, after, numHits, fillFields, trackDocScores, trackMaxScore);
} }
} }

View File

@ -21,20 +21,19 @@ package org.apache.lucene.search;
/** Represents hits returned by {@link /** Represents hits returned by {@link
* IndexSearcher#search(Query,Filter,int,Sort)}. * IndexSearcher#search(Query,Filter,int,Sort)}.
*/ */
public class TopFieldDocs public class TopFieldDocs extends TopDocs {
extends TopDocs {
/** The fields which were used to sort results by. */ /** The fields which were used to sort results by. */
public SortField[] fields; public SortField[] fields;
/** Creates one of these objects. /** Creates one of these objects.
* @param totalHits Total number of hits for the query. * @param totalHits Total number of hits for the query.
* @param scoreDocs The top hits for the query. * @param scoreDocs The top hits for the query.
* @param fields The sort criteria used to find the top hits. * @param fields The sort criteria used to find the top hits.
* @param maxScore The maximum score encountered. * @param maxScore The maximum score encountered.
*/ */
public TopFieldDocs (int totalHits, ScoreDoc[] scoreDocs, SortField[] fields, float maxScore) { public TopFieldDocs (int totalHits, ScoreDoc[] scoreDocs, SortField[] fields, float maxScore) {
super (totalHits, scoreDocs, maxScore); super (totalHits, scoreDocs, maxScore);
this.fields = fields; this.fields = fields;
} }
} }

View File

@ -139,6 +139,10 @@ final class JustCompileSearch {
throw new UnsupportedOperationException(UNSUPPORTED_MSG); throw new UnsupportedOperationException(UNSUPPORTED_MSG);
} }
@Override
public int compareDocToValue(int doc, Object value) {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}
} }
static final class JustCompileFieldComparatorSource extends FieldComparatorSource { static final class JustCompileFieldComparatorSource extends FieldComparatorSource {

View File

@ -188,6 +188,14 @@ class ElevationComparatorSource extends FieldComparatorSource {
public Integer value(int slot) { public Integer value(int slot) {
return Integer.valueOf(values[slot]); return Integer.valueOf(values[slot]);
} }
@Override
public int compareDocToValue(int doc, Integer valueObj) throws IOException {
final int value = valueObj.intValue();
final int docValue = docVal(doc);
// values will be small enough that there is no overflow concern
return value - docValue;
}
}; };
} }
} }

View File

@ -17,12 +17,25 @@ package org.apache.lucene.search;
* limitations under the License. * limitations under the License.
*/ */
import java.util.Arrays;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.FloatDocValuesField;
import org.apache.lucene.document.FloatField;
import org.apache.lucene.document.IntDocValuesField;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.SortedBytesDocValuesField;
import org.apache.lucene.document.StraightBytesDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.English; import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil; import org.apache.lucene.util._TestUtil;
@ -30,11 +43,19 @@ import org.apache.lucene.util._TestUtil;
/** /**
* Tests IndexSearcher's searchAfter() method * Tests IndexSearcher's searchAfter() method
*/ */
public class TestSearchAfter extends LuceneTestCase { public class TestSearchAfter extends LuceneTestCase {
private Directory dir; private Directory dir;
private IndexReader reader; private IndexReader reader;
private IndexSearcher searcher; private IndexSearcher searcher;
boolean supportsDocValues = Codec.getDefault().getName().equals("Lucene3x") == false;
private static SortField useDocValues(SortField field) {
field.setUseIndexValues(true);
return field;
}
@Override @Override
public void setUp() throws Exception { public void setUp() throws Exception {
super.setUp(); super.setUp();
@ -45,6 +66,25 @@ public class TestSearchAfter extends LuceneTestCase {
Document document = new Document(); Document document = new Document();
document.add(newField("english", English.intToEnglish(i), TextField.TYPE_UNSTORED)); document.add(newField("english", English.intToEnglish(i), TextField.TYPE_UNSTORED));
document.add(newField("oddeven", (i % 2 == 0) ? "even" : "odd", TextField.TYPE_UNSTORED)); document.add(newField("oddeven", (i % 2 == 0) ? "even" : "odd", TextField.TYPE_UNSTORED));
document.add(newField("byte", "" + ((byte) random().nextInt()), StringField.TYPE_UNSTORED));
document.add(newField("short", "" + ((short) random().nextInt()), StringField.TYPE_UNSTORED));
document.add(new IntField("int", random().nextInt()));
document.add(new LongField("long", random().nextLong()));
document.add(new FloatField("float", random().nextFloat()));
document.add(new DoubleField("double", random().nextDouble()));
document.add(newField("bytes", _TestUtil.randomRealisticUnicodeString(random()), StringField.TYPE_UNSTORED));
document.add(newField("bytesval", _TestUtil.randomRealisticUnicodeString(random()), StringField.TYPE_UNSTORED));
document.add(new DoubleField("double", random().nextDouble()));
if (supportsDocValues) {
document.add(new IntDocValuesField("intdocvalues", random().nextInt()));
document.add(new FloatDocValuesField("floatdocvalues", random().nextFloat()));
document.add(new SortedBytesDocValuesField("sortedbytesdocvalues", new BytesRef(_TestUtil.randomRealisticUnicodeString(random()))));
document.add(new SortedBytesDocValuesField("sortedbytesdocvaluesval", new BytesRef(_TestUtil.randomRealisticUnicodeString(random()))));
document.add(new StraightBytesDocValuesField("straightbytesdocvalues", new BytesRef(_TestUtil.randomRealisticUnicodeString(random()))));
}
iw.addDocument(document); iw.addDocument(document);
} }
reader = iw.getReader(); reader = iw.getReader();
@ -63,7 +103,7 @@ public class TestSearchAfter extends LuceneTestCase {
// because the first page has a null 'after', we get a normal collector. // because the first page has a null 'after', we get a normal collector.
// so we need to run the test a few times to ensure we will collect multiple // so we need to run the test a few times to ensure we will collect multiple
// pages. // pages.
int n = atLeast(10); int n = atLeast(20);
for (int i = 0; i < n; i++) { for (int i = 0; i < n; i++) {
Filter odd = new QueryWrapperFilter(new TermQuery(new Term("oddeven", "odd"))); Filter odd = new QueryWrapperFilter(new TermQuery(new Term("oddeven", "odd")));
assertQuery(new MatchAllDocsQuery(), null); assertQuery(new MatchAllDocsQuery(), null);
@ -78,13 +118,67 @@ public class TestSearchAfter extends LuceneTestCase {
} }
void assertQuery(Query query, Filter filter) throws Exception { void assertQuery(Query query, Filter filter) throws Exception {
assertQuery(query, filter, null);
assertQuery(query, filter, Sort.RELEVANCE);
assertQuery(query, filter, Sort.INDEXORDER);
for(int rev=0;rev<2;rev++) {
boolean reversed = rev == 1;
assertQuery(query, filter, new Sort(new SortField[] {new SortField("byte", SortField.Type.BYTE, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("short", SortField.Type.SHORT, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("int", SortField.Type.INT, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("long", SortField.Type.LONG, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("float", SortField.Type.FLOAT, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("double", SortField.Type.DOUBLE, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("bytes", SortField.Type.STRING, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("bytesval", SortField.Type.STRING_VAL, reversed)}));
if (supportsDocValues) {
assertQuery(query, filter, new Sort(new SortField[] {useDocValues(new SortField("intdocvalues", SortField.Type.INT, reversed))}));
assertQuery(query, filter, new Sort(new SortField[] {useDocValues(new SortField("floatdocvalues", SortField.Type.FLOAT, reversed))}));
assertQuery(query, filter, new Sort(new SortField[] {useDocValues(new SortField("sortedbytesdocvalues", SortField.Type.STRING, reversed))}));
assertQuery(query, filter, new Sort(new SortField[] {useDocValues(new SortField("sortedbytesdocvaluesval", SortField.Type.STRING_VAL, reversed))}));
assertQuery(query, filter, new Sort(new SortField[] {useDocValues(new SortField("straightbytesdocvalues", SortField.Type.STRING_VAL, reversed))}));
}
}
}
void assertQuery(Query query, Filter filter, Sort sort) throws Exception {
int maxDoc = searcher.getIndexReader().maxDoc(); int maxDoc = searcher.getIndexReader().maxDoc();
TopDocs all = searcher.search(query, filter, maxDoc); TopDocs all;
int pageSize = _TestUtil.nextInt(random(), 1, maxDoc*2); int pageSize = _TestUtil.nextInt(random(), 1, maxDoc*2);
if (VERBOSE) {
System.out.println("\nassertQuery: query=" + query + " filter=" + filter + " sort=" + sort + " pageSize=" + pageSize);
}
final boolean doMaxScore = random().nextBoolean();
if (sort == null) {
all = searcher.search(query, filter, maxDoc);
} else if (sort == Sort.RELEVANCE) {
all = searcher.search(query, filter, maxDoc, sort, true, doMaxScore);
} else {
all = searcher.search(query, filter, maxDoc, sort);
}
if (VERBOSE) {
System.out.println(" all.totalHits=" + all.totalHits);
}
int pageStart = 0; int pageStart = 0;
ScoreDoc lastBottom = null; ScoreDoc lastBottom = null;
while (pageStart < all.totalHits) { while (pageStart < all.totalHits) {
TopDocs paged = searcher.searchAfter(lastBottom, query, filter, pageSize); TopDocs paged;
if (sort == null) {
if (VERBOSE) {
System.out.println(" iter lastBottom=" + lastBottom);
}
paged = searcher.searchAfter(lastBottom, query, filter, pageSize);
} else {
if (VERBOSE) {
System.out.println(" iter lastBottom=" + lastBottom + (lastBottom == null ? "" : " fields=" + Arrays.toString(((FieldDoc) lastBottom).fields)));
}
if (sort == Sort.RELEVANCE) {
paged = searcher.searchAfter(lastBottom, query, filter, pageSize, sort, true, doMaxScore);
} else {
paged = searcher.searchAfter(lastBottom, query, filter, pageSize, sort);
}
}
if (paged.scoreDocs.length == 0) { if (paged.scoreDocs.length == 0) {
break; break;
} }
@ -98,8 +192,14 @@ public class TestSearchAfter extends LuceneTestCase {
static void assertPage(int pageStart, TopDocs all, TopDocs paged) { static void assertPage(int pageStart, TopDocs all, TopDocs paged) {
assertEquals(all.totalHits, paged.totalHits); assertEquals(all.totalHits, paged.totalHits);
for (int i = 0; i < paged.scoreDocs.length; i++) { for (int i = 0; i < paged.scoreDocs.length; i++) {
assertEquals(all.scoreDocs[pageStart + i].doc, paged.scoreDocs[i].doc); ScoreDoc sd1 = all.scoreDocs[pageStart + i];
assertEquals(all.scoreDocs[pageStart + i].score, paged.scoreDocs[i].score, 0f); ScoreDoc sd2 = paged.scoreDocs[i];
assertEquals(sd1.doc, sd2.doc);
assertEquals(sd1.score, sd2.score, 0f);
if (sd1 instanceof FieldDoc) {
assertTrue(sd2 instanceof FieldDoc);
assertEquals(((FieldDoc) sd1).fields, ((FieldDoc) sd2).fields);
}
} }
} }
} }

View File

@ -44,9 +44,11 @@ import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
@ -218,7 +220,6 @@ public class TestSort extends LuceneTestCase {
IndexReader reader = writer.getReader(); IndexReader reader = writer.getReader();
writer.close (); writer.close ();
IndexSearcher s = newSearcher(reader); IndexSearcher s = newSearcher(reader);
s.setDefaultFieldSortScoring(true, true);
return s; return s;
} }
@ -734,6 +735,15 @@ public class TestSort extends LuceneTestCase {
public Integer value(int slot) { public Integer value(int slot) {
return Integer.valueOf(slotValues[slot]); return Integer.valueOf(slotValues[slot]);
} }
@Override
public int compareDocToValue(int doc, Integer valueObj) {
final int value = valueObj.intValue();
final int docValue = docValues[doc];
// values are small enough that overflow won't happen
return docValue - value;
}
} }
static class MyFieldComparatorSource extends FieldComparatorSource { static class MyFieldComparatorSource extends FieldComparatorSource {
@ -889,7 +899,7 @@ public class TestSort extends LuceneTestCase {
// try to pick a query that will result in an unnormalized // try to pick a query that will result in an unnormalized
// score greater than 1 to test for correct normalization // score greater than 1 to test for correct normalization
final TopDocs docs1 = full.search(queryE,null,nDocs,sort); final TopDocs docs1 = full.search(queryE,null,nDocs,sort,true,true);
// a filter that only allows through the first hit // a filter that only allows through the first hit
Filter filt = new Filter() { Filter filt = new Filter() {
@ -903,7 +913,7 @@ public class TestSort extends LuceneTestCase {
} }
}; };
TopDocs docs2 = full.search(queryE, filt, nDocs, sort); TopDocs docs2 = full.search(queryE, filt, nDocs, sort,true,true);
assertEquals(docs1.scoreDocs[0].score, docs2.scoreDocs[0].score, 1e-6); assertEquals(docs1.scoreDocs[0].score, docs2.scoreDocs[0].score, 1e-6);
} }
@ -1244,7 +1254,7 @@ public class TestSort extends LuceneTestCase {
String expectedResult) throws IOException { String expectedResult) throws IOException {
//ScoreDoc[] result = searcher.search (query, null, 1000, sort).scoreDocs; //ScoreDoc[] result = searcher.search (query, null, 1000, sort).scoreDocs;
TopDocs hits = searcher.search(query, null, Math.max(1, expectedResult.length()), sort); TopDocs hits = searcher.search(query, null, Math.max(1, expectedResult.length()), sort, true, true);
ScoreDoc[] result = hits.scoreDocs; ScoreDoc[] result = hits.scoreDocs;
assertEquals(expectedResult.length(),hits.totalHits); assertEquals(expectedResult.length(),hits.totalHits);
StringBuilder buff = new StringBuilder(10); StringBuilder buff = new StringBuilder(10);
@ -1478,4 +1488,38 @@ public class TestSort extends LuceneTestCase {
r.close(); r.close();
dir.close(); dir.close();
} }
public void testMaxScore() throws Exception {
Directory d = newDirectory();
// Not RIW because we need exactly 2 segs:
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
int id = 0;
for(int seg=0;seg<2;seg++) {
for(int docIDX=0;docIDX<10;docIDX++) {
Document doc = new Document();
doc.add(newField("id", ""+docIDX, StringField.TYPE_STORED));
StringBuilder sb = new StringBuilder();
for(int i=0;i<id;i++) {
sb.append(' ');
sb.append("text");
}
doc.add(newField("body", sb.toString(), TextField.TYPE_UNSTORED));
w.addDocument(doc);
id++;
}
w.commit();
}
IndexReader r = DirectoryReader.open(w, true);
w.close();
Query q = new TermQuery(new Term("body", "text"));
IndexSearcher s = newSearcher(r);
float maxScore = s.search(q , 10).getMaxScore();
assertEquals(maxScore, s.search(q, null, 3, Sort.INDEXORDER, random().nextBoolean(), true).getMaxScore(), 0.0);
assertEquals(maxScore, s.search(q, null, 3, Sort.RELEVANCE, random().nextBoolean(), true).getMaxScore(), 0.0);
assertEquals(maxScore, s.search(q, null, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, false)}), random().nextBoolean(), true).getMaxScore(), 0.0);
assertEquals(maxScore, s.search(q, null, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, true)}), random().nextBoolean(), true).getMaxScore(), 0.0);
r.close();
d.close();
}
} }

View File

@ -444,7 +444,6 @@ public class TestBlockJoin extends LuceneTestCase {
} }
final IndexSearcher s = newSearcher(r); final IndexSearcher s = newSearcher(r);
s.setDefaultFieldSortScoring(true, true);
final IndexSearcher joinS = newSearcher(joinR); final IndexSearcher joinS = newSearcher(joinR);

View File

@ -64,7 +64,7 @@ public class CustomScoreQuery extends Query {
* computation. This parameter is optional - it can be null. * computation. This parameter is optional - it can be null.
*/ */
public CustomScoreQuery(Query subQuery, Query scoringQuery) { public CustomScoreQuery(Query subQuery, Query scoringQuery) {
this(subQuery, scoringQuery!=null ? // don't want an array that contains a single null.. this(subQuery, scoringQuery!=null ? // don't want an array that contains a single null..
new Query[] {scoringQuery} : new Query[0]); new Query[] {scoringQuery} : new Query[0]);
} }

View File

@ -184,5 +184,18 @@ public abstract class ValueSource {
public Double value(int slot) { public Double value(int slot) {
return values[slot]; return values[slot];
} }
@Override
public int compareDocToValue(int doc, Double valueObj) {
final double value = valueObj.doubleValue();
final double docValue = docVals.doubleVal(doc);
if (docValue < value) {
return -1;
} else if (docValue > value) {
return -1;
} else {
return 0;
}
}
} }
} }

View File

@ -132,7 +132,6 @@ public class TestValueSources extends LuceneTestCase {
reader = iw.getReader(); reader = iw.getReader();
searcher = newSearcher(reader); searcher = newSearcher(reader);
searcher.setDefaultFieldSortScoring(true, true);
iw.close(); iw.close();
} }

View File

@ -118,4 +118,16 @@ public final class SlowCollatedStringComparator extends FieldComparator<String>
return collator.compare(first, second); return collator.compare(first, second);
} }
} }
@Override
public int compareDocToValue(int doc, String value) {
final BytesRef br = currentDocTerms.getTerm(doc, tempBR);
final String docValue;
if (br == null) {
docValue = null;
} else {
docValue = br.utf8ToString();
}
return compareValues(docValue, value);
}
} }

View File

@ -560,9 +560,14 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
public Integer value(int slot) { public Integer value(int slot) {
return values[slot]; return values[slot];
} }
@Override
public int compareDocToValue(int doc, Integer valueObj) throws IOException {
final int value = valueObj.intValue();
final int docValue = docVal(doc);
return docValue - value; // values will be small enough that there is no overflow concern
}
}; };
} }
}
} }
}

View File

@ -138,6 +138,12 @@ public class RandomSortField extends FieldType {
public Integer value(int slot) { public Integer value(int slot) {
return values[slot]; return values[slot];
} }
@Override
public int compareDocToValue(int doc, Integer valueObj) {
// values will be positive... no overflow possible.
return hash(doc+seed) - valueObj.intValue();
}
}; };
} }
}; };

View File

@ -121,6 +121,11 @@ class TermOrdValComparator_SML extends FieldComparator<Comparable> {
return TermOrdValComparator_SML.createComparator(context.reader(), this); return TermOrdValComparator_SML.createComparator(context.reader(), this);
} }
@Override
public int compareDocToValue(int doc, Comparable docValue) {
throw new UnsupportedOperationException();
}
// Base class for specialized (per bit width of the // Base class for specialized (per bit width of the
// ords) per-segment comparator. NOTE: this is messy; // ords) per-segment comparator. NOTE: this is messy;
// we do this only because hotspot can't reliably inline // we do this only because hotspot can't reliably inline
@ -216,6 +221,20 @@ class TermOrdValComparator_SML extends FieldComparator<Comparable> {
public BytesRef value(int slot) { public BytesRef value(int slot) {
return values==null ? parent.NULL_VAL : values[slot]; return values==null ? parent.NULL_VAL : values[slot];
} }
@Override
public int compareDocToValue(int doc, BytesRef value) {
final BytesRef docValue = termsIndex.getTerm(doc, tempBR);
if (docValue == null) {
if (value == null) {
return 0;
}
return 1;
} else if (value == null) {
return -1;
}
return docValue.compareTo(value);
}
} }
// Used per-segment when bit width of doc->ord is 8: // Used per-segment when bit width of doc->ord is 8: