mirror of https://github.com/apache/lucene.git
LUCENE-8430: TopDocs.totalHits may now be a lower bound of the hit count.
This commit is contained in:
parent 3a837ca374
commit 9ca053712a
@@ -89,3 +89,11 @@ Computing scores at collection time is less efficient than running a second
 request in order to only compute scores for documents that made it to the top
 hits. As a consequence, the trackDocScores option has been removed and can be
 replaced with the new TopFieldCollector#populateScores helper method.
+
+## TopDocs.totalHits is no longer a long ##
+
+Lucene 8 received optimizations for collection of top-k matches by not visiting
+all matches. However these optimizations won't help if all matches still need
+to be visited in order to compute the total number of hits. As a consequence,
+TopDocs.totalHits is now a TotalHits object that is either an exact hit count
+or a lower bound of the hit count.
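For readers migrating code, here is a minimal sketch (not part of this commit) of how a caller adapts to the new TotalHits type; the searcher, field name and term are placeholders:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHits;

class TotalHitsMigrationExample {
  // Illustrates reading the hit count after this change.
  static void printHitCount(IndexSearcher searcher) throws java.io.IOException {
    TopDocs topDocs = searcher.search(new TermQuery(new Term("field", "value")), 10);
    // Before this change: long hitCount = topDocs.totalHits;
    long hitCount = topDocs.totalHits.value;
    if (topDocs.totalHits.relation == TotalHits.Relation.EQUAL_TO) {
      System.out.println("exactly " + hitCount + " hits");
    } else {
      // GREATER_THAN_OR_EQUAL_TO: the value is only a lower bound
      System.out.println("at least " + hitCount + " hits");
    }
  }
}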
@@ -94,7 +94,7 @@ public class DocMakerTest extends BenchmarkTestCase {
 IndexReader reader = DirectoryReader.open(runData.getDirectory());
 IndexSearcher searcher = newSearcher(reader);
 TopDocs td = searcher.search(new TermQuery(new Term("key", "value")), 10);
-assertEquals(numExpectedResults, td.totalHits);
+assertEquals(numExpectedResults, td.totalHits.value);
 reader.close();
 }
 
@@ -154,7 +154,7 @@ public class LineDocSourceTest extends BenchmarkTestCase {
 reader = DirectoryReader.open(runData.getDirectory());
 searcher = newSearcher(reader);
 TopDocs td = searcher.search(new TermQuery(new Term("body", "body")), 10);
-assertEquals(numAdds, td.totalHits);
+assertEquals(numAdds, td.totalHits.value);
 assertNotNull(td.scoreDocs[0]);
 
 if (storedField==null) {
@@ -217,7 +217,7 @@ public class BM25NBClassifier implements Classifier<BytesRef> {
 builder.add(query, BooleanClause.Occur.MUST);
 }
 TopDocs search = indexSearcher.search(builder.build(), 1);
-return search.totalHits > 0 ? search.scoreDocs[0].score : 1;
+return search.totalHits.value > 0 ? search.scoreDocs[0].score : 1;
 }
 
 private double calculateLogPrior(Term term) throws IOException {
@@ -228,7 +228,7 @@ public class BM25NBClassifier implements Classifier<BytesRef> {
 bq.add(query, BooleanClause.Occur.MUST);
 }
 TopDocs topDocs = indexSearcher.search(bq.build(), 1);
-return topDocs.totalHits > 0 ? Math.log(topDocs.scoreDocs[0].score) : 0;
+return topDocs.totalHits.value > 0 ? Math.log(topDocs.scoreDocs[0].score) : 0;
 }
 
 }
@@ -159,7 +159,7 @@ public class KNearestFuzzyClassifier implements Classifier<BytesRef> {
 private List<ClassificationResult<BytesRef>> buildListFromTopDocs(TopDocs topDocs) throws IOException {
 Map<BytesRef, Integer> classCounts = new HashMap<>();
 Map<BytesRef, Double> classBoosts = new HashMap<>(); // this is a boost based on class ranking positions in topDocs
-float maxScore = topDocs.totalHits == 0 ? Float.NaN : topDocs.scoreDocs[0].score;
+float maxScore = topDocs.totalHits.value == 0 ? Float.NaN : topDocs.scoreDocs[0].score;
 for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
 IndexableField storableField = indexSearcher.doc(scoreDoc.doc).getField(classFieldName);
 if (storableField != null) {
@@ -190,7 +190,7 @@ public class KNearestNeighborClassifier implements Classifier<BytesRef> {
 protected List<ClassificationResult<BytesRef>> buildListFromTopDocs(TopDocs topDocs) throws IOException {
 Map<BytesRef, Integer> classCounts = new HashMap<>();
 Map<BytesRef, Double> classBoosts = new HashMap<>(); // this is a boost based on class ranking positions in topDocs
-float maxScore = topDocs.totalHits == 0 ? Float.NaN : topDocs.scoreDocs[0].score;
+float maxScore = topDocs.totalHits.value == 0 ? Float.NaN : topDocs.scoreDocs[0].score;
 for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
 IndexableField[] storableFields = indexSearcher.doc(scoreDoc.doc).getFields(classFieldName);
 for (IndexableField singleStorableField : storableFields) {
@@ -35,6 +35,7 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.search.grouping.GroupDocs;
 import org.apache.lucene.search.grouping.GroupingSearch;
 import org.apache.lucene.search.grouping.TopGroups;
@@ -122,7 +123,8 @@ public class DatasetSplitter {
 
 // iterate over existing documents
 for (GroupDocs<Object> group : topGroups.groups) {
-long totalHits = group.totalHits;
+assert group.totalHits.relation == TotalHits.Relation.EQUAL_TO;
+long totalHits = group.totalHits.value;
 double testSize = totalHits * testRatio;
 int tc = 0;
 double cvSize = totalHits * crossValidationRatio;
@@ -24,6 +24,7 @@ import java.util.Comparator;
 import java.util.List;
 
 import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.TotalHits.Relation;
 
 /**
 * A {@link Rescorer} that re-sorts according to a provided
@@ -98,9 +99,9 @@ public class SortRescorer extends Rescorer {
 
 @Override
 public Explanation explain(IndexSearcher searcher, Explanation firstPassExplanation, int docID) throws IOException {
-TopDocs oneHit = new TopDocs(1, new ScoreDoc[] {new ScoreDoc(docID, firstPassExplanation.getValue().floatValue())});
+TopDocs oneHit = new TopDocs(new TotalHits(1, Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(docID, firstPassExplanation.getValue().floatValue())});
 TopDocs hits = rescore(searcher, oneHit, 1);
-assert hits.totalHits == 1;
+assert hits.totalHits.value == 1;
 
 List<Explanation> subs = new ArrayList<>();
 
@@ -23,13 +23,13 @@ import org.apache.lucene.util.PriorityQueue;
 public class TopDocs {
 
 /** The total number of hits for the query. */
-public long totalHits;
+public TotalHits totalHits;
 
 /** The top hits for the query. */
 public ScoreDoc[] scoreDocs;
 
 /** Constructs a TopDocs. */
-public TopDocs(long totalHits, ScoreDoc[] scoreDocs) {
+public TopDocs(TotalHits totalHits, ScoreDoc[] scoreDocs) {
 this.totalHits = totalHits;
 this.scoreDocs = scoreDocs;
 }
@@ -246,12 +246,18 @@ public class TopDocs {
 }
 
 long totalHitCount = 0;
+TotalHits.Relation totalHitsRelation = TotalHits.Relation.EQUAL_TO;
 int availHitCount = 0;
 for(int shardIDX=0;shardIDX<shardHits.length;shardIDX++) {
 final TopDocs shard = shardHits[shardIDX];
 // totalHits can be non-zero even if no hits were
 // collected, when searchAfter was used:
-totalHitCount += shard.totalHits;
+totalHitCount += shard.totalHits.value;
+// If any hit count is a lower bound then the merged
+// total hit count is a lower bound as well
+if (shard.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) {
+totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
+}
 if (shard.scoreDocs != null && shard.scoreDocs.length > 0) {
 availHitCount += shard.scoreDocs.length;
 queue.add(new ShardRef(shardIDX, setShardIndex == false));
@@ -292,10 +298,11 @@ public class TopDocs {
 }
 }
 
+TotalHits totalHits = new TotalHits(totalHitCount, totalHitsRelation);
 if (sort == null) {
-return new TopDocs(totalHitCount, hits);
+return new TopDocs(totalHits, hits);
 } else {
-return new TopFieldDocs(totalHitCount, hits, sort.getSort());
+return new TopFieldDocs(totalHits, hits, sort.getSort());
 }
 }
 }
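The lines added to the merge logic above make merged hit counts conservative: if any shard reports a lower bound, the merged count is also a lower bound. A minimal standalone sketch of that rule (a hypothetical helper, not the actual TopDocs.merge code; shard values are illustrative):

import org.apache.lucene.search.TotalHits;

class MergedTotalHitsSketch {
  // Combine per-shard TotalHits the same way the merge code above does.
  static TotalHits combine(TotalHits[] shardHits) {
    long totalHitCount = 0;
    TotalHits.Relation relation = TotalHits.Relation.EQUAL_TO;
    for (TotalHits shard : shardHits) {
      totalHitCount += shard.value;
      if (shard.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) {
        // a single inexact shard makes the merged count a lower bound
        relation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
      }
    }
    return new TotalHits(totalHitCount, relation);
  }
}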
@@ -35,7 +35,7 @@ public abstract class TopDocsCollector<T extends ScoreDoc> implements Collector
 
 /** This is used in case topDocs() is called with illegal parameters, or there
 * simply aren't (enough) results. */
-protected static final TopDocs EMPTY_TOPDOCS = new TopDocs(0, new ScoreDoc[0]);
+protected static final TopDocs EMPTY_TOPDOCS = new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]);
 
 /**
 * The priority queue which holds the top documents. Note that different
@@ -47,7 +47,10 @@ public abstract class TopDocsCollector<T extends ScoreDoc> implements Collector
 
 /** The total number of documents that the collector encountered. */
 protected int totalHits;
 
+/** Whether {@link #totalHits} is exact or a lower bound. */
+protected TotalHits.Relation totalHitsRelation = TotalHits.Relation.EQUAL_TO;
+
 protected TopDocsCollector(PriorityQueue<T> pq) {
 this.pq = pq;
 }
@@ -69,7 +72,7 @@ public abstract class TopDocsCollector<T extends ScoreDoc> implements Collector
 * topDocs were invalid.
 */
 protected TopDocs newTopDocs(ScoreDoc[] results, int start) {
-return results == null ? EMPTY_TOPDOCS : new TopDocs(totalHits, results);
+return results == null ? EMPTY_TOPDOCS : new TopDocs(new TotalHits(totalHits, totalHitsRelation), results);
 }
 
 /** The total number of documents that matched this query. */
@@ -25,6 +25,7 @@ import java.util.List;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.FieldValueHitQueue.Entry;
+import org.apache.lucene.search.TotalHits.Relation;
 import org.apache.lucene.util.FutureObjects;
 import org.apache.lucene.util.PriorityQueue;
 
@@ -77,13 +78,6 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
 return Arrays.asList(fields1).equals(Arrays.asList(fields2).subList(0, fields1.length));
 }
 
-static int estimateRemainingHits(int hitCount, int doc, int maxDoc) {
-double hitRatio = (double) hitCount / (doc + 1);
-int remainingDocs = maxDoc - doc - 1;
-int remainingHits = (int) (remainingDocs * hitRatio);
-return remainingHits;
-}
-
 /*
 * Implements a TopFieldCollector over one SortField criteria, with tracking
 * document scores and maxScore.
@@ -111,7 +105,6 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
 final boolean canEarlyTerminate = trackTotalHits == false &&
 indexSort != null &&
 canEarlyTerminate(sort, indexSort);
-final int initialTotalHits = totalHits;
 
 return new MultiComparatorLeafCollector(comparators, reverseMul) {
 
@@ -124,10 +117,7 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
 // this document is largest than anything else in the queue, and
 // therefore not competitive.
 if (canEarlyTerminate) {
-// scale totalHits linearly based on the number of docs
-// and terminate collection
-totalHits += estimateRemainingHits(totalHits - initialTotalHits, doc, context.reader().maxDoc());
-earlyTerminated = true;
+totalHitsRelation = Relation.GREATER_THAN_OR_EQUAL_TO;
 throw new CollectionTerminatedException();
 } else {
 // just move to the next doc
@@ -193,7 +183,6 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
 final boolean canEarlyTerminate = trackTotalHits == false &&
 indexSort != null &&
 canEarlyTerminate(sort, indexSort);
-final int initialTotalHits = totalHits;
 return new MultiComparatorLeafCollector(queue.getComparators(context), queue.getReverseMul()) {
 
 @Override
@@ -209,10 +198,7 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
 if (cmp <= 0) {
 // not competitive since documents are visited in doc id order
 if (canEarlyTerminate) {
-// scale totalHits linearly based on the number of docs
-// and terminate collection
-totalHits += estimateRemainingHits(totalHits - initialTotalHits, doc, context.reader().maxDoc());
-earlyTerminated = true;
+totalHitsRelation = Relation.GREATER_THAN_OR_EQUAL_TO;
 throw new CollectionTerminatedException();
 } else {
 // just move to the next doc
@@ -261,7 +247,6 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
 FieldValueHitQueue.Entry bottom = null;
 boolean queueFull;
 int docBase;
-boolean earlyTerminated = false;
 final boolean needsScores;
 
 // Declaring the constructor private prevents extending this class by anyone
@@ -423,7 +408,7 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
 }
 
 // If this is a maxScoring tracking collector and there were no results,
-return new TopFieldDocs(totalHits, results, ((FieldValueHitQueue<Entry>) pq).getFields());
+return new TopFieldDocs(new TotalHits(totalHits, totalHitsRelation), results, ((FieldValueHitQueue<Entry>) pq).getFields());
 }
 
 @Override
@@ -433,6 +418,6 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
 
 /** Return whether collection terminated early. */
 public boolean isEarlyTerminated() {
-return earlyTerminated;
+return totalHitsRelation == Relation.GREATER_THAN_OR_EQUAL_TO;
 }
 }
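With estimateRemainingHits removed, a collector that terminates early no longer extrapolates the hit count; it reports the hits counted so far and marks the relation as GREATER_THAN_OR_EQUAL_TO. A hedged sketch of what a caller of a sorted search can now check (field names and query are illustrative, not from this commit):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TotalHits;

class SortedSearchHitCountSketch {
  static String describe(IndexSearcher searcher) throws java.io.IOException {
    Sort sort = new Sort(new SortField("timestamp", SortField.Type.LONG));
    TopFieldDocs hits = searcher.search(new TermQuery(new Term("body", "lucene")), 10, sort);
    // If the collector terminated early (e.g. a matching index sort and total
    // hits not tracked), the relation is GREATER_THAN_OR_EQUAL_TO and the
    // value is only the number of hits seen before termination.
    TotalHits totalHits = hits.totalHits;
    String prefix = totalHits.relation == TotalHits.Relation.EQUAL_TO ? "exactly " : "at least ";
    return prefix + totalHits.value + " hits";
  }
}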
@@ -31,7 +31,7 @@ public class TopFieldDocs extends TopDocs {
 * @param scoreDocs The top hits for the query.
 * @param fields The sort criteria used to find the top hits.
 */
-public TopFieldDocs (long totalHits, ScoreDoc[] scoreDocs, SortField[] fields) {
+public TopFieldDocs (TotalHits totalHits, ScoreDoc[] scoreDocs, SortField[] fields) {
 super (totalHits, scoreDocs);
 this.fields = fields;
 }
@@ -20,6 +20,7 @@ package org.apache.lucene.search;
 import java.io.IOException;
 
 import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.TotalHits.Relation;
 
 /**
 * A {@link Collector} implementation that collects the top-scoring hits,
@@ -49,22 +50,20 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
 
 private static class SimpleTopScoreDocCollector extends TopScoreDocCollector {
 
-private final int numHits;
 private final boolean trackTotalHits;
-private int sumMaxDoc;
-private int maxCollectedExactly = -1;
 
 SimpleTopScoreDocCollector(int numHits, boolean trackTotalHits) {
 super(numHits);
-this.numHits = numHits;
 this.trackTotalHits = trackTotalHits;
+if (trackTotalHits == false) {
+totalHitsRelation = Relation.GREATER_THAN_OR_EQUAL_TO;
+}
 }
 
 @Override
 public LeafCollector getLeafCollector(LeafReaderContext context)
 throws IOException {
 final int docBase = context.docBase;
-sumMaxDoc += context.reader().maxDoc();
 return new ScorerLeafCollector() {
 
 @Override
@@ -100,30 +99,12 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
 // since we tie-break on doc id and collect in doc id order, we can require
 // the next float
 scorer.setMinCompetitiveScore(Math.nextUp(pqTop.score));
-if (maxCollectedExactly < 0) {
-assert totalHits == numHits;
-maxCollectedExactly = doc + docBase;
-}
 }
 }
 
 };
 }
 
-@Override
-public TopDocs topDocs() {
-TopDocs topDocs = super.topDocs();
-if (trackTotalHits == false && maxCollectedExactly >= 0) {
-// assume matches are evenly spread in the doc id space
-// this may be completely off
-long totalHitsEstimate = (long) numHits * sumMaxDoc / (maxCollectedExactly + 1);
-// we take the max since the current topDocs.totalHits is a lower bound
-// of the total hit count
-topDocs.totalHits = Math.max(topDocs.totalHits, totalHitsEstimate);
-}
-return topDocs;
-}
-
 @Override
 public ScoreMode scoreMode() {
 return trackTotalHits ? ScoreMode.COMPLETE : ScoreMode.TOP_SCORES;
@@ -148,7 +129,9 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
 
 @Override
 protected TopDocs newTopDocs(ScoreDoc[] results, int start) {
-return results == null ? new TopDocs(totalHits, new ScoreDoc[0]) : new TopDocs(totalHits, results);
+return results == null
+? new TopDocs(new TotalHits(totalHits, totalHitsRelation), new ScoreDoc[0])
+: new TopDocs(new TotalHits(totalHits, totalHitsRelation), results);
 }
 
 @Override
@@ -240,7 +223,7 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
 return EMPTY_TOPDOCS;
 }
 
-return new TopDocs(totalHits, results);
+return new TopDocs(new TotalHits(totalHits, totalHitsRelation), results);
 }
 
 @Override
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.search;
+
+import java.util.Objects;
+
+/**
+ * Description of the total number of hits of a query. The total hit count
+ * can't generally be computed accurately without visiting all matches, which
+ * is costly for queries that match lots of documents. Given that it is often
+ * enough to have a lower bound of the number of hits, such as
+ * "there are more than 1000 hits", Lucene has options to stop counting as soon
+ * as a threshold has been reached in order to improve query times.
+ */
+public final class TotalHits {
+
+  /** How the {@link TotalHits#value} should be interpreted. */
+  public enum Relation {
+    /**
+     * The total hit count is equal to {@link TotalHits#value}.
+     */
+    EQUAL_TO,
+    /**
+     * The total hit count is greater than or equal to {@link TotalHits#value}.
+     */
+    GREATER_THAN_OR_EQUAL_TO
+  }
+
+  /**
+   * The value of the total hit count. Must be interpreted in the context of
+   * {@link #relation}.
+   */
+  public final long value;
+
+  /**
+   * Whether {@link #value} is the exact hit count, in which case
+   * {@link #relation} is equal to {@link Relation#EQUAL_TO}, or a lower bound
+   * of the total hit count, in which case {@link #relation} is equal to
+   * {@link Relation#GREATER_THAN_OR_EQUAL_TO}.
+   */
+  public final Relation relation;
+
+  /** Sole constructor. */
+  public TotalHits(long value, Relation relation) {
+    if (value < 0) {
+      throw new IllegalArgumentException("value must be >= 0, got " + value);
+    }
+    this.value = value;
+    this.relation = Objects.requireNonNull(relation);
+  }
+
+  @Override
+  public String toString() {
+    return value + (relation == Relation.EQUAL_TO ? "" : "+") + " hits";
+  }
+
+}
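A small usage sketch of the new class (values chosen for illustration only, not part of the commit):

import org.apache.lucene.search.TotalHits;

class TotalHitsToStringExample {
  public static void main(String[] args) {
    TotalHits exact = new TotalHits(1000, TotalHits.Relation.EQUAL_TO);
    TotalHits lowerBound = new TotalHits(1000, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
    System.out.println(exact);       // prints "1000 hits"
    System.out.println(lowerBound);  // prints "1000+ hits"
    // new TotalHits(-1, TotalHits.Relation.EQUAL_TO) would throw IllegalArgumentException
  }
}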
@@ -61,7 +61,7 @@ public class TestDemo extends LuceneTestCase {
 assertEquals(1, isearcher.count(new TermQuery(new Term("fieldname", longTerm))));
 Query query = new TermQuery(new Term("fieldname", "text"));
 TopDocs hits = isearcher.search(query, 1);
-assertEquals(1, hits.totalHits);
+assertEquals(1, hits.totalHits.value);
 // Iterate through the results:
 for (int i = 0; i < hits.scoreDocs.length; i++) {
 Document hitDoc = isearcher.doc(hits.scoreDocs[i].doc);
@@ -117,7 +117,7 @@ public class TestPerFieldDocValuesFormat extends BaseDocValuesFormatTestCase {
 assertEquals(1, isearcher.count(new TermQuery(new Term("fieldname", longTerm))));
 Query query = new TermQuery(new Term("fieldname", "text"));
 TopDocs hits = isearcher.search(query, 1);
-assertEquals(1, hits.totalHits);
+assertEquals(1, hits.totalHits.value);
 // Iterate through the results:
 for (int i = 0; i < hits.scoreDocs.length; i++) {
 int hitDocID = hits.scoreDocs[i].doc;
@@ -209,7 +209,7 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
 IndexReader reader = DirectoryReader.open(dir);
 IndexSearcher searcher = newSearcher(reader);
 TopDocs search = searcher.search(new TermQuery(t), num + 10);
-assertEquals(num, search.totalHits);
+assertEquals(num, search.totalHits.value);
 reader.close();
 
 }
@@ -487,7 +487,7 @@ public class TestField extends LuceneTestCase {
 
 IndexSearcher s = newSearcher(r);
 TopDocs hits = s.search(new TermQuery(new Term("binary", br)), 1);
-assertEquals(1, hits.totalHits);
+assertEquals(1, hits.totalHits.value);
 Document storedDoc = s.doc(hits.scoreDocs[0].doc);
 assertEquals(br, storedDoc.getField("binary").binaryValue());
 
@@ -60,7 +60,7 @@ public class TestBinaryTerms extends LuceneTestCase {
 bytes.bytes[1] = (byte) (255 - i);
 bytes.length = 2;
 TopDocs docs = is.search(new TermQuery(new Term("bytes", bytes)), 5);
-assertEquals(1, docs.totalHits);
+assertEquals(1, docs.totalHits.value);
 assertEquals("" + i, is.doc(docs.scoreDocs[0].doc).get("id"));
 }
 
@@ -112,7 +112,7 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {
 IndexSearcher indexSearcher = newSearcher(indexReader);
 TopDocs topDocs = indexSearcher.search(new TermQuery(new Term(TEXT_FIELD, "fleas")), 10);
 assertNotNull(topDocs);
-assertEquals(expectedTotalHits, topDocs.totalHits);
+assertEquals(expectedTotalHits, topDocs.totalHits.value);
 indexReader.close();
 realDirectory.close();
 }
@@ -71,7 +71,7 @@ public class TestForTooMuchCloning extends LuceneTestCase {
 new BytesRef("\uFFFF"),
 true,
 true), 10);
-assertTrue(hits.totalHits > 0);
+assertTrue(hits.totalHits.value > 0);
 final int queryCloneCount = dir.getInputCloneCount() - cloneCount;
 //System.out.println("query clone count=" + queryCloneCount);
 assertTrue("too many calls to IndexInput.clone during TermRangeQuery: " + queryCloneCount, queryCloneCount < 50);
@@ -1534,9 +1534,9 @@ public class TestIndexSorting extends LuceneTestCase {
 TermQuery termQuery = new TermQuery(new Term("id", Integer.toString(i)));
 final TopDocs topDocs = searcher.search(termQuery, 1);
 if (deleted.get(i)) {
-assertEquals(0, topDocs.totalHits);
+assertEquals(0, topDocs.totalHits.value);
 } else {
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 NumericDocValues values = MultiDocValues.getNumericValues(reader, "id");
 assertEquals(topDocs.scoreDocs[0].doc, values.advance(topDocs.scoreDocs[0].doc));
 assertEquals(i, values.longValue());
@@ -1586,9 +1586,9 @@ public class TestIndexSorting extends LuceneTestCase {
 TermQuery termQuery = new TermQuery(new Term("id", Integer.toString(i)));
 final TopDocs topDocs = searcher.search(termQuery, 1);
 if (deleted.get(i)) {
-assertEquals(0, topDocs.totalHits);
+assertEquals(0, topDocs.totalHits.value);
 } else {
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 NumericDocValues values = MultiDocValues.getNumericValues(reader, "id");
 assertEquals(topDocs.scoreDocs[0].doc, values.advance(topDocs.scoreDocs[0].doc));
 assertEquals(i, values.longValue());
@@ -1685,9 +1685,9 @@ public class TestIndexSorting extends LuceneTestCase {
 for (int i = 0; i < numDocs; ++i) {
 final TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(i))), 1);
 if (values.containsKey(i) == false) {
-assertEquals(0, topDocs.totalHits);
+assertEquals(0, topDocs.totalHits.value);
 } else {
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 NumericDocValues dvs = MultiDocValues.getNumericValues(reader, "foo");
 int docID = topDocs.scoreDocs[0].doc;
 assertEquals(docID, dvs.advance(docID));
@@ -1807,7 +1807,7 @@ public class TestIndexSorting extends LuceneTestCase {
 IndexSearcher searcher = newSearcher(reader);
 for (int i = 0; i < numDocs; ++i) {
 final TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(i))), 1);
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 NumericDocValues dvs = MultiDocValues.getNumericValues(reader, "bar");
 int hitDoc = topDocs.scoreDocs[0].doc;
 assertEquals(hitDoc, dvs.advance(hitDoc));
@@ -1865,8 +1865,8 @@ public class TestIndexSorting extends LuceneTestCase {
 Query query = new TermQuery(new Term("id", Integer.toString(i)));
 final TopDocs topDocs = searcher.search(query, 1);
 final TopDocs topDocs2 = searcher2.search(query, 1);
-assertEquals(topDocs.totalHits, topDocs2.totalHits);
-if (topDocs.totalHits == 1) {
+assertEquals(topDocs.totalHits.value, topDocs2.totalHits.value);
+if (topDocs.totalHits.value == 1) {
 NumericDocValues dvs1 = MultiDocValues.getNumericValues(reader, "foo");
 int hitDoc1 = topDocs.scoreDocs[0].doc;
 assertEquals(hitDoc1, dvs1.advance(hitDoc1));
@@ -2334,11 +2334,11 @@ public class TestIndexSorting extends LuceneTestCase {
 TopDocs hits2 = c2.topDocs();
 
 if (VERBOSE) {
-System.out.println(" topDocs query-time sort: totalHits=" + hits1.totalHits);
+System.out.println(" topDocs query-time sort: totalHits=" + hits1.totalHits.value);
 for(ScoreDoc scoreDoc : hits1.scoreDocs) {
 System.out.println(" " + scoreDoc.doc);
 }
-System.out.println(" topDocs index-time sort: totalHits=" + hits2.totalHits);
+System.out.println(" topDocs index-time sort: totalHits=" + hits2.totalHits.value);
 for(ScoreDoc scoreDoc : hits2.scoreDocs) {
 System.out.println(" " + scoreDoc.doc);
 }
@@ -1778,7 +1778,7 @@ public class TestIndexWriter extends LuceneTestCase {
 builder.add(new Term("body", "test"), 2);
 PhraseQuery pq = builder.build();
 // body:"just ? test"
-assertEquals(1, is.search(pq, 5).totalHits);
+assertEquals(1, is.search(pq, 5).totalHits.value);
 ir.close();
 dir.close();
 }
@@ -1810,7 +1810,7 @@ public class TestIndexWriter extends LuceneTestCase {
 builder.add(new Term("body", "test"), 3);
 PhraseQuery pq = builder.build();
 // body:"just ? ? test"
-assertEquals(1, is.search(pq, 5).totalHits);
+assertEquals(1, is.search(pq, 5).totalHits.value);
 ir.close();
 dir.close();
 }
@@ -3109,7 +3109,7 @@ public class TestIndexWriter extends LuceneTestCase {
 assertEquals(2, reader.docFreq(new Term("id", "1")));
 IndexSearcher searcher = new IndexSearcher(reader);
 TopDocs topDocs = searcher.search(new TermQuery(new Term("id", "1")), 10);
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 Document document = reader.document(topDocs.scoreDocs[0].doc);
 assertEquals("2", document.get("version"));
 
@@ -3125,7 +3125,7 @@ public class TestIndexWriter extends LuceneTestCase {
 oldReader.close();
 searcher = new IndexSearcher(reader);
 topDocs = searcher.search(new TermQuery(new Term("id", "1")), 10);
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 document = reader.document(topDocs.scoreDocs[0].doc);
 assertEquals("3", document.get("version"));
 
@@ -3138,7 +3138,7 @@ public class TestIndexWriter extends LuceneTestCase {
 oldReader.close();
 searcher = new IndexSearcher(reader);
 topDocs = searcher.search(new TermQuery(new Term("id", "1")), 10);
-assertEquals(0, topDocs.totalHits);
+assertEquals(0, topDocs.totalHits.value);
 int numSoftDeleted = 0;
 for (SegmentCommitInfo info : writer.cloneSegmentInfos()) {
 numSoftDeleted += info.getSoftDelCount();
@@ -3261,10 +3261,10 @@ public class TestIndexWriter extends LuceneTestCase {
 for (String id : ids) {
 TopDocs topDocs = searcher.search(new TermQuery(new Term("id", id)), 10);
 if (updateSeveralDocs) {
-assertEquals(2, topDocs.totalHits);
+assertEquals(2, topDocs.totalHits.value);
 assertEquals(Math.abs(topDocs.scoreDocs[0].doc - topDocs.scoreDocs[1].doc), 1);
 } else {
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 }
 }
 if (mixDeletes == false) {
@@ -477,7 +477,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 private long getHitCount(Directory dir, Term term) throws IOException {
 IndexReader reader = DirectoryReader.open(dir);
 IndexSearcher searcher = newSearcher(reader);
-long hitCount = searcher.search(new TermQuery(term), 1000).totalHits;
+long hitCount = searcher.search(new TermQuery(term), 1000).totalHits.value;
 reader.close();
 return hitCount;
 }
@@ -68,11 +68,11 @@ public class TestIndexWriterMaxDocs extends LuceneTestCase {
 assertEquals(IndexWriter.MAX_DOCS, ir.numDocs());
 IndexSearcher searcher = new IndexSearcher(ir);
 TopDocs hits = searcher.search(new TermQuery(new Term("field", "text")), 10);
-assertEquals(IndexWriter.MAX_DOCS, hits.totalHits);
+assertEquals(IndexWriter.MAX_DOCS, hits.totalHits.value);
 
 // Sort by docID reversed:
 hits = searcher.search(new TermQuery(new Term("field", "text")), 10, new Sort(new SortField(null, SortField.Type.DOC, true)));
-assertEquals(IndexWriter.MAX_DOCS, hits.totalHits);
+assertEquals(IndexWriter.MAX_DOCS, hits.totalHits.value);
 assertEquals(10, hits.scoreDocs.length);
 assertEquals(IndexWriter.MAX_DOCS-1, hits.scoreDocs[0].doc);
 ir.close();
@@ -700,7 +700,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 assertEquals(100, r.numDocs());
 Query q = new TermQuery(new Term("indexname", "test"));
 IndexSearcher searcher = newSearcher(r);
-assertEquals(100, searcher.search(q, 10).totalHits);
+assertEquals(100, searcher.search(q, 10).totalHits.value);
 
 expectThrows(AlreadyClosedException.class, () -> {
 DirectoryReader.openIfChanged(r);
@@ -768,7 +768,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 r = r2;
 Query q = new TermQuery(new Term("indexname", "test"));
 IndexSearcher searcher = newSearcher(r);
-final long count = searcher.search(q, 10).totalHits;
+final long count = searcher.search(q, 10).totalHits.value;
 assertTrue(count >= lastCount);
 lastCount = count;
 }
@@ -785,7 +785,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 }
 Query q = new TermQuery(new Term("indexname", "test"));
 IndexSearcher searcher = newSearcher(r);
-final long count = searcher.search(q, 10).totalHits;
+final long count = searcher.search(q, 10).totalHits.value;
 assertTrue(count >= lastCount);
 
 assertEquals(0, excs.size());
@@ -865,7 +865,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 r = r2;
 Query q = new TermQuery(new Term("indexname", "test"));
 IndexSearcher searcher = newSearcher(r);
-sum += searcher.search(q, 10).totalHits;
+sum += searcher.search(q, 10).totalHits.value;
 }
 }
 
@@ -880,7 +880,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 }
 Query q = new TermQuery(new Term("indexname", "test"));
 IndexSearcher searcher = newSearcher(r);
-sum += searcher.search(q, 10).totalHits;
+sum += searcher.search(q, 10).totalHits.value;
 assertTrue("no documents found at all", sum > 0);
 
 assertEquals(0, excs.size());
@@ -966,7 +966,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 .setMergedSegmentWarmer((r) -> {
 IndexSearcher s = newSearcher(r);
 TopDocs hits = s.search(new TermQuery(new Term("foo", "bar")), 10);
-assertEquals(20, hits.totalHits);
+assertEquals(20, hits.totalHits.value);
 didWarm.set(true);
 })
 .setMergePolicy(newLogMergePolicy(10))
@@ -228,7 +228,7 @@ public class TestIndexableField extends LuceneTestCase {
 }
 
 final TopDocs hits = s.search(new TermQuery(new Term("id", ""+id)), 1);
-assertEquals(1, hits.totalHits);
+assertEquals(1, hits.totalHits.value);
 final int docID = hits.scoreDocs[0].doc;
 final Document doc = s.doc(docID);
 final int endCounter = counter + fieldsPerDoc[id];
@@ -298,14 +298,14 @@ public class TestIndexableField extends LuceneTestCase {
 bq.add(new TermQuery(new Term("id", ""+id)), BooleanClause.Occur.MUST);
 bq.add(new TermQuery(new Term(name, "text")), BooleanClause.Occur.MUST);
 final TopDocs hits2 = s.search(bq.build(), 1);
-assertEquals(1, hits2.totalHits);
+assertEquals(1, hits2.totalHits.value);
 assertEquals(docID, hits2.scoreDocs[0].doc);
 
 bq = new BooleanQuery.Builder();
 bq.add(new TermQuery(new Term("id", ""+id)), BooleanClause.Occur.MUST);
 bq.add(new TermQuery(new Term(name, ""+counter)), BooleanClause.Occur.MUST);
 final TopDocs hits3 = s.search(bq.build(), 1);
-assertEquals(1, hits3.totalHits);
+assertEquals(1, hits3.totalHits.value);
 assertEquals(docID, hits3.scoreDocs[0].doc);
 }
 
@@ -126,7 +126,7 @@ public class TestIndexingSequenceNumbers extends LuceneTestCase {
 DirectoryReader r = w.getReader();
 IndexSearcher s = newSearcher(r);
 TopDocs hits = s.search(new TermQuery(id), 1);
-assertEquals("maxDoc: " + r.maxDoc(), 1, hits.totalHits);
+assertEquals("maxDoc: " + r.maxDoc(), 1, hits.totalHits.value);
 Document doc = r.document(hits.scoreDocs[0].doc);
 assertEquals(maxThread, doc.getField("thread").numericValue().intValue());
 r.close();
@@ -270,7 +270,7 @@ public class TestIndexingSequenceNumbers extends LuceneTestCase {
 TopDocs hits = s.search(new TermQuery(new Term("id", ""+id)), 1);
 
 if (expectedThreadIDs[id] != -1) {
-assertEquals(1, hits.totalHits);
+assertEquals(1, hits.totalHits.value);
 Document doc = r.document(hits.scoreDocs[0].doc);
 int actualThreadID = doc.getField("thread").numericValue().intValue();
 if (expectedThreadIDs[id] != actualThreadID) {
@@ -284,8 +284,8 @@ public class TestIndexingSequenceNumbers extends LuceneTestCase {
 }
 assertEquals("id=" + id, expectedThreadIDs[id], actualThreadID);
 }
-} else if (hits.totalHits != 0) {
-System.out.println("FAIL: id=" + id + " expectedThreadID=" + expectedThreadIDs[id] + " vs totalHits=" + hits.totalHits + " commitSeqNo=" + commitSeqNo + " numThreads=" + numThreads);
+} else if (hits.totalHits.value != 0) {
+System.out.println("FAIL: id=" + id + " expectedThreadID=" + expectedThreadIDs[id] + " vs totalHits=" + hits.totalHits.value + " commitSeqNo=" + commitSeqNo + " numThreads=" + numThreads);
 for(int threadID=0;threadID<threadOps.size();threadID++) {
 for(Operation op : threadOps.get(threadID)) {
 if (id == op.id) {
@@ -293,7 +293,7 @@ public class TestIndexingSequenceNumbers extends LuceneTestCase {
 }
 }
 }
-assertEquals(0, hits.totalHits);
+assertEquals(0, hits.totalHits.value);
 }
 }
 w.close();
@@ -439,7 +439,7 @@ public class TestIndexingSequenceNumbers extends LuceneTestCase {
 
 // We pre-add all ids up front:
 assert expectedThreadIDs[id] != -1;
-assertEquals(1, hits.totalHits);
+assertEquals(1, hits.totalHits.value);
 int hitDoc = hits.scoreDocs[0].doc;
 assertEquals(hitDoc, docValues.advance(hitDoc));
 int actualThreadID = (int) docValues.longValue();
@@ -434,7 +434,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
 BinaryDocValues binaryIdValues = null;
 for (LeafReaderContext c : reader.leaves()) {
 TopDocs topDocs = new IndexSearcher(c.reader()).search(new TermQuery(new Term("id", "" + doc)), 10);
-if (topDocs.totalHits == 1) {
+if (topDocs.totalHits.value == 1) {
 assertNull(idValues);
 assertNull(binaryIdValues);
 idValues = c.reader().getNumericDocValues("id");
@@ -442,7 +442,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
 binaryIdValues = c.reader().getBinaryDocValues("binaryId");
 assertEquals(topDocs.scoreDocs[0].doc, binaryIdValues.advance(topDocs.scoreDocs[0].doc));
 } else {
-assertEquals(0, topDocs.totalHits);
+assertEquals(0, topDocs.totalHits.value);
 }
 }
 
@@ -513,7 +513,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
 try {
 Long value = values[i];
 TopDocs topDocs = new IndexSearcher(reader).search(new TermQuery(new Term("id", "" + i)), 10);
-assertEquals(topDocs.totalHits, 1);
+assertEquals(topDocs.totalHits.value, 1);
 int docID = topDocs.scoreDocs[0].doc;
 List<LeafReaderContext> leaves = reader.leaves();
 int subIndex = ReaderUtil.subIndex(docID, leaves);
@@ -540,7 +540,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
 do { // retry if we just committing a merge
 try (DirectoryReader reader = writer.getReader()) {
 TopDocs topDocs = new IndexSearcher(reader).search(new TermQuery(doc), 10);
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 int theDoc = topDocs.scoreDocs[0].doc;
 seqId = writer.tryUpdateDocValue(reader, theDoc, fields);
 }
@@ -625,7 +625,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
 IndexSearcher searcher = new IndexSearcher(reader);
 
 TopDocs is_live = searcher.search(new DocValuesFieldExistsQuery("is_live"), 5);
-assertEquals(numHits, is_live.totalHits);
+assertEquals(numHits, is_live.totalHits.value);
 for (ScoreDoc doc : is_live.scoreDocs) {
 int id = Integer.parseInt(reader.document(doc.doc).get("id"));
 int i = ReaderUtil.subIndex(doc.doc, reader.leaves());
@@ -165,7 +165,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
 String id = "doc-" + expect.getKey();
 TopFieldDocs td = searcher.search(new TermQuery(new Term("id", id)), 1,
 new Sort(new SortField("val", SortField.Type.LONG)));
-assertEquals(id + " missing?", 1, td.totalHits);
+assertEquals(id + " missing?", 1, td.totalHits.value);
 assertEquals(id + " value", expect.getValue(), ((FieldDoc)td.scoreDocs[0]).fields[0]);
 }

@@ -86,7 +86,7 @@ public class TestReadOnlyIndex extends LuceneTestCase {
 assertEquals(1, isearcher.count(new TermQuery(new Term("fieldname", longTerm))));
 Query query = new TermQuery(new Term("fieldname", "text"));
 TopDocs hits = isearcher.search(query, 1);
-assertEquals(1, hits.totalHits);
+assertEquals(1, hits.totalHits.value);
 // Iterate through the results:
 for (int i = 0; i < hits.scoreDocs.length; i++) {
 Document hitDoc = isearcher.doc(hits.scoreDocs[i].doc);
@@ -79,7 +79,7 @@ public class TestRollingUpdates extends LuceneTestCase {
 final boolean doUpdate;
 if (s != null && updateCount < SIZE) {
 TopDocs hits = s.search(new TermQuery(idTerm), 1);
-assertEquals(1, hits.totalHits);
+assertEquals(1, hits.totalHits.value);
 doUpdate = w.tryDeleteDocument(r, hits.scoreDocs[0].doc) == -1;
 if (VERBOSE) {
 if (doUpdate) {

@@ -313,10 +313,10 @@ public class TestSoftDeletesRetentionMergePolicy extends LuceneTestCase {
 for (String id : ids) {
 TopDocs topDocs = searcher.search(new TermQuery(new Term("id", id)), 10);
 if (updateSeveralDocs) {
-assertEquals(2, topDocs.totalHits);
+assertEquals(2, topDocs.totalHits.value);
 assertEquals(Math.abs(topDocs.scoreDocs[0].doc - topDocs.scoreDocs[1].doc), 1);
 } else {
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 }
 }
 writer.addDocument(new Document()); // add a dummy doc to trigger a segment here
@@ -355,13 +355,13 @@ public class TestSoftDeletesRetentionMergePolicy extends LuceneTestCase {
 }
 });
 TopDocs seq_id = searcher.search(IntPoint.newRangeQuery("seq_id", seqIds.intValue() - 50, Integer.MAX_VALUE), 10);
-assertTrue(seq_id.totalHits + " hits", seq_id.totalHits >= 50);
+assertTrue(seq_id.totalHits.value + " hits", seq_id.totalHits.value >= 50);
 searcher = new IndexSearcher(reader);
 for (String id : ids) {
 if (updateSeveralDocs) {
-assertEquals(2, searcher.search(new TermQuery(new Term("id", id)), 10).totalHits);
+assertEquals(2, searcher.search(new TermQuery(new Term("id", id)), 10).totalHits.value);
 } else {
-assertEquals(1, searcher.search(new TermQuery(new Term("id", id)), 10).totalHits);
+assertEquals(1, searcher.search(new TermQuery(new Term("id", id)), 10).totalHits.value);
 }
 }
 IOUtils.close(reader, writer, dir);

@@ -595,7 +595,7 @@ public class TestSoftDeletesRetentionMergePolicy extends LuceneTestCase {
 while (true) {
 try (DirectoryReader reader = writer.getReader()) {
 TopDocs topDocs = new IndexSearcher(new NoDeletesWrapper(reader)).search(new TermQuery(new Term("id", "1")), 1);
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 if (writer.tryDeleteDocument(reader, topDocs.scoreDocs[0].doc) > 0) {
 break;
 }

@@ -635,7 +635,7 @@ public class TestSoftDeletesRetentionMergePolicy extends LuceneTestCase {
 do { // retry if we just committing a merge
 try (DirectoryReader reader = writer.getReader()) {
 TopDocs topDocs = new IndexSearcher(new NoDeletesWrapper(reader)).search(new TermQuery(doc), 10);
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);
 int theDoc = topDocs.scoreDocs[0].doc;
 seqId = writer.tryUpdateDocValue(reader, theDoc, fields);
 }
@@ -108,9 +108,9 @@ public class TestStressDeletes extends LuceneTestCase {
 int id = ent.getKey();
 TopDocs hits = s.search(new TermQuery(new Term("id", ""+id)), 1);
 if (ent.getValue()) {
-assertEquals(1, hits.totalHits);
+assertEquals(1, hits.totalHits.value);
 } else {
-assertEquals(0, hits.totalHits);
+assertEquals(0, hits.totalHits.value);
 }
 }
 r.close();

@@ -340,11 +340,11 @@ public class TestStressNRT extends LuceneTestCase {
 Query q = new TermQuery(new Term("id",Integer.toString(id)));
 TopDocs results = searcher.search(q, 10);

-if (results.totalHits == 0 && tombstones) {
+if (results.totalHits.value == 0 && tombstones) {
 // if we couldn't find the doc, look for its tombstone
 q = new TermQuery(new Term("id","-"+Integer.toString(id)));
 results = searcher.search(q, 1);
-if (results.totalHits == 0) {
+if (results.totalHits.value == 0) {
 if (val == -1L) {
 // expected... no doc was added yet
 r.decRef();

@@ -354,17 +354,17 @@ public class TestStressNRT extends LuceneTestCase {
 }
 }

-if (results.totalHits == 0 && !tombstones) {
+if (results.totalHits.value == 0 && !tombstones) {
 // nothing to do - we can't tell anything from a deleted doc without tombstones
 } else {
 // we should have found the document, or its tombstone
-if (results.totalHits != 1) {
+if (results.totalHits.value != 1) {
 System.out.println("FAIL: hits id:" + id + " val=" + val);
 for(ScoreDoc sd : results.scoreDocs) {
 final Document doc = r.document(sd.doc);
 System.out.println("  docID=" + sd.doc + " id:" + doc.get("id") + " foundVal=" + doc.get(field));
 }
-fail("id=" + id + " reader=" + r + " totalHits=" + results.totalHits);
+fail("id=" + id + " reader=" + r + " totalHits=" + results.totalHits.value);
 }
 Document doc = searcher.doc(results.scoreDocs[0].doc);
 long foundVal = Long.parseLong(doc.get(field));
@@ -83,7 +83,7 @@ public class TestTryDelete extends LuceneTestCase

 TopDocs topDocs = searcher.search(new TermQuery(new Term("foo", "0")),
 100);
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);

 long result;
 if (random().nextBoolean()) {

@@ -111,7 +111,7 @@ public class TestTryDelete extends LuceneTestCase

 topDocs = searcher.search(new TermQuery(new Term("foo", "0")), 100);

-assertEquals(0, topDocs.totalHits);
+assertEquals(0, topDocs.totalHits.value);
 }

 public void testTryDeleteDocumentCloseAndReopen ()

@@ -128,7 +128,7 @@ public class TestTryDelete extends LuceneTestCase

 TopDocs topDocs = searcher.search(new TermQuery(new Term("foo", "0")),
 100);
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);

 long result = writer.tryDeleteDocument(DirectoryReader.open(writer), 0);

@@ -144,7 +144,7 @@ public class TestTryDelete extends LuceneTestCase

 topDocs = searcher.search(new TermQuery(new Term("foo", "0")), 100);

-assertEquals(0, topDocs.totalHits);
+assertEquals(0, topDocs.totalHits.value);

 writer.close();

@@ -152,7 +152,7 @@ public class TestTryDelete extends LuceneTestCase

 topDocs = searcher.search(new TermQuery(new Term("foo", "0")), 100);

-assertEquals(0, topDocs.totalHits);
+assertEquals(0, topDocs.totalHits.value);

 }

@@ -170,7 +170,7 @@ public class TestTryDelete extends LuceneTestCase

 TopDocs topDocs = searcher.search(new TermQuery(new Term("foo", "0")),
 100);
-assertEquals(1, topDocs.totalHits);
+assertEquals(1, topDocs.totalHits.value);

 long result = writer.deleteDocuments(new TermQuery(new Term("foo", "0")));

@@ -186,6 +186,6 @@ public class TestTryDelete extends LuceneTestCase

 topDocs = searcher.search(new TermQuery(new Term("foo", "0")), 100);

-assertEquals(0, topDocs.totalHits);
+assertEquals(0, topDocs.totalHits.value);
 }
 }
@@ -105,7 +105,7 @@ public class TermInSetQueryTest extends LuceneTestCase {
 final int maxDoc = searcher.getIndexReader().maxDoc();
 final TopDocs td1 = searcher.search(q1, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
 final TopDocs td2 = searcher.search(q2, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
-assertEquals(td1.totalHits, td2.totalHits);
+assertEquals(td1.totalHits.value, td2.totalHits.value);
 for (int i = 0; i < td1.scoreDocs.length; ++i) {
 assertEquals(td1.scoreDocs[i].doc, td2.scoreDocs[i].doc);
 if (scores) {

@@ -89,7 +89,7 @@ public class TestAutomatonQuery extends LuceneTestCase {
 if (VERBOSE) {
 System.out.println("TEST: run aq=" + query);
 }
-return searcher.search(query, 5).totalHits;
+return searcher.search(query, 5).totalHits.value;
 }

 private void assertAutomatonHits(int expected, Automaton automaton)

@@ -95,7 +95,7 @@ public class TestAutomatonQueryUnicode extends LuceneTestCase {
 }

 private long automatonQueryNrHits(AutomatonQuery query) throws IOException {
-return searcher.search(query, 5).totalHits;
+return searcher.search(query, 5).totalHits.value;
 }

 private void assertAutomatonHits(int expected, Automaton automaton)

@@ -108,7 +108,7 @@ public class TestBlendedTermQuery extends LuceneTestCase {
 .build();

 TopDocs topDocs = searcher.search(query, 20);
-assertEquals(11, topDocs.totalHits);
+assertEquals(11, topDocs.totalHits.value);
 // All docs must have the same score
 for (int i = 0; i < topDocs.scoreDocs.length; ++i) {
 assertEquals(topDocs.scoreDocs[0].score, topDocs.scoreDocs[i].score, 0.0f);

@@ -255,7 +255,7 @@ public class TestBoolean2 extends LuceneTestCase {

 // sanity check expected num matches in bigSearcher
 assertEquals(mulFactor * collector.totalHits,
-bigSearcher.search(query, 1).totalHits);
+bigSearcher.search(query, 1).totalHits.value);

 // now check 2 diff scorers from the bigSearcher as well
 collector = TopScoreDocCollector.create(topDocsToCheck);

@@ -399,7 +399,7 @@ public class TestBoolean2 extends LuceneTestCase {
 q3.add(q1, BooleanClause.Occur.SHOULD);
 q3.add(new PrefixQuery(new Term("field2", "b")), BooleanClause.Occur.SHOULD);
 TopDocs hits4 = bigSearcher.search(q3.build(), 1);
-assertEquals(mulFactor*collector.totalHits + NUM_EXTRA_DOCS/2, hits4.totalHits);
+assertEquals(mulFactor*collector.totalHits + NUM_EXTRA_DOCS/2, hits4.totalHits.value);

 // test diff (randomized) scorers produce the same results on bigSearcher as well
 collector = TopFieldCollector.create(sort, 1000 * mulFactor, false);
@@ -359,19 +359,19 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
 private void assertSubsetOfSameScores(Query q, TopDocs top1, TopDocs top2) {
 // The constrained query
 // should be a subset to the unconstrained query.
-if (top2.totalHits > top1.totalHits) {
+if (top2.totalHits.value > top1.totalHits.value) {
 fail("Constrained results not a subset:\n"
 + CheckHits.topdocsString(top1,0,0)
 + CheckHits.topdocsString(top2,0,0)
 + "for query:" + q.toString());
 }

-for (int hit=0; hit<top2.totalHits; hit++) {
+for (int hit=0; hit<top2.totalHits.value; hit++) {
 int id = top2.scoreDocs[hit].doc;
 float score = top2.scoreDocs[hit].score;
 boolean found=false;
 // find this doc in other hits
-for (int other=0; other<top1.totalHits; other++) {
+for (int other=0; other<top1.totalHits.value; other++) {
 if (top1.scoreDocs[other].doc == id) {
 found=true;
 float otherScore = top1.scoreDocs[other].score;

@@ -53,7 +53,7 @@ public class TestBooleanOr extends LuceneTestCase {

 private long search(Query q) throws IOException {
 QueryUtils.check(random(), q,searcher);
-return searcher.search(q, 1000).totalHits;
+return searcher.search(q, 1000).totalHits.value;
 }

 public void testElements() throws IOException {

@@ -202,7 +202,7 @@ public class TestBooleanQuery extends LuceneTestCase {
 // PhraseQuery w/ no terms added returns a null scorer
 PhraseQuery pq = new PhraseQuery("field", new String[0]);
 q.add(pq, BooleanClause.Occur.SHOULD);
-assertEquals(1, s.search(q.build(), 10).totalHits);
+assertEquals(1, s.search(q.build(), 10).totalHits.value);

 // A required clause which returns null scorer should return null scorer to
 // IndexSearcher.

@@ -210,12 +210,12 @@ public class TestBooleanQuery extends LuceneTestCase {
 pq = new PhraseQuery("field", new String[0]);
 q.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
 q.add(pq, BooleanClause.Occur.MUST);
-assertEquals(0, s.search(q.build(), 10).totalHits);
+assertEquals(0, s.search(q.build(), 10).totalHits.value);

 DisjunctionMaxQuery dmq = new DisjunctionMaxQuery(
 Arrays.asList(new TermQuery(new Term("field", "a")), pq),
 1.0f);
-assertEquals(1, s.search(dmq, 10).totalHits);
+assertEquals(1, s.search(dmq, 10).totalHits.value);

 r.close();
 w.close();

@@ -247,13 +247,13 @@ public class TestBooleanQuery extends LuceneTestCase {

 MultiReader multireader = new MultiReader(reader1, reader2);
 IndexSearcher searcher = newSearcher(multireader);
-assertEquals(0, searcher.search(query.build(), 10).totalHits);
+assertEquals(0, searcher.search(query.build(), 10).totalHits.value);

 final ExecutorService es = Executors.newCachedThreadPool(new NamedThreadFactory("NRT search threads"));
 searcher = new IndexSearcher(multireader, es);
 if (VERBOSE)
 System.out.println("rewritten form: " + searcher.rewrite(query.build()));
-assertEquals(0, searcher.search(query.build(), 10).totalHits);
+assertEquals(0, searcher.search(query.build(), 10).totalHits.value);
 es.shutdown();
 es.awaitTermination(1, TimeUnit.SECONDS);
@@ -420,7 +420,7 @@ public class TestBooleanQuery extends LuceneTestCase {

 // No doc can match: BQ has only 2 clauses and we are asking for minShouldMatch=4
 bq.setMinimumNumberShouldMatch(4);
-assertEquals(0, s.search(bq.build(), 1).totalHits);
+assertEquals(0, s.search(bq.build(), 1).totalHits.value);
 r.close();
 w.close();
 dir.close();

@@ -413,7 +413,7 @@ public class TestBooleanRewrites extends LuceneTestCase {
 }

 private void assertEquals(TopDocs td1, TopDocs td2) {
-assertEquals(td1.totalHits, td2.totalHits);
+assertEquals(td1.totalHits.value, td2.totalHits.value);
 assertEquals(td1.scoreDocs.length, td2.scoreDocs.length);
 Map<Integer, Float> expectedScores = Arrays.stream(td1.scoreDocs).collect(Collectors.toMap(sd -> sd.doc, sd -> sd.score));
 Set<Integer> actualResultSet = Arrays.stream(td2.scoreDocs).map(sd -> sd.doc).collect(Collectors.toSet());

@@ -149,7 +149,7 @@ public class TestBooleanScorer extends LuceneTestCase {
 q2.add(q1.build(), BooleanClause.Occur.SHOULD);
 q2.add(new CrazyMustUseBulkScorerQuery(), BooleanClause.Occur.SHOULD);

-assertEquals(1, s.search(q2.build(), 10).totalHits);
+assertEquals(1, s.search(q2.build(), 10).totalHits.value);
 r.close();
 dir.close();
 }

@@ -79,7 +79,7 @@ public class TestConjunctions extends LuceneTestCase {
 bq.add(new TermQuery(new Term(F1, "nutch")), BooleanClause.Occur.MUST);
 bq.add(new TermQuery(new Term(F2, "is")), BooleanClause.Occur.MUST);
 TopDocs td = searcher.search(bq.build(), 3);
-assertEquals(1, td.totalHits);
+assertEquals(1, td.totalHits.value);
 assertEquals(3F, td.scoreDocs[0].score, 0.001F); // f1:nutch + f2:is + f2:is
 }
@@ -107,7 +107,7 @@ public class TestControlledRealTimeReopenThread extends ThreadedIndexingAndSearc
 System.out.println(Thread.currentThread().getName() + ": nrt: got deletes searcher=" + s);
 }
 try {
-assertEquals(docs.size(), s.search(new TermQuery(id), 10).totalHits);
+assertEquals(docs.size(), s.search(new TermQuery(id), 10).totalHits.value);
 } finally {
 nrtDeletes.release(s);
 }

@@ -131,7 +131,7 @@ public class TestControlledRealTimeReopenThread extends ThreadedIndexingAndSearc
 System.out.println(Thread.currentThread().getName() + ": nrt: got noDeletes searcher=" + s);
 }
 try {
-assertEquals(docs.size(), s.search(new TermQuery(id), 10).totalHits);
+assertEquals(docs.size(), s.search(new TermQuery(id), 10).totalHits.value);
 } finally {
 nrtNoDeletes.release(s);
 }

@@ -155,7 +155,7 @@ public class TestControlledRealTimeReopenThread extends ThreadedIndexingAndSearc
 System.out.println(Thread.currentThread().getName() + ": nrt: got noDeletes searcher=" + s);
 }
 try {
-assertEquals(1, s.search(new TermQuery(id), 10).totalHits);
+assertEquals(1, s.search(new TermQuery(id), 10).totalHits.value);
 } finally {
 nrtNoDeletes.release(s);
 }

@@ -178,7 +178,7 @@ public class TestControlledRealTimeReopenThread extends ThreadedIndexingAndSearc
 System.out.println(Thread.currentThread().getName() + ": nrt: got deletes searcher=" + s);
 }
 try {
-assertEquals(1, s.search(new TermQuery(id), 10).totalHits);
+assertEquals(1, s.search(new TermQuery(id), 10).totalHits.value);
 } finally {
 nrtDeletes.release(s);
 }

@@ -201,7 +201,7 @@ public class TestControlledRealTimeReopenThread extends ThreadedIndexingAndSearc
 System.out.println(Thread.currentThread().getName() + ": nrt: got deletes searcher=" + s);
 }
 try {
-assertEquals(0, s.search(new TermQuery(id), 10).totalHits);
+assertEquals(0, s.search(new TermQuery(id), 10).totalHits.value);
 } finally {
 nrtDeletes.release(s);
 }

@@ -524,7 +524,7 @@ public class TestControlledRealTimeReopenThread extends ThreadedIndexingAndSearc
 IndexSearcher searcher = sm.acquire();
 TopDocs td = searcher.search(new TermQuery(new Term("count", i + "")), 10);
 sm.release(searcher);
-assertEquals(1, td.totalHits);
+assertEquals(1, td.totalHits.value);
 }

 for(Thread commitThread : commitThreads) {
@@ -175,7 +175,7 @@ public class TestDocValuesQueries extends LuceneTestCase {
 final int maxDoc = searcher.getIndexReader().maxDoc();
 final TopDocs td1 = searcher.search(q1, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
 final TopDocs td2 = searcher.search(q2, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
-assertEquals(td1.totalHits, td2.totalHits);
+assertEquals(td1.totalHits.value, td2.totalHits.value);
 for (int i = 0; i < td1.scoreDocs.length; ++i) {
 assertEquals(td1.scoreDocs[i].doc, td2.scoreDocs[i].doc);
 if (scores) {

@@ -169,7 +169,7 @@ public class TestDoubleValuesSource extends LuceneTestCase {

 CheckHits.checkEqual(query, expected.scoreDocs, actual.scoreDocs);

-if (size < actual.totalHits) {
+if (size < actual.totalHits.value) {
 expected = searcher.searchAfter(expected.scoreDocs[size-1], query, size, sort);
 actual = searcher.searchAfter(actual.scoreDocs[size-1], query, size, mutatedSort);
 CheckHits.checkEqual(query, expected.scoreDocs, actual.scoreDocs);

@@ -202,7 +202,7 @@ public class TestFieldValueQuery extends LuceneTestCase {
 final int maxDoc = searcher.getIndexReader().maxDoc();
 final TopDocs td1 = searcher.search(q1, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
 final TopDocs td2 = searcher.search(q2, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
-assertEquals(td1.totalHits, td2.totalHits);
+assertEquals(td1.totalHits.value, td2.totalHits.value);
 for (int i = 0; i < td1.scoreDocs.length; ++i) {
 assertEquals(td1.scoreDocs[i].doc, td2.scoreDocs[i].doc);
 if (scores) {

@@ -367,7 +367,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
 IndexSearcher searcher = newSearcher(mr);
 FuzzyQuery fq = new FuzzyQuery(new Term("field", "z123456"), 1, 0, 2, false);
 TopDocs docs = searcher.search(fq, 2);
-assertEquals(5, docs.totalHits); // 5 docs, from the a and b's
+assertEquals(5, docs.totalHits.value); // 5 docs, from the a and b's
 mr.close();
 ir1.close();
 ir2.close();

@@ -165,7 +165,7 @@ public class TestIndexSearcher extends LuceneTestCase {
 .add(new TermQuery(new Term("foo", "baz")), Occur.SHOULD)
 .build()
 )) {
-assertEquals(searcher.count(query), searcher.search(query, 1).totalHits);
+assertEquals(searcher.count(query), searcher.search(query, 1).totalHits.value);
 }
 reader.close();
 }
@@ -148,7 +148,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
 TotalHitCountCollector collector = new TotalHitCountCollector();
 searcher.search(q, collector); // will use the cache
 final int totalHits1 = collector.getTotalHits();
-final long totalHits2 = searcher.search(q, 1).totalHits; // will not use the cache because of scores
+final long totalHits2 = searcher.search(q, 1).totalHits.value; // will not use the cache because of scores
 assertEquals(totalHits2, totalHits1);
 } finally {
 mgr.release(searcher);

@@ -62,8 +62,8 @@ public class TestLiveFieldValues extends LuceneTestCase {
 protected Integer lookupFromSearcher(IndexSearcher s, String id) throws IOException {
 TermQuery tq = new TermQuery(new Term("id", id));
 TopDocs hits = s.search(tq, 1);
-assertTrue(hits.totalHits <= 1);
+assertTrue(hits.totalHits.value <= 1);
-if (hits.totalHits == 0) {
+if (hits.totalHits.value == 0) {
 return null;
 } else {
 Document doc = s.doc(hits.scoreDocs[0].doc);

@@ -141,7 +141,7 @@ public class TestLongValuesSource extends LuceneTestCase {

 CheckHits.checkEqual(query, expected.scoreDocs, actual.scoreDocs);

-if (size < actual.totalHits) {
+if (size < actual.totalHits.value) {
 expected = searcher.searchAfter(expected.scoreDocs[size-1], query, size, sort);
 actual = searcher.searchAfter(actual.scoreDocs[size-1], query, size, mutatedSort);
 CheckHits.checkEqual(query, expected.scoreDocs, actual.scoreDocs);

@@ -376,10 +376,10 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
 mpqb.add(new Term[] {new Term("field", "b"), new Term("field", "c")}, 0);
 }
 TopDocs hits = s.search(mpqb.build(), 2);
-assertEquals(2, hits.totalHits);
+assertEquals(2, hits.totalHits.value);
 assertEquals(hits.scoreDocs[0].score, hits.scoreDocs[1].score, 1e-5);
 /*
-for(int hit=0;hit<hits.totalHits;hit++) {
+for(int hit=0;hit<hits.totalHits.value;hit++) {
 ScoreDoc sd = hits.scoreDocs[hit];
 System.out.println("  hit doc=" + sd.doc + " score=" + sd.score);
 }

@@ -463,10 +463,10 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
 }

 TopDocs hits = s.search(q, 1);
-assertEquals("wrong number of results", nExpected, hits.totalHits);
+assertEquals("wrong number of results", nExpected, hits.totalHits.value);

 if (VERBOSE) {
-for(int hit=0;hit<hits.totalHits;hit++) {
+for(int hit=0;hit<hits.totalHits.value;hit++) {
 ScoreDoc sd = hits.scoreDocs[hit];
 System.out.println("  hit doc=" + sd.doc + " score=" + sd.score);
 }
@@ -64,26 +64,26 @@ public class TestNeedsScores extends LuceneTestCase {
 BooleanQuery.Builder bq = new BooleanQuery.Builder();
 bq.add(new AssertNeedsScores(required, ScoreMode.COMPLETE), BooleanClause.Occur.MUST);
 bq.add(new AssertNeedsScores(prohibited, ScoreMode.COMPLETE_NO_SCORES), BooleanClause.Occur.MUST_NOT);
-assertEquals(4, searcher.search(bq.build(), 5).totalHits); // we exclude 3
+assertEquals(4, searcher.search(bq.build(), 5).totalHits.value); // we exclude 3
 }

 /** nested inside constant score query */
 public void testConstantScoreQuery() throws Exception {
 Query term = new TermQuery(new Term("field", "this"));
 Query constantScore = new ConstantScoreQuery(new AssertNeedsScores(term, ScoreMode.COMPLETE_NO_SCORES));
-assertEquals(5, searcher.search(constantScore, 5).totalHits);
+assertEquals(5, searcher.search(constantScore, 5).totalHits.value);
 }

 /** when not sorting by score */
 public void testSortByField() throws Exception {
 Query query = new AssertNeedsScores(new MatchAllDocsQuery(), ScoreMode.COMPLETE_NO_SCORES);
-assertEquals(5, searcher.search(query, 5, Sort.INDEXORDER).totalHits);
+assertEquals(5, searcher.search(query, 5, Sort.INDEXORDER).totalHits.value);
 }

 /** when sorting by score */
 public void testSortByScore() throws Exception {
 Query query = new AssertNeedsScores(new MatchAllDocsQuery(), ScoreMode.COMPLETE);
-assertEquals(5, searcher.search(query, 5, Sort.RELEVANCE).totalHits);
+assertEquals(5, searcher.search(query, 5, Sort.RELEVANCE).totalHits.value);
 }

 /**

@@ -186,7 +186,7 @@ public class TestNormsFieldExistsQuery extends LuceneTestCase {
 final int maxDoc = searcher.getIndexReader().maxDoc();
 final TopDocs td1 = searcher.search(q1, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
 final TopDocs td2 = searcher.search(q2, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
-assertEquals(td1.totalHits, td2.totalHits);
+assertEquals(td1.totalHits.value, td2.totalHits.value);
 for (int i = 0; i < td1.scoreDocs.length; ++i) {
 assertEquals(td1.scoreDocs[i].doc, td2.scoreDocs[i].doc);
 if (scores) {

@@ -108,7 +108,7 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
 }
 TopDocs td = tdc.topDocs();
 ScoreDoc[] sd = td.scoreDocs;
-assertEquals(numPositiveScores, td.totalHits);
+assertEquals(numPositiveScores, td.totalHits.value);
 for (int i = 0; i < sd.length; i++) {
 assertTrue("only positive scores should return: " + sd[i].score, sd[i].score > 0);
 }

@@ -85,12 +85,12 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase {
 public void testPrefixQuery() throws Exception {
 Query query = new PrefixQuery(new Term(FIELD, "tang"));
 assertEquals("Number of matched documents", 2,
-searcher.search(query, 1000).totalHits);
+searcher.search(query, 1000).totalHits.value);
 }
 public void testTermQuery() throws Exception {
 Query query = new TermQuery(new Term(FIELD, "tangfulin"));
 assertEquals("Number of matched documents", 2,
-searcher.search(query, 1000).totalHits);
+searcher.search(query, 1000).totalHits.value);
 }
 public void testTermBooleanQuery() throws Exception {
 BooleanQuery.Builder query = new BooleanQuery.Builder();
@@ -99,7 +99,7 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase {
 query.add(new TermQuery(new Term(FIELD, "notexistnames")),
 BooleanClause.Occur.SHOULD);
 assertEquals("Number of matched documents", 2,
-searcher.search(query.build(), 1000).totalHits);
+searcher.search(query.build(), 1000).totalHits.value);

 }
 public void testPrefixBooleanQuery() throws Exception {

@@ -109,6 +109,6 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase {
 query.add(new TermQuery(new Term(FIELD, "notexistnames")),
 BooleanClause.Occur.SHOULD);
 assertEquals("Number of matched documents", 2,
-searcher.search(query.build(), 1000).totalHits);
+searcher.search(query.build(), 1000).totalHits.value);
 }
 }

@@ -83,7 +83,7 @@ public class TestPrefixQuery extends LuceneTestCase {
 PrefixQuery query = new PrefixQuery(new Term("field", ""));
 IndexSearcher searcher = newSearcher(reader);

-assertEquals(1, searcher.search(query, 1000).totalHits);
+assertEquals(1, searcher.search(query, 1000).totalHits.value);
 writer.close();
 reader.close();
 directory.close();
@@ -79,7 +79,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 searcher.setSimilarity(new ClassicSimilarity());

 TopDocs hits = searcher.search(bq.build(), 10);
-assertEquals(2, hits.totalHits);
+assertEquals(2, hits.totalHits.value);
 assertEquals("0", searcher.doc(hits.scoreDocs[0].doc).get("id"));
 assertEquals("1", searcher.doc(hits.scoreDocs[1].doc).get("id"));

@@ -89,7 +89,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 TopDocs hits2 = QueryRescorer.rescore(searcher, hits, pq, 2.0, 10);

 // Resorting changed the order:
-assertEquals(2, hits2.totalHits);
+assertEquals(2, hits2.totalHits.value);
 assertEquals("1", searcher.doc(hits2.scoreDocs[0].doc).get("id"));
 assertEquals("0", searcher.doc(hits2.scoreDocs[1].doc).get("id"));

@@ -101,7 +101,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 TopDocs hits3 = QueryRescorer.rescore(searcher, hits, snq, 2.0, 10);

 // Resorting changed the order:
-assertEquals(2, hits3.totalHits);
+assertEquals(2, hits3.totalHits.value);
 assertEquals("1", searcher.doc(hits3.scoreDocs[0].doc).get("id"));
 assertEquals("0", searcher.doc(hits3.scoreDocs[1].doc).get("id"));

@@ -134,7 +134,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 searcher.setSimilarity(new ClassicSimilarity());

 TopDocs hits = searcher.search(bq.build(), 10);
-assertEquals(2, hits.totalHits);
+assertEquals(2, hits.totalHits.value);
 assertEquals("0", searcher.doc(hits.scoreDocs[0].doc).get("id"));
 assertEquals("1", searcher.doc(hits.scoreDocs[1].doc).get("id"));

@@ -143,7 +143,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 TopDocs hits2 = QueryRescorer.rescore(searcher, hits, tq, 2.0, 10);

 // Just testing that null scorer is handled.
-assertEquals(2, hits2.totalHits);
+assertEquals(2, hits2.totalHits.value);

 r.close();
 dir.close();

@@ -172,7 +172,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 IndexSearcher searcher = getSearcher(r);

 TopDocs hits = searcher.search(bq.build(), 10);
-assertEquals(2, hits.totalHits);
+assertEquals(2, hits.totalHits.value);
 assertEquals("0", searcher.doc(hits.scoreDocs[0].doc).get("id"));
 assertEquals("1", searcher.doc(hits.scoreDocs[1].doc).get("id"));

@@ -192,7 +192,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 }.rescore(searcher, hits, 10);

 // Resorting didn't change the order:
-assertEquals(2, hits2.totalHits);
+assertEquals(2, hits2.totalHits.value);
 assertEquals("0", searcher.doc(hits2.scoreDocs[0].doc).get("id"));
 assertEquals("1", searcher.doc(hits2.scoreDocs[1].doc).get("id"));

@@ -223,7 +223,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 IndexSearcher searcher = getSearcher(r);

 TopDocs hits = searcher.search(bq.build(), 10);
-assertEquals(2, hits.totalHits);
+assertEquals(2, hits.totalHits.value);
 assertEquals("0", searcher.doc(hits.scoreDocs[0].doc).get("id"));
 assertEquals("1", searcher.doc(hits.scoreDocs[1].doc).get("id"));

@@ -244,7 +244,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 TopDocs hits2 = rescorer.rescore(searcher, hits, 10);

 // Resorting changed the order:
-assertEquals(2, hits2.totalHits);
+assertEquals(2, hits2.totalHits.value);
 assertEquals("1", searcher.doc(hits2.scoreDocs[0].doc).get("id"));
 assertEquals("0", searcher.doc(hits2.scoreDocs[1].doc).get("id"));

@@ -298,7 +298,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 IndexSearcher searcher = getSearcher(r);

 TopDocs hits = searcher.search(bq.build(), 10);
-assertEquals(2, hits.totalHits);
+assertEquals(2, hits.totalHits.value);
 assertEquals("0", searcher.doc(hits.scoreDocs[0].doc).get("id"));
 assertEquals("1", searcher.doc(hits.scoreDocs[1].doc).get("id"));

@@ -308,7 +308,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 TopDocs hits2 = QueryRescorer.rescore(searcher, hits, pq, 2.0, 10);

 // Resorting changed the order:
-assertEquals(2, hits2.totalHits);
+assertEquals(2, hits2.totalHits.value);
 assertEquals("1", searcher.doc(hits2.scoreDocs[0].doc).get("id"));
 assertEquals("0", searcher.doc(hits2.scoreDocs[1].doc).get("id"));

@@ -320,7 +320,7 @@ public class TestQueryRescorer extends LuceneTestCase {
 TopDocs hits3 = QueryRescorer.rescore(searcher, hits, snq, 2.0, 10);

 // Resorting changed the order:
-assertEquals(2, hits3.totalHits);
+assertEquals(2, hits3.totalHits.value);
 assertEquals("1", searcher.doc(hits3.scoreDocs[0].doc).get("id"));
 assertEquals("0", searcher.doc(hits3.scoreDocs[1].doc).get("id"));
@@ -70,7 +70,7 @@ public class TestRegexpQuery extends LuceneTestCase {

 private long regexQueryNrHits(String regex) throws IOException {
 RegexpQuery query = new RegexpQuery(newTerm(regex));
-return searcher.search(query, 5).totalHits;
+return searcher.count(query);
 }

 public void testRegex1() throws IOException {

@@ -112,7 +112,7 @@ public class TestRegexpQuery extends LuceneTestCase {
 };
 RegexpQuery query = new RegexpQuery(newTerm("<quickBrown>"), RegExp.ALL,
 myProvider, DEFAULT_MAX_DETERMINIZED_STATES);
-assertEquals(1, searcher.search(query, 5).totalHits);
+assertEquals(1, searcher.search(query, 5).totalHits.value);
 }

 /**

@@ -90,7 +90,7 @@ public class TestRegexpRandom extends LuceneTestCase {
 private void assertPatternHits(String pattern, int numHits) throws Exception {
 Query wq = new RegexpQuery(new Term("field", fillPattern(pattern)));
 TopDocs docs = searcher.search(wq, 25);
-assertEquals("Incorrect hits for pattern: " + pattern, numHits, docs.totalHits);
+assertEquals("Incorrect hits for pattern: " + pattern, numHits, docs.totalHits.value);
 }

 @Override
@@ -97,7 +97,7 @@ public class TestSameScoresWithThreads extends LuceneTestCase {
  for(Map.Entry<BytesRef,TopDocs> ent : shuffled) {
  TopDocs actual = s.search(new TermQuery(new Term("body", ent.getKey())), 100);
  TopDocs expected = ent.getValue();
- assertEquals(expected.totalHits, actual.totalHits);
+ assertEquals(expected.totalHits.value, actual.totalHits.value);
  assertEquals("query=" + ent.getKey().utf8ToString(), expected.scoreDocs.length, actual.scoreDocs.length);
  for(int hit=0;hit<expected.scoreDocs.length;hit++) {
  assertEquals(expected.scoreDocs[hit].doc, actual.scoreDocs[hit].doc);

@@ -225,7 +225,7 @@ public class TestSearchAfter extends LuceneTestCase {
  all = searcher.search(query, maxDoc, sort, doScores);
  }
  if (VERBOSE) {
- System.out.println(" all.totalHits=" + all.totalHits);
+ System.out.println(" all.totalHits.value=" + all.totalHits.value);
  int upto = 0;
  for(ScoreDoc scoreDoc : all.scoreDocs) {
  System.out.println(" hit " + (upto++) + ": id=" + searcher.doc(scoreDoc.doc).get("id") + " " + scoreDoc);

@@ -233,7 +233,7 @@ public class TestSearchAfter extends LuceneTestCase {
  }
  int pageStart = 0;
  ScoreDoc lastBottom = null;
- while (pageStart < all.totalHits) {
+ while (pageStart < all.totalHits.value) {
  TopDocs paged;
  if (sort == null) {
  if (VERBOSE) {

@@ -265,7 +265,7 @@ public class TestSearchAfter extends LuceneTestCase {
  }

  void assertPage(int pageStart, TopDocs all, TopDocs paged) throws IOException {
- assertEquals(all.totalHits, paged.totalHits);
+ assertEquals(all.totalHits.value, paged.totalHits.value);
  for (int i = 0; i < paged.scoreDocs.length; i++) {
  ScoreDoc sd1 = all.scoreDocs[pageStart + i];
  ScoreDoc sd2 = paged.scoreDocs[i];

@@ -328,12 +328,12 @@ public class TestShardSearching extends ShardSearchingTestBase {
  System.out.println(" shard=" + shardID + " maxDoc=" + shardSearchers[shardID].searcher.getIndexReader().maxDoc());
  }
  */
- System.out.println(" single searcher: " + hits.totalHits);
+ System.out.println(" single searcher: " + hits.totalHits.value);
  for(int i=0;i<hits.scoreDocs.length;i++) {
  final ScoreDoc sd = hits.scoreDocs[i];
  System.out.println(" doc=" + sd.doc + " score=" + sd.score);
  }
- System.out.println(" shard searcher: " + shardHits.totalHits);
+ System.out.println(" shard searcher: " + shardHits.totalHits.value);
  for(int i=0;i<shardHits.scoreDocs.length;i++) {
  final ScoreDoc sd = shardHits.scoreDocs[i];
  System.out.println(" doc=" + sd.doc + " (rebased: " + (sd.doc + base[sd.shardIndex]) + ") score=" + sd.score + " shard=" + sd.shardIndex);

@@ -355,7 +355,7 @@ public class TestShardSearching extends ShardSearchingTestBase {
  final ScoreDoc bottomHit;
  final ScoreDoc bottomHitShards;

- if (numHitsPaged < hits.totalHits) {
+ if (numHitsPaged < hits.totalHits.value) {
  // More hits to page through
  moreHits = true;
  if (sort == null) {

@@ -372,7 +372,7 @@ public class TestShardSearching extends ShardSearchingTestBase {
  }

  } else {
- assertEquals(hits.totalHits, numHitsPaged);
+ assertEquals(hits.totalHits.value, numHitsPaged);
  bottomHit = null;
  bottomHitShards = null;
  moreHits = false;

@@ -82,9 +82,9 @@ public class TestSimilarityProvider extends LuceneTestCase {

  // sanity check of searching
  TopDocs foodocs = searcher.search(new TermQuery(new Term("foo", "brown")), 10);
- assertTrue(foodocs.totalHits > 0);
+ assertTrue(foodocs.totalHits.value > 0);
  TopDocs bardocs = searcher.search(new TermQuery(new Term("bar", "brown")), 10);
- assertTrue(bardocs.totalHits > 0);
+ assertTrue(bardocs.totalHits.value > 0);
  assertTrue(foodocs.scoreDocs[0].score < bardocs.scoreDocs[0].score);
  }

@@ -261,13 +261,13 @@ public class TestSloppyPhraseQuery extends LuceneTestCase {
  builder.add(new Term("lyrics", "drug"), 4);
  PhraseQuery pq = builder.build();
  // "drug the drug"~1
- assertEquals(1, is.search(pq, 4).totalHits);
+ assertEquals(1, is.search(pq, 4).totalHits.value);
  builder.setSlop(1);
  pq = builder.build();
- assertEquals(3, is.search(pq, 4).totalHits);
+ assertEquals(3, is.search(pq, 4).totalHits.value);
  builder.setSlop(2);
  pq = builder.build();
- assertEquals(4, is.search(pq, 4).totalHits);
+ assertEquals(4, is.search(pq, 4).totalHits.value);
  ir.close();
  dir.close();
  }

@@ -102,7 +102,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.STRING));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'bar' comes before 'foo'
  assertEquals("bar", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("foo", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -130,7 +130,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.STRING, true));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'foo' comes after 'bar' in reverse order
  assertEquals("foo", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("bar", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -158,7 +158,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.STRING_VAL));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'bar' comes before 'foo'
  assertEquals("bar", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("foo", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -186,7 +186,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.STRING_VAL, true));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'foo' comes after 'bar' in reverse order
  assertEquals("foo", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("bar", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -214,7 +214,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.STRING_VAL));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'bar' comes before 'foo'
  assertEquals("bar", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("foo", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -242,7 +242,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.STRING_VAL, true));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'foo' comes after 'bar' in reverse order
  assertEquals("foo", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("bar", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -274,7 +274,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.INT));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // numeric order
  assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -307,7 +307,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.INT, true));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // reverse numeric order
  assertEquals("300000", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -338,7 +338,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.INT));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // null is treated as a 0
  assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -371,7 +371,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // null is treated as a Integer.MAX_VALUE
  assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -404,7 +404,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.LONG));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // numeric order
  assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -437,7 +437,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.LONG, true));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // reverse numeric order
  assertEquals("3000000000", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -468,7 +468,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.LONG));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // null is treated as 0
  assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -501,7 +501,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // null is treated as Long.MAX_VALUE
  assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -534,7 +534,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // numeric order
  assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -567,7 +567,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT, true));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // reverse numeric order
  assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -598,7 +598,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // null is treated as 0
  assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -631,7 +631,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // null is treated as Float.MAX_VALUE
  assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -668,7 +668,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(4, td.totalHits);
+ assertEquals(4, td.totalHits.value);
  // numeric order
  assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -699,7 +699,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // numeric order
  assertEquals("-0", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("+0", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -735,7 +735,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE, true));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(4, td.totalHits);
+ assertEquals(4, td.totalHits.value);
  // numeric order
  assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -771,7 +771,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(4, td.totalHits);
+ assertEquals(4, td.totalHits.value);
  // null treated as a 0
  assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -809,7 +809,7 @@ public class TestSort extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(4, td.totalHits);
+ assertEquals(4, td.totalHits.value);
  // null treated as Double.MAX_VALUE
  assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value"));

@@ -857,7 +857,7 @@ public class TestSort extends LuceneTestCase {
  new SortField("value2", SortField.Type.LONG));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(4, td.totalHits);
+ assertEquals(4, td.totalHits.value);
  // 'bar' comes before 'foo'
  assertEquals("bar", searcher.doc(td.scoreDocs[0].doc).get("value1"));
  assertEquals("bar", searcher.doc(td.scoreDocs[1].doc).get("value1"));

@@ -871,7 +871,7 @@ public class TestSort extends LuceneTestCase {

  // Now with overflow
  td = searcher.search(new MatchAllDocsQuery(), 1, sort);
- assertEquals(4, td.totalHits);
+ assertEquals(4, td.totalHits.value);
  assertEquals("bar", searcher.doc(td.scoreDocs[0].doc).get("value1"));
  assertEquals("0", searcher.doc(td.scoreDocs[0].doc).get("value2"));

@@ -84,7 +84,7 @@ public class TestSortRescorer extends LuceneTestCase {

  // Just first pass query
  TopDocs hits = searcher.search(query, 10);
- assertEquals(3, hits.totalHits);
+ assertEquals(3, hits.totalHits.value);
  assertEquals("3", r.document(hits.scoreDocs[0].doc).get("id"));
  assertEquals("1", r.document(hits.scoreDocs[1].doc).get("id"));
  assertEquals("2", r.document(hits.scoreDocs[2].doc).get("id"));

@@ -93,7 +93,7 @@ public class TestSortRescorer extends LuceneTestCase {
  Sort sort = new Sort(new SortField("popularity", SortField.Type.INT, true));
  Rescorer rescorer = new SortRescorer(sort);
  hits = rescorer.rescore(searcher, hits, 10);
- assertEquals(3, hits.totalHits);
+ assertEquals(3, hits.totalHits.value);
  assertEquals("2", r.document(hits.scoreDocs[0].doc).get("id"));
  assertEquals("1", r.document(hits.scoreDocs[1].doc).get("id"));
  assertEquals("3", r.document(hits.scoreDocs[2].doc).get("id"));

@@ -119,7 +119,7 @@ public class TestSortRescorer extends LuceneTestCase {

  // Just first pass query
  TopDocs hits = searcher.search(query, 10);
- assertEquals(3, hits.totalHits);
+ assertEquals(3, hits.totalHits.value);
  assertEquals("3", r.document(hits.scoreDocs[0].doc).get("id"));
  assertEquals("1", r.document(hits.scoreDocs[1].doc).get("id"));
  assertEquals("2", r.document(hits.scoreDocs[2].doc).get("id"));

@@ -130,7 +130,7 @@ public class TestSortRescorer extends LuceneTestCase {
  Sort sort = new Sort(source.getSortField(true));
  Rescorer rescorer = new SortRescorer(sort);
  hits = rescorer.rescore(searcher, hits, 10);
- assertEquals(3, hits.totalHits);
+ assertEquals(3, hits.totalHits.value);
  assertEquals("2", r.document(hits.scoreDocs[0].doc).get("id"));
  assertEquals("1", r.document(hits.scoreDocs[1].doc).get("id"));
  assertEquals("3", r.document(hits.scoreDocs[2].doc).get("id"));

@@ -37,13 +37,13 @@ public class TestSortedNumericSortField extends LuceneTestCase {
  Sort sort = new Sort();
  sort.setSort(new SortedNumericSortField("sortednumeric", SortField.Type.LONG));
  TopDocs td = empty.search(query, 10, sort, true);
- assertEquals(0, td.totalHits);
+ assertEquals(0, td.totalHits.value);

  // for an empty index, any selector should work
  for (SortedNumericSelector.Type v : SortedNumericSelector.Type.values()) {
  sort.setSort(new SortedNumericSortField("sortednumeric", SortField.Type.LONG, false, v));
  td = empty.search(query, 10, sort, true);
- assertEquals(0, td.totalHits);
+ assertEquals(0, td.totalHits.value);
  }
  }

@@ -83,7 +83,7 @@ public class TestSortedNumericSortField extends LuceneTestCase {
  Sort sort = new Sort(new SortedNumericSortField("value", SortField.Type.INT));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 3 comes before 5
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -112,7 +112,7 @@ public class TestSortedNumericSortField extends LuceneTestCase {
  Sort sort = new Sort(new SortedNumericSortField("value", SortField.Type.INT, true));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'bar' comes before 'baz'
  assertEquals("2", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("1", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -145,7 +145,7 @@ public class TestSortedNumericSortField extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // 3 comes before 5
  // null comes first
  assertEquals("3", searcher.doc(td.scoreDocs[0].doc).get("id"));

@@ -180,7 +180,7 @@ public class TestSortedNumericSortField extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // 3 comes before 5
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -209,7 +209,7 @@ public class TestSortedNumericSortField extends LuceneTestCase {
  Sort sort = new Sort(new SortedNumericSortField("value", SortField.Type.INT));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 3 comes before 5
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -237,7 +237,7 @@ public class TestSortedNumericSortField extends LuceneTestCase {
  Sort sort = new Sort(new SortedNumericSortField("value", SortField.Type.FLOAT));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // -5 comes before -3
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -265,7 +265,7 @@ public class TestSortedNumericSortField extends LuceneTestCase {
  Sort sort = new Sort(new SortedNumericSortField("value", SortField.Type.DOUBLE));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // -5 comes before -3
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -53,7 +53,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", false, SortedSetSelector.Type.MAX));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'baz' comes before 'foo'
  assertEquals("2", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("1", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -83,7 +83,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", true, SortedSetSelector.Type.MAX));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'baz' comes before 'foo'
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -118,7 +118,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // null comes first
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  // 'baz' comes before 'foo'

@@ -155,7 +155,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // 'baz' comes before 'foo'
  assertEquals("3", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -185,7 +185,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", false, SortedSetSelector.Type.MAX));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'bar' comes before 'baz'
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -216,7 +216,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", false, SortedSetSelector.Type.MIDDLE_MIN));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'b' comes before 'c'
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -247,7 +247,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", true, SortedSetSelector.Type.MIDDLE_MIN));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'b' comes before 'c'
  assertEquals("2", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("1", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -283,7 +283,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // null comes first
  assertEquals("3", searcher.doc(td.scoreDocs[0].doc).get("id"));
  // 'b' comes before 'c'

@@ -321,7 +321,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // 'b' comes before 'c'
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -351,7 +351,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", false, SortedSetSelector.Type.MIDDLE_MIN));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'bar' comes before 'baz'
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -382,7 +382,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", false, SortedSetSelector.Type.MIDDLE_MAX));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'b' comes before 'c'
  assertEquals("2", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("1", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -413,7 +413,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", true, SortedSetSelector.Type.MIDDLE_MAX));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'b' comes before 'c'
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -449,7 +449,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // null comes first
  assertEquals("3", searcher.doc(td.scoreDocs[0].doc).get("id"));
  // 'b' comes before 'c'

@@ -487,7 +487,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // 'b' comes before 'c'
  assertEquals("2", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("1", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -517,7 +517,7 @@ public class TestSortedSetSelector extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", false, SortedSetSelector.Type.MIDDLE_MAX));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'bar' comes before 'baz'
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -38,13 +38,13 @@ public class TestSortedSetSortField extends LuceneTestCase {
  Sort sort = new Sort();
  sort.setSort(new SortedSetSortField("sortedset", false));
  TopDocs td = empty.search(query, 10, sort, true);
- assertEquals(0, td.totalHits);
+ assertEquals(0, td.totalHits.value);

  // for an empty index, any selector should work
  for (SortedSetSelector.Type v : SortedSetSelector.Type.values()) {
  sort.setSort(new SortedSetSortField("sortedset", false, v));
  td = empty.search(query, 10, sort, true);
- assertEquals(0, td.totalHits);
+ assertEquals(0, td.totalHits.value);
  }
  }

@@ -83,7 +83,7 @@ public class TestSortedSetSortField extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", false));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'bar' comes before 'baz'
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -112,7 +112,7 @@ public class TestSortedSetSortField extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", true));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'bar' comes before 'baz'
  assertEquals("2", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("1", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -145,7 +145,7 @@ public class TestSortedSetSortField extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // 'bar' comes before 'baz'
  // null comes first
  assertEquals("3", searcher.doc(td.scoreDocs[0].doc).get("id"));

@@ -180,7 +180,7 @@ public class TestSortedSetSortField extends LuceneTestCase {
  Sort sort = new Sort(sortField);

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(3, td.totalHits);
+ assertEquals(3, td.totalHits.value);
  // 'bar' comes before 'baz'
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -209,7 +209,7 @@ public class TestSortedSetSortField extends LuceneTestCase {
  Sort sort = new Sort(new SortedSetSortField("value", false));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
- assertEquals(2, td.totalHits);
+ assertEquals(2, td.totalHits.value);
  // 'bar' comes before 'baz'
  assertEquals("1", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("2", searcher.doc(td.scoreDocs[1].doc).get("id"));

@@ -92,7 +92,7 @@ public class TestSynonymQuery extends LuceneTestCase {
  searcher.search(query, collector);
  TopDocs topDocs = collector.topDocs();
  if (trackTotalHits) {
- assertEquals(11, topDocs.totalHits);
+ assertEquals(11, topDocs.totalHits.value);
  }
  // All docs must have the same score
  for (int i = 0; i < topDocs.scoreDocs.length; ++i) {

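Because the reported value is only guaranteed to be exact when total hits are tracked, assertions like the one guarded by trackTotalHits above generally branch on the relation. A hedged sketch of that pattern, reusing the test's searcher and query and the expected count of 11 (TotalHits comes from org.apache.lucene.search; the numbers are placeholders):

    TopDocs topDocs = searcher.search(query, 10);
    if (topDocs.totalHits.relation == TotalHits.Relation.EQUAL_TO) {
      assertEquals(11, topDocs.totalHits.value);   // the count is exact
    } else {
      assertTrue(topDocs.totalHits.value <= 11);   // only a lower bound is known
    }
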
@@ -271,7 +271,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
  initializeIndex(new String[] {"A", "B", "", "C", "D"}, analyzer);
  IndexReader reader = DirectoryReader.open(dir);
  IndexSearcher searcher = newSearcher(reader);
- long numHits = searcher.search(query, 1000).totalHits;
+ long numHits = searcher.search(query, 1000).totalHits.value;
  // When Lucene-38 is fixed, use the assert on the next line:
  assertEquals("A,B,<empty string>,C,D => A, B & <empty string> are in range", 3, numHits);
  // until Lucene-38 is fixed, use this assert:

@@ -281,7 +281,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
  initializeIndex(new String[] {"A", "B", "", "D"}, analyzer);
  reader = DirectoryReader.open(dir);
  searcher = newSearcher(reader);
- numHits = searcher.search(query, 1000).totalHits;
+ numHits = searcher.search(query, 1000).totalHits.value;
  // When Lucene-38 is fixed, use the assert on the next line:
  assertEquals("A,B,<empty string>,D => A, B & <empty string> are in range", 3, numHits);
  // until Lucene-38 is fixed, use this assert:

@@ -290,7 +290,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
  addDoc("C");
  reader = DirectoryReader.open(dir);
  searcher = newSearcher(reader);
- numHits = searcher.search(query, 1000).totalHits;
+ numHits = searcher.search(query, 1000).totalHits.value;
  // When Lucene-38 is fixed, use the assert on the next line:
  assertEquals("C added, still A, B & <empty string> are in range", 3, numHits);
  // until Lucene-38 is fixed, use this assert

@@ -306,7 +306,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
  initializeIndex(new String[]{"A", "B", "","C", "D"}, analyzer);
  IndexReader reader = DirectoryReader.open(dir);
  IndexSearcher searcher = newSearcher(reader);
- long numHits = searcher.search(query, 1000).totalHits;
+ long numHits = searcher.search(query, 1000).totalHits.value;
  // When Lucene-38 is fixed, use the assert on the next line:
  assertEquals("A,B,<empty string>,C,D => A,B,<empty string>,C in range", 4, numHits);
  // until Lucene-38 is fixed, use this assert

@@ -315,7 +315,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
  initializeIndex(new String[]{"A", "B", "", "D"}, analyzer);
  reader = DirectoryReader.open(dir);
  searcher = newSearcher(reader);
- numHits = searcher.search(query, 1000).totalHits;
+ numHits = searcher.search(query, 1000).totalHits.value;
  // When Lucene-38 is fixed, use the assert on the next line:
  assertEquals("A,B,<empty string>,D - A, B and <empty string> in range", 3, numHits);
  // until Lucene-38 is fixed, use this assert

@@ -324,7 +324,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
  addDoc("C");
  reader = DirectoryReader.open(dir);
  searcher = newSearcher(reader);
- numHits = searcher.search(query, 1000).totalHits;
+ numHits = searcher.search(query, 1000).totalHits.value;
  // When Lucene-38 is fixed, use the assert on the next line:
  assertEquals("C added => A,B,<empty string>,C in range", 4, numHits);
  // until Lucene-38 is fixed, use this assert

@ -46,7 +46,7 @@ public class TestTopDocsCollector extends LuceneTestCase {
return EMPTY_TOPDOCS;
}

return new TopDocs(totalHits, results);
return new TopDocs(new TotalHits(totalHits, totalHitsRelation), results);
}

@Override

@ -288,7 +288,7 @@ public class TestTopDocsCollector extends LuceneTestCase {
dir.close();
}

public void testEstimateHitCount() throws Exception {
public void testNotTrackTotalHits() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE));
Document doc = new Document();

@ -323,64 +323,8 @@ public class TestTopDocsCollector extends LuceneTestCase {

TopDocs topDocs = collector.topDocs();
// It assumes all docs matched since numHits was 2 and the first 2 collected docs matched
assertEquals(10, topDocs.totalHits);
assertEquals(3, topDocs.totalHits.value);
+ assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation);
- // Now test an index that is more sparsely collected
- collector = TopScoreDocCollector.create(2, null, false);
- leafCollector = collector.getLeafCollector(reader.leaves().get(0));
- leafCollector.setScorer(scorer);
- scorer.doc = 1;
- scorer.score = 3;
- leafCollector.collect(1);
- leafCollector = collector.getLeafCollector(reader.leaves().get(1));
- leafCollector.setScorer(scorer);
- scorer.doc = 0;
- scorer.score = 2;
- leafCollector.collect(0);
- scorer.doc = 2;
- scorer.score = 5;
- leafCollector.collect(2);
- topDocs = collector.topDocs();
- assertEquals(4, topDocs.totalHits);
- // Same 2 first collected docs, but then we collect more docs to make sure
- // that we use the actual number of collected docs as a lower bound
- collector = TopScoreDocCollector.create(2, null, false);
- leafCollector = collector.getLeafCollector(reader.leaves().get(0));
- leafCollector.setScorer(scorer);
- scorer.doc = 1;
- scorer.score = 3;
- leafCollector.collect(1);
- leafCollector = collector.getLeafCollector(reader.leaves().get(1));
- leafCollector.setScorer(scorer);
- scorer.doc = 0;
- scorer.score = 2;
- leafCollector.collect(0);
- scorer.doc = 2;
- scorer.score = 5;
- leafCollector.collect(2);
- scorer.doc = 3;
- scorer.score = 4;
- leafCollector.collect(3);
- scorer.doc = 4;
- scorer.score = 1;
- leafCollector.collect(4);
- topDocs = collector.topDocs();
- assertEquals(5, topDocs.totalHits);
reader.close();
dir.close();
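In the first TestTopDocsCollector hunk above, a collector's topDocs() now wraps its raw hit count in a TotalHits together with a relation. A minimal sketch of that pattern (countIsExact, collectedHitCount and scoreDocs are illustrative names, not taken from the commit):

    // Illustrative sketch: a collector returning either an exact count or a lower bound.
    TotalHits.Relation relation = countIsExact
        ? TotalHits.Relation.EQUAL_TO
        : TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
    return new TopDocs(new TotalHits(collectedHitCount, relation), scoreDocs);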
@ -77,8 +77,8 @@ public class TestTopDocsMerge extends LuceneTestCase {

public void testInconsistentTopDocsFail() {
TopDocs[] topDocs = new TopDocs[] {
new TopDocs(1, new ScoreDoc[] { new ScoreDoc(1, 1.0f) }),
new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(1, 1.0f) }),
new TopDocs(1, new ScoreDoc[] { new ScoreDoc(1, 1.0f, -1) })
new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(1, 1.0f, -1) })
};
if (random().nextBoolean()) {
ArrayUtil.swap(topDocs, 0, 1);

@ -103,7 +103,7 @@ public class TestTopDocsMerge extends LuceneTestCase {
// we set the shard index to index in the list here but shuffle the entire list below
scoreDocs[j] = new ScoreDoc((100 * i) + j, score , i);
}
topDocs.add(new TopDocs(numHits, scoreDocs));
topDocs.add(new TopDocs(new TotalHits(numHits, TotalHits.Relation.EQUAL_TO), scoreDocs));
shardResultMapping.put(i, topDocs.get(i));
}
// shuffle the entire thing such that we don't get 1 to 1 mapping of shard index to index in the array

@ -306,7 +306,7 @@ public class TestTopDocsMerge extends LuceneTestCase {
if (useFrom) {
System.out.println("from=" + from + " size=" + size);
}
System.out.println(" top search: " + topHits.totalHits + " totalHits; hits=" + (topHits.scoreDocs == null ? "null" : topHits.scoreDocs.length));
System.out.println(" top search: " + topHits.totalHits.value + " totalHits; hits=" + (topHits.scoreDocs == null ? "null" : topHits.scoreDocs.length));
if (topHits.scoreDocs != null) {
for(int hitIDX=0;hitIDX<topHits.scoreDocs.length;hitIDX++) {
final ScoreDoc sd = topHits.scoreDocs[hitIDX];

@ -337,7 +337,7 @@ public class TestTopDocsMerge extends LuceneTestCase {

shardHits[shardIDX] = subHits;
if (VERBOSE) {
System.out.println(" shard=" + shardIDX + " " + subHits.totalHits + " totalHits hits=" + (subHits.scoreDocs == null ? "null" : subHits.scoreDocs.length));
System.out.println(" shard=" + shardIDX + " " + subHits.totalHits.value + " totalHits hits=" + (subHits.scoreDocs == null ? "null" : subHits.scoreDocs.length));
if (subHits.scoreDocs != null) {
for(ScoreDoc sd : subHits.scoreDocs) {
System.out.println("    doc=" + sd.doc + " score=" + sd.score);

@ -378,4 +378,27 @@ public class TestTopDocsMerge extends LuceneTestCase {
dir.close();
}

+ public void testMergeTotalHitsRelation() {
+   TopDocs topDocs1 = new TopDocs(new TotalHits(2, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 2f) });
+   TopDocs topDocs2 = new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 2f) });
+   TopDocs topDocs3 = new TopDocs(new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 2f) });
+   TopDocs topDocs4 = new TopDocs(new TotalHits(3, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 2f) });
+
+   TopDocs merged1 = TopDocs.merge(1, new TopDocs[] {topDocs1, topDocs2});
+   assertEquals(3, merged1.totalHits.value);
+   assertEquals(TotalHits.Relation.EQUAL_TO, merged1.totalHits.relation);
+
+   TopDocs merged2 = TopDocs.merge(1, new TopDocs[] {topDocs1, topDocs3});
+   assertEquals(3, merged2.totalHits.value);
+   assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, merged2.totalHits.relation);
+
+   TopDocs merged3 = TopDocs.merge(1, new TopDocs[] {topDocs3, topDocs4});
+   assertEquals(4, merged3.totalHits.value);
+   assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, merged3.totalHits.relation);
+
+   TopDocs merged4 = TopDocs.merge(1, new TopDocs[] {topDocs4, topDocs2});
+   assertEquals(4, merged4.totalHits.value);
+   assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, merged4.totalHits.relation);
+ }
+
}
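The new testMergeTotalHitsRelation above pins down the observable contract when several TopDocs are merged: the values add up, and the result stays EQUAL_TO only if every input was EQUAL_TO. A standalone sketch of that combination rule, written against the TotalHits fields used in the test (this is an illustration of the asserted behavior, not the actual merge implementation):

    // Illustrative combination of two TotalHits, matching the assertions above.
    static TotalHits combine(TotalHits a, TotalHits b) {
      TotalHits.Relation relation =
          (a.relation == TotalHits.Relation.EQUAL_TO && b.relation == TotalHits.Relation.EQUAL_TO)
              ? TotalHits.Relation.EQUAL_TO
              : TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
      return new TotalHits(a.value + b.value, relation);
    }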
@ -134,7 +134,7 @@ public class TestTopFieldCollector extends LuceneTestCase {
for(int i = 0; i < sort.length; i++) {
TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true);
TopDocs td = tdc.topDocs();
assertEquals(0, td.totalHits);
assertEquals(0, td.totalHits.value);
}
}
@ -156,10 +156,10 @@ public class TestTopFieldCollectorEarlyTermination extends LuceneTestCase {
assertTrue(collector2.isEarlyTerminated());
}
if (collector2.isEarlyTerminated()) {
assertTrue(td2.totalHits >= td1.scoreDocs.length);
assertTrue(td2.totalHits.value >= td1.scoreDocs.length);
assertTrue(td2.totalHits <= reader.maxDoc());
assertTrue(td2.totalHits.value <= reader.maxDoc());
} else {
assertEquals(td2.totalHits, td1.totalHits);
assertEquals(td2.totalHits.value, td1.totalHits.value);
}
assertTopDocsEquals(td1.scoreDocs, td2.scoreDocs);
}
@ -94,7 +94,7 @@ public class TestWildcardRandom extends LuceneTestCase {
}
Query wq = new WildcardQuery(new Term("field", filledPattern));
TopDocs docs = searcher.search(wq, 25);
assertEquals("Incorrect hits for pattern: " + pattern, numHits, docs.totalHits);
assertEquals("Incorrect hits for pattern: " + pattern, numHits, docs.totalHits.value);
}

@Override
@ -57,16 +57,16 @@ public class TestBooleanSimilarity extends BaseSimilarityTestCase {
IndexSearcher searcher = newSearcher(reader);
searcher.setSimilarity(new BooleanSimilarity());
TopDocs topDocs = searcher.search(new TermQuery(new Term("foo", "bar")), 2);
assertEquals(2, topDocs.totalHits);
assertEquals(2, topDocs.totalHits.value);
assertEquals(1f, topDocs.scoreDocs[0].score, 0f);
assertEquals(1f, topDocs.scoreDocs[1].score, 0f);

topDocs = searcher.search(new TermQuery(new Term("foo", "baz")), 1);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(1f, topDocs.scoreDocs[0].score, 0f);

topDocs = searcher.search(new BoostQuery(new TermQuery(new Term("foo", "baz")), 3f), 1);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(3f, topDocs.scoreDocs[0].score, 0f);

reader.close();

@ -89,11 +89,11 @@ public class TestBooleanSimilarity extends BaseSimilarityTestCase {
PhraseQuery query = new PhraseQuery(2, "foo", "bar", "quux");

TopDocs topDocs = searcher.search(query, 2);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(1f, topDocs.scoreDocs[0].score, 0f);

topDocs = searcher.search(new BoostQuery(query, 7), 2);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(7f, topDocs.scoreDocs[0].score, 0f);

reader.close();
@ -71,7 +71,7 @@ public class TestClassicSimilarity extends BaseSimilarityTestCase {
public void testHit() throws IOException {
Query query = new TermQuery(new Term("test", "hit"));
TopDocs topDocs = indexSearcher.search(query, 1);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(1, topDocs.scoreDocs.length);
assertTrue(topDocs.scoreDocs[0].score != 0);
}

@ -79,13 +79,13 @@ public class TestClassicSimilarity extends BaseSimilarityTestCase {
public void testMiss() throws IOException {
Query query = new TermQuery(new Term("test", "miss"));
TopDocs topDocs = indexSearcher.search(query, 1);
assertEquals(0, topDocs.totalHits);
assertEquals(0, topDocs.totalHits.value);
}

public void testEmpty() throws IOException {
Query query = new TermQuery(new Term("empty", "miss"));
TopDocs topDocs = indexSearcher.search(query, 1);
assertEquals(0, topDocs.totalHits);
assertEquals(0, topDocs.totalHits.value);
}

public void testBQHit() throws IOException {

@ -93,7 +93,7 @@ public class TestClassicSimilarity extends BaseSimilarityTestCase {
.add(new TermQuery(new Term("test", "hit")), Occur.SHOULD)
.build();
TopDocs topDocs = indexSearcher.search(query, 1);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(1, topDocs.scoreDocs.length);
assertTrue(topDocs.scoreDocs[0].score != 0);
}

@ -104,7 +104,7 @@ public class TestClassicSimilarity extends BaseSimilarityTestCase {
.add(new TermQuery(new Term("test", "miss")), Occur.SHOULD)
.build();
TopDocs topDocs = indexSearcher.search(query, 1);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(1, topDocs.scoreDocs.length);
assertTrue(topDocs.scoreDocs[0].score != 0);
}

@ -115,7 +115,7 @@ public class TestClassicSimilarity extends BaseSimilarityTestCase {
.add(new TermQuery(new Term("empty", "miss")), Occur.SHOULD)
.build();
TopDocs topDocs = indexSearcher.search(query, 1);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(1, topDocs.scoreDocs.length);
assertTrue(topDocs.scoreDocs[0].score != 0);
}

@ -126,7 +126,7 @@ public class TestClassicSimilarity extends BaseSimilarityTestCase {
new TermQuery(new Term("test", "hit"))),
0);
TopDocs topDocs = indexSearcher.search(query, 1);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(1, topDocs.scoreDocs.length);
assertTrue(topDocs.scoreDocs[0].score != 0);
}

@ -138,7 +138,7 @@ public class TestClassicSimilarity extends BaseSimilarityTestCase {
new TermQuery(new Term("test", "miss"))),
0);
TopDocs topDocs = indexSearcher.search(query, 1);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(1, topDocs.scoreDocs.length);
assertTrue(topDocs.scoreDocs[0].score != 0);
}

@ -150,7 +150,7 @@ public class TestClassicSimilarity extends BaseSimilarityTestCase {
new TermQuery(new Term("empty", "miss"))),
0);
TopDocs topDocs = indexSearcher.search(query, 1);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
assertEquals(1, topDocs.scoreDocs.length);
assertTrue(topDocs.scoreDocs[0].score != 0);
}
@ -96,7 +96,7 @@ public class TestSimilarity2 extends LuceneTestCase {

for (Similarity sim : sims) {
is.setSimilarity(sim);
assertEquals(0, is.search(new TermQuery(new Term("foo", "bar")), 10).totalHits);
assertEquals(0, is.search(new TermQuery(new Term("foo", "bar")), 10).totalHits.value);
}
ir.close();
dir.close();

@ -118,7 +118,7 @@ public class TestSimilarity2 extends LuceneTestCase {
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.SHOULD);
query.add(new TermQuery(new Term("bar", "baz")), BooleanClause.Occur.SHOULD);
assertEquals(1, is.search(query.build(), 10).totalHits);
assertEquals(1, is.search(query.build(), 10).totalHits.value);
}
ir.close();
dir.close();

@ -140,7 +140,7 @@ public class TestSimilarity2 extends LuceneTestCase {
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.SHOULD);
query.add(new TermQuery(new Term("foo", "baz")), BooleanClause.Occur.SHOULD);
assertEquals(1, is.search(query.build(), 10).totalHits);
assertEquals(1, is.search(query.build(), 10).totalHits.value);
}
ir.close();
dir.close();

@ -164,7 +164,7 @@ public class TestSimilarity2 extends LuceneTestCase {
is.setSimilarity(sim);
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.SHOULD);
assertEquals(1, is.search(query.build(), 10).totalHits);
assertEquals(1, is.search(query.build(), 10).totalHits.value);
}
ir.close();
dir.close();

@ -235,7 +235,7 @@ public class TestSimilarity2 extends LuceneTestCase {
is.setSimilarity(sim);
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.SHOULD);
assertEquals(1, is.search(query.build(), 10).totalHits);
assertEquals(1, is.search(query.build(), 10).totalHits.value);
}
ir.close();
dir.close();

@ -261,7 +261,7 @@ public class TestSimilarity2 extends LuceneTestCase {
is.setSimilarity(sim);
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.SHOULD);
assertEquals(1, is.search(query.build(), 10).totalHits);
assertEquals(1, is.search(query.build(), 10).totalHits.value);
}
ir.close();
dir.close();

@ -286,7 +286,7 @@ public class TestSimilarity2 extends LuceneTestCase {
SpanTermQuery s2 = new SpanTermQuery(new Term("foo", "baz"));
Query query = new SpanOrQuery(s1, s2);
TopDocs td = is.search(query, 10);
assertEquals(1, td.totalHits);
assertEquals(1, td.totalHits.value);
float score = td.scoreDocs[0].score;
assertFalse("negative score for " + sim, score < 0.0f);
assertFalse("inf score for " + sim, Float.isInfinite(score));
@ -523,7 +523,7 @@ public class TestSimilarityBase extends LuceneTestCase {
for (SimilarityBase sim : sims) {
searcher.setSimilarity(sim);
TopDocs topDocs = searcher.search(q, 1000);
assertEquals("Failed: " + sim.toString(), 3, topDocs.totalHits);
assertEquals("Failed: " + sim.toString(), 3, topDocs.totalHits.value);
}
}
@ -53,12 +53,12 @@ public class TestSpanFirstQuery extends LuceneTestCase {

// user queries on "starts-with quick"
SpanQuery sfq = spanFirstQuery(spanTermQuery("field", "quick"), 1);
assertEquals(1, searcher.search(sfq, 10).totalHits);
assertEquals(1, searcher.search(sfq, 10).totalHits.value);

// user queries on "starts-with the quick"
SpanQuery include = spanFirstQuery(spanTermQuery("field", "quick"), 2);
sfq = spanNotQuery(include, sfq);
assertEquals(1, searcher.search(sfq, 10).totalHits);
assertEquals(1, searcher.search(sfq, 10).totalHits.value);

writer.close();
reader.close();
@ -341,7 +341,7 @@ public class TestSpans extends LuceneTestCase {
assertEquals(1,
searcher.search(createSpan(0, true,
new SpanQuery[] {createSpan(4, false, "chased", "cat"),
createSpan("ate")}), 10).totalHits);
createSpan("ate")}), 10).totalHits.value);
reader.close();
dir.close();
}
@ -152,7 +152,7 @@ public class SearchFiles {
TopDocs results = searcher.search(query, 5 * hitsPerPage);
ScoreDoc[] hits = results.scoreDocs;

int numTotalHits = Math.toIntExact(results.totalHits);
int numTotalHits = Math.toIntExact(results.totalHits.value);
System.out.println(numTotalHits + " total matching documents");

int start = 0;
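The SearchFiles demo above feeds the new totalHits.value through Math.toIntExact and prints it exactly as before. Since the reported value can carry a GREATER_THAN_OR_EQUAL_TO relation, a display layer could also hedge the message; a small sketch under that assumption (purely illustrative, not part of SearchFiles in this commit):

    // Illustrative: append a "+" when the reported count is only a lower bound.
    String suffix = results.totalHits.relation == TotalHits.Relation.EQUAL_TO ? "" : "+";
    System.out.println(results.totalHits.value + suffix + " total matching documents");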
@ -36,7 +36,7 @@ public class TestDistanceFacetsExample extends LuceneTestCase {
DistanceFacetsExample example = new DistanceFacetsExample();
example.index();
TopDocs hits = example.drillDown(example.FIVE_KM);
assertEquals(2, hits.totalHits);
assertEquals(2, hits.totalHits.value);
example.close();
}
}
@ -39,7 +39,7 @@ public class TestRangeFacetsExample extends LuceneTestCase {
RangeFacetsExample example = new RangeFacetsExample();
example.index();
TopDocs hits = example.drillDown(example.PAST_SIX_HOURS);
assertEquals(22, hits.totalHits);
assertEquals(22, hits.totalHits.value);
example.close();
}
}
@ -86,7 +86,7 @@ public class TestExpressionRescorer extends LuceneTestCase {

// Just first pass query
TopDocs hits = searcher.search(query, 10);
assertEquals(3, hits.totalHits);
assertEquals(3, hits.totalHits.value);
assertEquals("3", r.document(hits.scoreDocs[0].doc).get("id"));
assertEquals("1", r.document(hits.scoreDocs[1].doc).get("id"));
assertEquals("2", r.document(hits.scoreDocs[2].doc).get("id"));

@ -100,7 +100,7 @@ public class TestExpressionRescorer extends LuceneTestCase {
Rescorer rescorer = e.getRescorer(bindings);

hits = rescorer.rescore(searcher, hits, 10);
assertEquals(3, hits.totalHits);
assertEquals(3, hits.totalHits.value);
assertEquals("2", r.document(hits.scoreDocs[0].doc).get("id"));
assertEquals("1", r.document(hits.scoreDocs[1].doc).get("id"));
assertEquals("3", r.document(hits.scoreDocs[2].doc).get("id"));
@ -140,7 +140,7 @@ public class TestExpressionSorts extends LuceneTestCase {
TopDocs actual = searcher.search(query, size, mutatedSort, random().nextBoolean());
CheckHits.checkEqual(query, expected.scoreDocs, actual.scoreDocs);

if (size < actual.totalHits) {
if (size < actual.totalHits.value) {
expected = searcher.searchAfter(expected.scoreDocs[size-1], query, size, sort);
actual = searcher.searchAfter(actual.scoreDocs[size-1], query, size, mutatedSort);
CheckHits.checkEqual(query, expected.scoreDocs, actual.scoreDocs);
@ -38,6 +38,7 @@ import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.TotalHitCountCollector;
+ import org.apache.lucene.search.TotalHits;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.DocIdSetBuilder;

@ -219,7 +220,7 @@ public class FacetsCollector extends SimpleCollector implements Collector {
if (n==0) {
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
searcher.search(q, MultiCollector.wrap(totalHitCountCollector, fc));
topDocs = new TopDocs(totalHitCountCollector.getTotalHits(), new ScoreDoc[0]);
topDocs = new TopDocs(new TotalHits(totalHitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]);
} else {
TopDocsCollector<?> hitsCollector;
if (sort != null) {
@ -123,7 +123,7 @@ public class TestDrillDownQuery extends FacetTestCase {
q.add("a", "2");
q.add("b", "1");
TopDocs docs = searcher.search(q, 100);
assertEquals(5, docs.totalHits);
assertEquals(5, docs.totalHits.value);
}

public void testQuery() throws IOException {

@ -134,14 +134,14 @@ public class TestDrillDownQuery extends FacetTestCase {
q.add("a");
QueryUtils.check(q);
TopDocs docs = searcher.search(q, 100);
assertEquals(25, docs.totalHits);
assertEquals(25, docs.totalHits.value);

// Making sure the query yields 5 documents with the facet "b" and the
// previous (facet "a") query as a base query
DrillDownQuery q2 = new DrillDownQuery(config, q);
q2.add("b");
docs = searcher.search(q2, 100);
assertEquals(5, docs.totalHits);
assertEquals(5, docs.totalHits.value);

// Making sure that a query of both facet "a" and facet "b" yields 5 results
DrillDownQuery q3 = new DrillDownQuery(config);

@ -149,14 +149,14 @@ public class TestDrillDownQuery extends FacetTestCase {
q3.add("b");
docs = searcher.search(q3, 100);

assertEquals(5, docs.totalHits);
assertEquals(5, docs.totalHits.value);
// Check that content:foo (which yields 50% results) and facet/b (which yields 20%)
// would gather together 10 results (10%..)
Query fooQuery = new TermQuery(new Term("content", "foo"));
DrillDownQuery q4 = new DrillDownQuery(config, fooQuery);
q4.add("b");
docs = searcher.search(q4, 100);
assertEquals(10, docs.totalHits);
assertEquals(10, docs.totalHits.value);
}

public void testQueryImplicitDefaultParams() throws IOException {

@ -171,7 +171,7 @@ public class TestDrillDownQuery extends FacetTestCase {
DrillDownQuery q2 = new DrillDownQuery(config, q);
q2.add("b");
TopDocs docs = searcher.search(q2, 100);
assertEquals(5, docs.totalHits);
assertEquals(5, docs.totalHits.value);

// Check that content:foo (which yields 50% results) and facet/b (which yields 20%)
// would gather together 10 results (10%..)

@ -179,7 +179,7 @@ public class TestDrillDownQuery extends FacetTestCase {
DrillDownQuery q4 = new DrillDownQuery(config, fooQuery);
q4.add("b");
docs = searcher.search(q4, 100);
assertEquals(10, docs.totalHits);
assertEquals(10, docs.totalHits.value);
}

public void testZeroLimit() throws IOException {
@ -168,7 +168,7 @@ public class TestDrillSideways extends FacetTestCase {
DrillDownQuery ddq = new DrillDownQuery(config);
ddq.add("Author", "Lisa");
DrillSidewaysResult r = ds.search(null, ddq, 10);
assertEquals(2, r.hits.totalHits);
assertEquals(2, r.hits.totalHits.value);
// Publish Date is only drill-down, and Lisa published
// one in 2012 and one in 2010:
assertEquals("dim=Publish Date path=[] value=2 childCount=2\n 2010 (1)\n 2012 (1)\n",

@ -188,7 +188,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("Author", "Lisa");
r = ds.search(null, ddq, 10);

assertEquals(2, r.hits.totalHits);
assertEquals(2, r.hits.totalHits.value);
// Publish Date is only drill-down, and Lisa published
// one in 2012 and one in 2010:
assertEquals("dim=Publish Date path=[] value=2 childCount=2\n 2010 (1)\n 2012 (1)\n",

@ -206,7 +206,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("Author", "Lisa");
ddq.add("Author", "Bob");
r = ds.search(null, ddq, 10);
assertEquals(3, r.hits.totalHits);
assertEquals(3, r.hits.totalHits.value);
// Publish Date is only drill-down: Lisa and Bob
// (drill-down) published twice in 2010 and once in 2012:
assertEquals("dim=Publish Date path=[] value=3 childCount=2\n 2010 (2)\n 2012 (1)\n",

@ -230,7 +230,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("Author", "Lisa");
ddq.add("Publish Date", "2010");
r = ds.search(null, ddq, 10);
assertEquals(1, r.hits.totalHits);
assertEquals(1, r.hits.totalHits.value);
// Publish Date is drill-sideways + drill-down: Lisa
// (drill-down) published once in 2010 and once in 2012:
assertEquals("dim=Publish Date path=[] value=2 childCount=2\n 2010 (1)\n 2012 (1)\n",

@ -249,7 +249,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("Publish Date", "2010");
ddq.add("Author", "Bob");
r = ds.search(null, ddq, 10);
assertEquals(2, r.hits.totalHits);
assertEquals(2, r.hits.totalHits.value);
// Publish Date is both drill-sideways + drill-down:
// Lisa or Bob published twice in 2010 and once in 2012:
assertEquals("dim=Publish Date path=[] value=3 childCount=2\n 2010 (2)\n 2012 (1)\n",

@ -263,7 +263,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq = new DrillDownQuery(config);
ddq.add("Foobar", "Baz");
r = ds.search(null, ddq, 10);
assertEquals(0, r.hits.totalHits);
assertEquals(0, r.hits.totalHits.value);
assertNull(r.facets.getTopChildren(10, "Publish Date"));
assertNull(r.facets.getTopChildren(10, "Foobar"));

@ -272,7 +272,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("Author", "Lisa");
ddq.add("Author", "Tom");
r = ds.search(null, ddq, 10);
assertEquals(2, r.hits.totalHits);
assertEquals(2, r.hits.totalHits.value);
// Publish Date is only drill-down, and Lisa published
// one in 2012 and one in 2010:
assertEquals("dim=Publish Date path=[] value=2 childCount=2\n 2010 (1)\n 2012 (1)\n",

@ -289,7 +289,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("Author", "Lisa");
ddq.add("Author", "Tom");
r = ds.search(null, ddq, 10);
assertEquals(2, r.hits.totalHits);
assertEquals(2, r.hits.totalHits.value);
// Publish Date is only drill-down, and Lisa published
// one in 2012 and one in 2010:
assertEquals("dim=Publish Date path=[] value=2 childCount=2\n 2010 (1)\n 2012 (1)\n",

@ -300,7 +300,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("Author", "Lisa");
r = ds.search(null, ddq, 10);

assertEquals(0, r.hits.totalHits);
assertEquals(0, r.hits.totalHits.value);
assertNull(r.facets.getTopChildren(10, "Publish Date"));
assertNull(r.facets.getTopChildren(10, "Author"));
writer.close();

@ -349,7 +349,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("Author", "Lisa");
DrillSidewaysResult r = getNewDrillSideways(searcher, config, taxoReader).search(null, ddq, 10);

assertEquals(1, r.hits.totalHits);
assertEquals(1, r.hits.totalHits.value);
// Publish Date is only drill-down, and Lisa published
// one in 2012 and one in 2010:
assertEquals("dim=Publish Date path=[] value=1 childCount=1\n 2010 (1)\n",

@ -412,7 +412,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("dim", "a");
DrillSidewaysResult r = getNewDrillSideways(searcher, config, taxoReader).search(null, ddq, 10);

assertEquals(3, r.hits.totalHits);
assertEquals(3, r.hits.totalHits.value);
assertEquals("dim=dim path=[] value=6 childCount=4\n a (3)\n b (1)\n c (1)\n d (1)\n",
r.facets.getTopChildren(10, "dim").toString());
assertEquals("dim=dim path=[a] value=3 childCount=3\n x (1)\n y (1)\n z (1)\n",

@ -831,7 +831,7 @@ public class TestDrillSideways extends FacetTestCase {
q = new BooleanQuery.Builder().add(q, Occur.MUST).add(filter, Occur.FILTER).build();
}
TopDocs ddqHits = s.search(q, numDocs);
assertEquals(expected.hits.size(), ddqHits.totalHits);
assertEquals(expected.hits.size(), ddqHits.totalHits.value);
for (int i = 0; i < expected.hits.size(); i++) {
// Score should be IDENTICAL:
assertEquals(scores.get(expected.hits.get(i).id), ddqHits.scoreDocs[i].score, 0.0f);

@ -1033,7 +1033,7 @@ public class TestDrillSideways extends FacetTestCase {
if (VERBOSE) {
System.out.println(" verify totHits=" + expected.hits.size());
}
assertEquals(expected.hits.size(), actual.hits.totalHits);
assertEquals(expected.hits.size(), actual.hits.totalHits.value);
assertEquals(expected.hits.size(), actual.hits.scoreDocs.length);
for (int i = 0; i < expected.hits.size(); i++) {
if (VERBOSE) {

@ -1143,10 +1143,10 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("Author", "Lisa");

DrillSidewaysResult r = ds.search(ddq, 10); // this used to fail on IllegalArgEx
assertEquals(0, r.hits.totalHits);
assertEquals(0, r.hits.totalHits.value);

r = ds.search(ddq, null, null, 10, new Sort(new SortField("foo", SortField.Type.INT)), false); // this used to fail on IllegalArgEx
assertEquals(0, r.hits.totalHits);
assertEquals(0, r.hits.totalHits.value);

writer.close();
IOUtils.close(taxoWriter, searcher.getIndexReader(), taxoReader, dir, taxoDir);

@ -1187,7 +1187,7 @@ public class TestDrillSideways extends FacetTestCase {
ddq.add("author", bq.build());
ddq.add("dim", bq.build());
DrillSidewaysResult r = ds.search(null, ddq, 10);
assertEquals(0, r.hits.totalHits);
assertEquals(0, r.hits.totalHits.value);

writer.close();
IOUtils.close(searcher.getIndexReader(), taxoReader, taxoWriter, dir, taxoDir);
@ -91,13 +91,13 @@ public class TestFacetQuery extends FacetTestCase {
@Test
public void testSingleValued() throws Exception {
TopDocs topDocs = searcher.search(new FacetQuery("Author", "Mark Twain"), 10);
assertEquals(1, topDocs.totalHits);
assertEquals(1, topDocs.totalHits.value);
}

@Test
public void testMultiValued() throws Exception {
TopDocs topDocs = searcher.search(
new MultiFacetQuery("Author", new String[] { "Mark Twain" }, new String[] { "Kurt Vonnegut" }), 10);
assertEquals(2, topDocs.totalHits);
assertEquals(2, topDocs.totalHits.value);
}
}
@ -298,7 +298,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
DrillDownQuery ddq = new DrillDownQuery(config);
DrillSidewaysResult dsr = ds.search(null, ddq, 10);

assertEquals(100, dsr.hits.totalHits);
assertEquals(100, dsr.hits.totalHits.value);
assertEquals("dim=dim path=[] value=100 childCount=2\n b (75)\n a (25)\n", dsr.facets.getTopChildren(10, "dim").toString());
assertEquals("dim=field path=[] value=21 childCount=5\n less than 10 (10)\n less than or equal to 10 (11)\n over 90 (9)\n 90 or above (10)\n over 1000 (0)\n",
dsr.facets.getTopChildren(10, "field").toString());

@ -308,7 +308,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
ddq.add("dim", "b");
dsr = ds.search(null, ddq, 10);

assertEquals(75, dsr.hits.totalHits);
assertEquals(75, dsr.hits.totalHits.value);
assertEquals("dim=dim path=[] value=100 childCount=2\n b (75)\n a (25)\n", dsr.facets.getTopChildren(10, "dim").toString());
assertEquals("dim=field path=[] value=16 childCount=5\n less than 10 (7)\n less than or equal to 10 (8)\n over 90 (7)\n 90 or above (8)\n over 1000 (0)\n",
dsr.facets.getTopChildren(10, "field").toString());

@ -318,7 +318,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
ddq.add("field", LongPoint.newRangeQuery("field", 0L, 10L));
dsr = ds.search(null, ddq, 10);

assertEquals(11, dsr.hits.totalHits);
assertEquals(11, dsr.hits.totalHits.value);
assertEquals("dim=dim path=[] value=11 childCount=2\n b (8)\n a (3)\n", dsr.facets.getTopChildren(10, "dim").toString());
assertEquals("dim=field path=[] value=21 childCount=5\n less than 10 (10)\n less than or equal to 10 (11)\n over 90 (9)\n 90 or above (10)\n over 1000 (0)\n",
dsr.facets.getTopChildren(10, "field").toString());

@ -495,7 +495,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
} else {
ddq.add("field", range.getQuery(fastMatchQuery, vs));
}
assertEquals(expectedCounts[rangeID], s.search(ddq, 10).totalHits);
assertEquals(expectedCounts[rangeID], s.search(ddq, 10).totalHits.value);
}
}

@ -639,7 +639,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
ddq.add("field", range.getQuery(fastMatchFilter, vs));
}

assertEquals(expectedCounts[rangeID], s.search(ddq, 10).totalHits);
assertEquals(expectedCounts[rangeID], s.search(ddq, 10).totalHits.value);
}
}

@ -841,7 +841,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
ddq.add("field", ranges[1].getQuery(fastMatchFilter, vs));

// Test simple drill-down:
assertEquals(1, s.search(ddq, 10).totalHits);
assertEquals(1, s.search(ddq, 10).totalHits.value);

// Test drill-sideways after drill-down
DrillSideways ds = new DrillSideways(s, config, (TaxonomyReader) null) {

@ -860,7 +860,7 @@ public class TestRangeFacetCounts extends FacetTestCase {

DrillSidewaysResult dsr = ds.search(ddq, 10);
assertEquals(1, dsr.hits.totalHits);
assertEquals(1, dsr.hits.totalHits.value);
assertEquals("dim=field path=[] value=3 childCount=6\n < 1 (0)\n < 2 (1)\n < 5 (3)\n < 10 (3)\n < 20 (3)\n < 50 (3)\n",
dsr.facets.getTopChildren(10, "field").toString());
@ -92,7 +92,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
q.add("a", "foo");
q.add("b", "baz");
TopDocs hits = searcher.search(q, 1);
assertEquals(1, hits.totalHits);
assertEquals(1, hits.totalHits.value);

if (exec != null) {
exec.shutdownNow();
@ -261,7 +261,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {

Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, DoubleValuesSource.SCORES);

int expected = (int) (csq.getBoost() * td.totalHits);
int expected = (int) (csq.getBoost() * td.totalHits.value);
assertEquals(expected, facets.getSpecificValue("dim", "a").intValue());

iw.close();
Some files were not shown because too many files have changed in this diff.