mirror of https://github.com/apache/lucene.git
LUCENE-2785: add new TotalHitCountCollector (only counts hits); don't allow numHits=0 to TSDC or TFC
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1040935 13f79535-47bb-0310-9956-ffa450edef68
parent 297b3b75c4
commit f1ce9abcfb
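TotalHitCountCollector only counts hits; it replaces the old idiom of asking TopScoreDocCollector (TSDC) or TopFieldCollector (TFC) for numHits=0 when only the total count was wanted, which both collectors now reject. A minimal usage sketch (the wrapper class and method name are invented for illustration; the Lucene calls are the ones added or exercised by this commit):

import java.io.IOException;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TotalHitCountCollector;

public class CountHitsExample {
  /** Counts every matching document; no top-N docs are kept or scored into a queue. */
  public static int countAll(IndexSearcher searcher) throws IOException {
    TotalHitCountCollector collector = new TotalHitCountCollector();
    // null filter: count all matches of the query
    searcher.search(new MatchAllDocsQuery(), null, collector);
    return collector.getTotalHits();
  }
}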
IndexSearcher.java:
@@ -158,11 +158,11 @@ public class IndexSearcher extends Searcher {
   @Override
   public TopDocs search(Weight weight, Filter filter, int nDocs) throws IOException {
 
-    if (nDocs <= 0) {
-      throw new IllegalArgumentException("nDocs must be > 0");
+    int limit = reader.maxDoc();
+    if (limit == 0) {
+      limit = 1;
     }
-
-    nDocs = Math.min(nDocs, reader.maxDoc());
+    nDocs = Math.min(nDocs, limit);
 
     TopScoreDocCollector collector = TopScoreDocCollector.create(nDocs, !weight.scoresDocsOutOfOrder());
     search(weight, filter, collector);
@@ -190,7 +190,11 @@ public class IndexSearcher extends Searcher {
                              Sort sort, boolean fillFields)
       throws IOException {
 
-    nDocs = Math.min(nDocs, reader.maxDoc());
+    int limit = reader.maxDoc();
+    if (limit == 0) {
+      limit = 1;
+    }
+    nDocs = Math.min(nDocs, limit);
 
     TopFieldCollector collector = TopFieldCollector.create(sort, nDocs,
         fillFields, fieldSortDoTrackScores, fieldSortDoMaxScore, !weight.scoresDocsOutOfOrder());
TopFieldCollector.java:
@@ -913,6 +913,10 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
       throw new IllegalArgumentException("Sort must contain at least one field");
     }
 
+    if (numHits <= 0) {
+      throw new IllegalArgumentException("numHits must be > 0; please use TotalHitCountCollector if you just need the total hit count");
+    }
+
     FieldValueHitQueue queue = FieldValueHitQueue.create(sort.fields, numHits);
     if (queue.getComparators().length == 1) {
       if (docsScoredInOrder) {
TopScoreDocCollector.java:
@@ -109,6 +109,10 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
    */
   public static TopScoreDocCollector create(int numHits, boolean docsScoredInOrder) {
 
+    if (numHits <= 0) {
+      throw new IllegalArgumentException("numHits must be > 0; please use TotalHitCountCollector if you just need the total hit count");
+    }
+
     if (docsScoredInOrder) {
       return new InOrderTopScoreDocCollector(numHits);
     } else {
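Note on the hunks above: reader.maxDoc() is 0 for an empty index, so the clamp to limit = 1 in IndexSearcher keeps it from handing numHits=0 to the collector factories, which now fail fast. A small sketch of the new guard (the demo class name is invented; the create() call and exception come from the hunk above):

import org.apache.lucene.search.TopScoreDocCollector;

public class NumHitsGuardDemo {
  public static void main(String[] args) {
    try {
      // After this commit, numHits == 0 is rejected up front.
      TopScoreDocCollector.create(0, true);
    } catch (IllegalArgumentException e) {
      // Message points callers at TotalHitCountCollector instead.
      System.out.println(e.getMessage());
    }
  }
}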
TotalHitCountCollector.java (new file):
@@ -0,0 +1,47 @@
+package org.apache.lucene.search;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.IndexReader;
+
+/**
+ * Just counts the total number of hits.
+ */
+
+public class TotalHitCountCollector extends Collector {
+  private int totalHits;
+
+  /** Returns how many hits matched the search. */
+  public int getTotalHits() {
+    return totalHits;
+  }
+
+  public void setScorer(Scorer scorer) {
+  }
+
+  public void collect(int doc) {
+    totalHits++;
+  }
+
+  public void setNextReader(IndexReader reader, int docBase) {
+  }
+
+  public boolean acceptsDocsOutOfOrder() {
+    return true;
+  }
+}
TestSort.java:
@@ -26,9 +26,6 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Locale;
 
-import junit.framework.Test;
-import junit.framework.TestSuite;
-
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -1104,7 +1101,7 @@ public class TestSort extends LuceneTestCase implements Serializable {
   private void assertMatches(String msg, Searcher searcher, Query query, Sort sort,
       String expectedResult) throws IOException {
     //ScoreDoc[] result = searcher.search (query, null, 1000, sort).scoreDocs;
-    TopDocs hits = searcher.search (query, null, expectedResult.length(), sort);
+    TopDocs hits = searcher.search (query, null, Math.max(1, expectedResult.length()), sort);
     ScoreDoc[] result = hits.scoreDocs;
     assertEquals(expectedResult.length(),hits.totalHits);
     StringBuilder buff = new StringBuilder(10);
@@ -1197,4 +1194,23 @@ public class TestSort extends LuceneTestCase implements Serializable {
     indexStore.close();
   }
 
+  public void testCountingCollector() throws Exception {
+    Directory indexStore = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random, indexStore);
+    for (int i=0; i<5; i++) {
+      Document doc = new Document();
+      doc.add (new Field ("string", "a"+i, Field.Store.NO, Field.Index.NOT_ANALYZED));
+      doc.add (new Field ("string", "b"+i, Field.Store.NO, Field.Index.NOT_ANALYZED));
+      writer.addDocument (doc);
+    }
+    IndexReader reader = writer.getReader();
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(reader);
+    TotalHitCountCollector c = new TotalHitCountCollector();
+    searcher.search(new MatchAllDocsQuery(), null, c);
+    assertEquals(5, c.getTotalHits());
+    reader.close();
+    indexStore.close();
+  }
 }