mirror of https://github.com/apache/lucene.git
LUCENE-4410: fix test bug - return null DocIdSet if reader has no docs for the term
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1388404 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
bcb88e741e
commit
8cb409e692
|
@@ -383,12 +383,13 @@ public class TestFilteredQuery extends LuceneTestCase {
|
|||
}
|
||||
|
||||
/*
|
||||
* Test if the QueryFirst strategy calls the bits only if
|
||||
* the document has been matched by the query and not otherwise
|
||||
* Test if the QueryFirst strategy calls the bits only if the document has
|
||||
* been matched by the query and not otherwise
|
||||
*/
|
||||
public void testQueryFirstFilterStrategy() throws IOException {
|
||||
Directory directory = newDirectory();
|
||||
RandomIndexWriter writer = new RandomIndexWriter (random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
|
||||
RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
|
||||
int numDocs = atLeast(50);
|
||||
int totalDocsWithZero = 0;
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
|
@@ -397,26 +398,28 @@ public class TestFilteredQuery extends LuceneTestCase {
|
|||
if (num == 0) {
|
||||
totalDocsWithZero++;
|
||||
}
|
||||
doc.add (newTextField("field", ""+num, Field.Store.YES));
|
||||
writer.addDocument (doc);
|
||||
doc.add(newTextField("field", "" + num, Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
}
|
||||
IndexReader reader = writer.getReader();
|
||||
writer.close ();
|
||||
writer.close();
|
||||
|
||||
IndexSearcher searcher = newSearcher(reader);
|
||||
Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new Filter() {
|
||||
Query query = new FilteredQuery(new TermQuery(new Term("field", "0")),
|
||||
new Filter() {
|
||||
@Override
|
||||
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs)
|
||||
throws IOException {
|
||||
public DocIdSet getDocIdSet(AtomicReaderContext context,
|
||||
Bits acceptDocs) throws IOException {
|
||||
final boolean nullBitset = random().nextInt(10) == 5;
|
||||
final AtomicReader reader = context.reader();
|
||||
DocsEnum termDocsEnum = reader.termDocsEnum(new Term("field", "0"));
|
||||
final BitSet bitSet = new BitSet(reader.maxDoc());
|
||||
if (termDocsEnum != null) {
|
||||
int d;
|
||||
while((d = termDocsEnum.nextDoc()) != DocsEnum.NO_MORE_DOCS) {
|
||||
bitSet.set(d, true);
|
||||
if (termDocsEnum == null) {
|
||||
return null; // no docs -- return null
|
||||
}
|
||||
final BitSet bitSet = new BitSet(reader.maxDoc());
|
||||
int d;
|
||||
while ((d = termDocsEnum.nextDoc()) != DocsEnum.NO_MORE_DOCS) {
|
||||
bitSet.set(d, true);
|
||||
}
|
||||
return new DocIdSet() {
|
||||
|
||||
|
@@ -429,7 +432,8 @@ public class TestFilteredQuery extends LuceneTestCase {
|
|||
|
||||
@Override
|
||||
public boolean get(int index) {
|
||||
assertTrue("filter was called for a non-matching doc", bitSet.get(index));
|
||||
assertTrue("filter was called for a non-matching doc",
|
||||
bitSet.get(index));
|
||||
return bitSet.get(index);
|
||||
}
|
||||
|
||||
|
@@ -440,9 +444,12 @@ public class TestFilteredQuery extends LuceneTestCase {
|
|||
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocIdSetIterator iterator() throws IOException {
|
||||
assertTrue("iterator should not be called if bitset is present", nullBitset);
|
||||
assertTrue(
|
||||
"iterator should not be called if bitset is present",
|
||||
nullBitset);
|
||||
return reader.termDocsEnum(new Term("field", "0"));
|
||||
}
|
||||
|
||||
|
@@ -491,6 +498,9 @@ public class TestFilteredQuery extends LuceneTestCase {
|
|||
@Override
|
||||
public DocIdSetIterator iterator() throws IOException {
|
||||
final DocsEnum termDocsEnum = context.reader().termDocsEnum(new Term("field", "0"));
|
||||
if (termDocsEnum == null) {
|
||||
return null;
|
||||
}
|
||||
return new DocIdSetIterator() {
|
||||
boolean nextCalled;
|
||||
boolean advanceCalled;
|
||||
|
|
Loading…
Reference in New Issue