mirror of https://github.com/apache/lucene.git
LUCENE-3600: fix BlockJoinQuery to not trip assert when parent doc has no children
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1206807 13f79535-47bb-0310-9956-ffa450edef68
parent 1ee685d837
commit 3187a7a26b
@@ -26,7 +26,6 @@ import org.apache.lucene.index.IndexReader.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter; // javadocs
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Explanation;
@@ -194,7 +193,7 @@ public class BlockJoinQuery extends Query {
     private final Scorer childScorer;
     private final FixedBitSet parentBits;
     private final ScoreMode scoreMode;
-    private int parentDoc;
+    private int parentDoc = -1;
     private float parentScore;
     private int nextChildDoc;
 
@@ -324,8 +323,15 @@ public class BlockJoinQuery extends Query {
         return parentDoc = NO_MORE_DOCS;
       }
 
-      // Every parent must have at least one child:
-      assert parentTarget != 0;
+      if (parentTarget == 0) {
+        // Callers should only be passing in a docID from
+        // the parent space, so this means this parent
+        // has no children (it got docID 0), so it cannot
+        // possibly match. We must handle this case
+        // separately otherwise we pass invalid -1 to
+        // prevSetBit below:
+        return nextDoc();
+      }
 
       final int prevParentDoc = parentBits.prevSetBit(parentTarget-1);
 
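The hunk above replaces the old assert with an explicit guard: with Lucene block indexing, the child documents of a block are added immediately before their parent, so a parent that received docID 0 cannot have any children, and computing the block start via parentBits.prevSetBit(parentTarget - 1) would pass an invalid -1. The standalone sketch below only illustrates that reasoning; it is not Lucene code, and the boolean[]-based prevSetBit, the firstChildOf helper, and the example doc layout are illustrative assumptions.

// Standalone illustration of the LUCENE-3600 guard; not Lucene code.
// parentBits[d] == true means doc d is a parent; the children of a
// parent occupy the docIDs immediately before it (block indexing).
public class BlockJoinAdvanceSketch {

  // Simplified stand-in for FixedBitSet.prevSetBit: highest set bit at
  // or below index. A negative index is invalid input.
  static int prevSetBit(boolean[] bits, int index) {
    assert index >= 0 : "invalid index=" + index;
    for (int i = index; i >= 0; i--) {
      if (bits[i]) {
        return i;
      }
    }
    return -1;
  }

  // First docID of the child block belonging to the parent at parentTarget.
  static int firstChildOf(boolean[] parentBits, int parentTarget) {
    if (parentTarget == 0) {
      // A parent that got docID 0 cannot have children, so there is no
      // block to scan; without this check we would call prevSetBit(-1).
      return -1;
    }
    return prevSetBit(parentBits, parentTarget - 1) + 1;
  }

  public static void main(String[] args) {
    // Docs 0-1 are children of the parent at doc 2; doc 3 is a childless parent.
    boolean[] parentBits = {false, false, true, true};
    System.out.println(firstChildOf(parentBits, 2)); // 0: block spans docs 0-1
    System.out.println(firstChildOf(parentBits, 3)); // 3: empty block (previous parent is doc 2)
    System.out.println(firstChildOf(parentBits, 0)); // -1: guard taken, no children possible
  }
}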
@@ -21,11 +21,13 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.NumericField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LogDocMergePolicy;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause.Occur;
@@ -36,6 +38,7 @@ import org.apache.lucene.search.join.BlockJoinQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.ReaderUtil;
 import org.apache.lucene.util._TestUtil;
 
 public class TestBlockJoin extends LuceneTestCase {
@@ -85,7 +88,7 @@ public class TestBlockJoin extends LuceneTestCase {
 
     IndexReader r = w.getReader();
     w.close();
-    IndexSearcher s = new IndexSearcher(r);
+    IndexSearcher s = newSearcher(r);
 
     // Create a filter that defines "parent" documents in the index - in this case resumes
     Filter parentsFilter = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("docType", "resume"))));
@@ -282,10 +285,10 @@ public class TestBlockJoin extends LuceneTestCase {
      }
    }
 
-    final IndexSearcher s = new IndexSearcher(r);
+    final IndexSearcher s = newSearcher(r);
     s.setDefaultFieldSortScoring(true, true);
 
-    final IndexSearcher joinS = new IndexSearcher(joinR);
+    final IndexSearcher joinS = newSearcher(joinR);
 
     final Filter parentsFilter = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("isParent", "x"))));
 
@@ -516,7 +519,7 @@ public class TestBlockJoin extends LuceneTestCase {
 
     IndexReader r = w.getReader();
     w.close();
-    IndexSearcher s = new IndexSearcher(r);
+    IndexSearcher s = newSearcher(r);
 
     // Create a filter that defines "parent" documents in the index - in this case resumes
     Filter parentsFilter = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("docType", "resume"))));
@@ -590,4 +593,62 @@ public class TestBlockJoin extends LuceneTestCase {
     r.close();
     dir.close();
   }
+
+  public void testAdvanceSingleParentSingleChild() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random, dir);
+    Document childDoc = new Document();
+    childDoc.add(newField("child", "1", StringField.TYPE_UNSTORED));
+    Document parentDoc = new Document();
+    parentDoc.add(newField("parent", "1", StringField.TYPE_UNSTORED));
+    w.addDocuments(Arrays.asList(new Document[] {childDoc, parentDoc}));
+    IndexReader r = w.getReader();
+    w.close();
+    IndexSearcher s = newSearcher(r);
+    Query tq = new TermQuery(new Term("child", "1"));
+    Filter parentFilter = new CachingWrapperFilter(
+                            new QueryWrapperFilter(
+                              new TermQuery(new Term("parent", "1"))));
+
+    BlockJoinQuery q = new BlockJoinQuery(tq, parentFilter, BlockJoinQuery.ScoreMode.Avg);
+    Weight weight = s.createNormalizedWeight(q);
+    DocIdSetIterator disi = weight.scorer(ReaderUtil.leaves(s.getIndexReader().getTopReaderContext())[0], true, true, null);
+    assertEquals(1, disi.advance(1));
+    r.close();
+    dir.close();
+  }
+
+  public void testAdvanceSingleParentNoChild() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(new LogDocMergePolicy()));
+    Document parentDoc = new Document();
+    parentDoc.add(newField("parent", "1", StringField.TYPE_UNSTORED));
+    parentDoc.add(newField("isparent", "yes", StringField.TYPE_UNSTORED));
+    w.addDocuments(Arrays.asList(new Document[] {parentDoc}));
+
+    // Add another doc so scorer is not null
+    parentDoc = new Document();
+    parentDoc.add(newField("parent", "2", StringField.TYPE_UNSTORED));
+    parentDoc.add(newField("isparent", "yes", StringField.TYPE_UNSTORED));
+    Document childDoc = new Document();
+    childDoc.add(newField("child", "2", StringField.TYPE_UNSTORED));
+    w.addDocuments(Arrays.asList(new Document[] {childDoc, parentDoc}));
+
+    // Need single seg:
+    w.forceMerge(1);
+    IndexReader r = w.getReader();
+    w.close();
+    IndexSearcher s = newSearcher(r);
+    Query tq = new TermQuery(new Term("child", "2"));
+    Filter parentFilter = new CachingWrapperFilter(
+                            new QueryWrapperFilter(
+                              new TermQuery(new Term("isparent", "yes"))));
+
+    BlockJoinQuery q = new BlockJoinQuery(tq, parentFilter, BlockJoinQuery.ScoreMode.Avg);
+    Weight weight = s.createNormalizedWeight(q);
+    DocIdSetIterator disi = weight.scorer(ReaderUtil.leaves(s.getIndexReader().getTopReaderContext())[0], true, true, null);
+    assertEquals(2, disi.advance(0));
+    r.close();
+    dir.close();
+  }
 }