Added random filtered query logic to the random parent query tests.
Fixed bug in ParentQuery.ChildScorer#advance(), the parent value was checked for the _uid field while the _parent should have been checked.
This commit is contained in:
parent
292e53fe77
commit
079ac79617
|
@@ -58,7 +58,7 @@ public class ParentQuery extends Query {
|
||||||
public ParentQuery(Query parentQuery, String parentType, Filter childrenFilter) {
|
public ParentQuery(Query parentQuery, String parentType, Filter childrenFilter) {
|
||||||
this.originalParentQuery = parentQuery;
|
this.originalParentQuery = parentQuery;
|
||||||
this.parentType = parentType;
|
this.parentType = parentType;
|
||||||
this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
|
this.childrenFilter = childrenFilter;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@@ -137,7 +137,7 @@ public class ParentQuery extends Query {
|
||||||
return Queries.newMatchNoDocsQuery().createWeight(searcher);
|
return Queries.newMatchNoDocsQuery().createWeight(searcher);
|
||||||
}
|
}
|
||||||
|
|
||||||
ChildWeight childWeight = new ChildWeight(parentQuery.createWeight(searcher), searchContext, uidToScore);
|
ChildWeight childWeight = new ChildWeight(parentQuery.createWeight(searcher), childrenFilter, searchContext, uidToScore);
|
||||||
searchContext.addReleasable(childWeight);
|
searchContext.addReleasable(childWeight);
|
||||||
return childWeight;
|
return childWeight;
|
||||||
}
|
}
|
||||||
|
@@ -181,11 +181,13 @@ public class ParentQuery extends Query {
|
||||||
private class ChildWeight extends Weight implements Releasable {
|
private class ChildWeight extends Weight implements Releasable {
|
||||||
|
|
||||||
private final Weight parentWeight;
|
private final Weight parentWeight;
|
||||||
|
private final Filter childrenFilter;
|
||||||
private final SearchContext searchContext;
|
private final SearchContext searchContext;
|
||||||
private final Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore;
|
private final Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore;
|
||||||
|
|
||||||
private ChildWeight(Weight parentWeight, SearchContext searchContext, Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore) {
|
private ChildWeight(Weight parentWeight, Filter childrenFilter, SearchContext searchContext, Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore) {
|
||||||
this.parentWeight = parentWeight;
|
this.parentWeight = parentWeight;
|
||||||
|
this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
|
||||||
this.searchContext = searchContext;
|
this.searchContext = searchContext;
|
||||||
this.uidToScore = uidToScore;
|
this.uidToScore = uidToScore;
|
||||||
}
|
}
|
||||||
|
@@ -291,7 +293,7 @@ public class ParentQuery extends Query {
|
||||||
if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
|
if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
|
||||||
return currentChildDoc;
|
return currentChildDoc;
|
||||||
}
|
}
|
||||||
HashedBytesArray uid = typeCache.idByDoc(currentChildDoc);
|
HashedBytesArray uid = typeCache.parentIdByDoc(currentChildDoc);
|
||||||
if (uid == null) {
|
if (uid == null) {
|
||||||
return nextDoc();
|
return nextDoc();
|
||||||
}
|
}
|
||||||
|
|
|
@@ -31,7 +31,9 @@ import org.apache.lucene.search.Query;
|
||||||
import org.apache.lucene.search.TermQuery;
|
import org.apache.lucene.search.TermQuery;
|
||||||
import org.apache.lucene.store.Directory;
|
import org.apache.lucene.store.Directory;
|
||||||
import org.apache.lucene.util.FixedBitSet;
|
import org.apache.lucene.util.FixedBitSet;
|
||||||
|
import org.elasticsearch.common.lucene.search.NotFilter;
|
||||||
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
|
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
|
||||||
|
import org.elasticsearch.common.lucene.search.XFilteredQuery;
|
||||||
import org.elasticsearch.index.engine.Engine;
|
import org.elasticsearch.index.engine.Engine;
|
||||||
import org.elasticsearch.index.mapper.Uid;
|
import org.elasticsearch.index.mapper.Uid;
|
||||||
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
||||||
|
@@ -101,6 +103,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
|
||||||
}
|
}
|
||||||
for (int i = 0; i < numChildDocs; i++) {
|
for (int i = 0; i < numChildDocs; i++) {
|
||||||
boolean markChildAsDeleted = rarely();
|
boolean markChildAsDeleted = rarely();
|
||||||
|
boolean filterMe = rarely();
|
||||||
String child = Integer.toString(childDocId++);
|
String child = Integer.toString(childDocId++);
|
||||||
|
|
||||||
document = new Document();
|
document = new Document();
|
||||||
|
@@ -110,16 +113,19 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
|
||||||
if (markChildAsDeleted) {
|
if (markChildAsDeleted) {
|
||||||
document.add(new StringField("delete", "me", Field.Store.NO));
|
document.add(new StringField("delete", "me", Field.Store.NO));
|
||||||
}
|
}
|
||||||
|
if (filterMe) {
|
||||||
|
document.add(new StringField("filter", "me", Field.Store.NO));
|
||||||
|
}
|
||||||
indexWriter.addDocument(document);
|
indexWriter.addDocument(document);
|
||||||
|
|
||||||
if (!markChildAsDeleted) {
|
if (!markParentAsDeleted) {
|
||||||
NavigableSet<String> childIds;
|
NavigableSet<String> childIds;
|
||||||
if (parentValueToChildDocIds.containsKey(parentValue)) {
|
if (parentValueToChildDocIds.containsKey(parentValue)) {
|
||||||
childIds = parentValueToChildDocIds.lget();
|
childIds = parentValueToChildDocIds.lget();
|
||||||
} else {
|
} else {
|
||||||
parentValueToChildDocIds.put(parentValue, childIds = new TreeSet<String>());
|
parentValueToChildDocIds.put(parentValue, childIds = new TreeSet<String>());
|
||||||
}
|
}
|
||||||
if (!markParentAsDeleted) {
|
if (!markChildAsDeleted && !filterMe) {
|
||||||
childIdToParentId.put(Integer.valueOf(child), parentDocId);
|
childIdToParentId.put(Integer.valueOf(child), parentDocId);
|
||||||
childIds.add(child);
|
childIds.add(child);
|
||||||
}
|
}
|
||||||
|
@@ -139,6 +145,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
|
||||||
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
|
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
|
||||||
|
|
||||||
TermFilter rawChildrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
|
TermFilter rawChildrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
|
||||||
|
Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
|
||||||
int max = numUniqueParentValues / 4;
|
int max = numUniqueParentValues / 4;
|
||||||
for (int i = 0; i < max; i++) {
|
for (int i = 0; i < max; i++) {
|
||||||
// Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact
|
// Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact
|
||||||
|
@@ -150,6 +157,14 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
|
||||||
childrenFilter = rawChildrenFilter;
|
childrenFilter = rawChildrenFilter;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Using this in FQ, will invoke / test the Scorer#advance(..) and also let the Weight#scorer not get live docs as acceptedDocs
|
||||||
|
Filter filterMe;
|
||||||
|
if (random().nextBoolean()) {
|
||||||
|
filterMe = SearchContext.current().filterCache().cache(rawFilterMe);
|
||||||
|
} else {
|
||||||
|
filterMe = rawFilterMe;
|
||||||
|
}
|
||||||
|
|
||||||
// Simulate a child update
|
// Simulate a child update
|
||||||
if (random().nextBoolean()) {
|
if (random().nextBoolean()) {
|
||||||
int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
|
int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
|
||||||
|
@@ -190,6 +205,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
query = new XFilteredQuery(query, filterMe);
|
||||||
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
|
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
|
||||||
searcher.search(query, collector);
|
searcher.search(query, collector);
|
||||||
FixedBitSet actualResult = collector.getResult();
|
FixedBitSet actualResult = collector.getResult();
|
||||||
|
|
|
@@ -29,6 +29,8 @@ import org.apache.lucene.queries.TermFilter;
|
||||||
import org.apache.lucene.search.*;
|
import org.apache.lucene.search.*;
|
||||||
import org.apache.lucene.store.Directory;
|
import org.apache.lucene.store.Directory;
|
||||||
import org.apache.lucene.util.FixedBitSet;
|
import org.apache.lucene.util.FixedBitSet;
|
||||||
|
import org.elasticsearch.common.lucene.search.NotFilter;
|
||||||
|
import org.elasticsearch.common.lucene.search.XFilteredQuery;
|
||||||
import org.elasticsearch.index.engine.Engine;
|
import org.elasticsearch.index.engine.Engine;
|
||||||
import org.elasticsearch.index.mapper.Uid;
|
import org.elasticsearch.index.mapper.Uid;
|
||||||
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
||||||
|
@@ -99,6 +101,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
|
||||||
for (int i = 0; i < numChildDocs; i++) {
|
for (int i = 0; i < numChildDocs; i++) {
|
||||||
String child = Integer.toString(childDocId++);
|
String child = Integer.toString(childDocId++);
|
||||||
boolean markChildAsDeleted = rarely();
|
boolean markChildAsDeleted = rarely();
|
||||||
|
boolean filterMe = rarely();
|
||||||
document = new Document();
|
document = new Document();
|
||||||
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", child), Field.Store.YES));
|
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", child), Field.Store.YES));
|
||||||
document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
|
document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
|
||||||
|
@@ -106,16 +109,19 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
|
||||||
if (markChildAsDeleted) {
|
if (markChildAsDeleted) {
|
||||||
document.add(new StringField("delete", "me", Field.Store.NO));
|
document.add(new StringField("delete", "me", Field.Store.NO));
|
||||||
}
|
}
|
||||||
|
if (filterMe) {
|
||||||
|
document.add(new StringField("filter", "me", Field.Store.NO));
|
||||||
|
}
|
||||||
indexWriter.addDocument(document);
|
indexWriter.addDocument(document);
|
||||||
|
|
||||||
if (!markChildAsDeleted) {
|
if (!markParentAsDeleted) {
|
||||||
NavigableMap<String, Float> childIdToScore;
|
NavigableMap<String, Float> childIdToScore;
|
||||||
if (parentValueToChildIds.containsKey(parentValue)) {
|
if (parentValueToChildIds.containsKey(parentValue)) {
|
||||||
childIdToScore = parentValueToChildIds.lget();
|
childIdToScore = parentValueToChildIds.lget();
|
||||||
} else {
|
} else {
|
||||||
parentValueToChildIds.put(parentValue, childIdToScore = new TreeMap<String, Float>());
|
parentValueToChildIds.put(parentValue, childIdToScore = new TreeMap<String, Float>());
|
||||||
}
|
}
|
||||||
if (!markParentAsDeleted) {
|
if (!markChildAsDeleted && !filterMe) {
|
||||||
assert !childIdToScore.containsKey(child);
|
assert !childIdToScore.containsKey(child);
|
||||||
childIdToScore.put(child, 1f);
|
childIdToScore.put(child, 1f);
|
||||||
childIdToParentId.put(Integer.valueOf(child), parentDocId);
|
childIdToParentId.put(Integer.valueOf(child), parentDocId);
|
||||||
|
@@ -136,6 +142,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
|
||||||
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
|
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
|
||||||
|
|
||||||
TermFilter rawChildrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
|
TermFilter rawChildrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
|
||||||
|
Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
|
||||||
int max = numUniqueParentValues / 4;
|
int max = numUniqueParentValues / 4;
|
||||||
for (int i = 0; i < max; i++) {
|
for (int i = 0; i < max; i++) {
|
||||||
// Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact
|
// Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact
|
||||||
|
@@ -147,6 +154,14 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
|
||||||
childrenFilter = rawChildrenFilter;
|
childrenFilter = rawChildrenFilter;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Using this in FQ, will invoke / test the Scorer#advance(..) and also let the Weight#scorer not get live docs as acceptedDocs
|
||||||
|
Filter filterMe;
|
||||||
|
if (random().nextBoolean()) {
|
||||||
|
filterMe = SearchContext.current().filterCache().cache(rawFilterMe);
|
||||||
|
} else {
|
||||||
|
filterMe = rawFilterMe;
|
||||||
|
}
|
||||||
|
|
||||||
// Simulate a child update
|
// Simulate a child update
|
||||||
if (random().nextBoolean()) {
|
if (random().nextBoolean()) {
|
||||||
int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
|
int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
|
||||||
|
@@ -176,6 +191,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
|
||||||
String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
|
String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
|
||||||
Query parentQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", parentValue)));
|
Query parentQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", parentValue)));
|
||||||
Query query = new ParentQuery(parentQuery,"parent", childrenFilter);
|
Query query = new ParentQuery(parentQuery,"parent", childrenFilter);
|
||||||
|
query = new XFilteredQuery(query, filterMe);
|
||||||
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
|
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
|
||||||
int numHits = 1 + random().nextInt(25);
|
int numHits = 1 + random().nextInt(25);
|
||||||
TopScoreDocCollector actualTopDocsCollector = TopScoreDocCollector.create(numHits, false);
|
TopScoreDocCollector actualTopDocsCollector = TopScoreDocCollector.create(numHits, false);
|
||||||
|
|
Loading…
Reference in New Issue