Make sure only relevant documents are evaluated in the second round of the lookup phase.
Both the has_parent and has_child filters are internally executed in two rounds. In the second round all documents were evaluated, while only specific documents actually need to be checked: in the has_child case only documents of the configured parent type, and in the has_parent case only child documents. Closes #3034
This commit is contained in:
parent ae6c1b345f
commit 906f278896
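The fix, summarized: the first round runs the inner query and collects the UIDs the other side must join against; the second round then tests candidate documents against that collected set. Below is a minimal, library-free sketch of the has_child case (the class, method, and parameter names are illustrative only and are not the actual Lucene or Elasticsearch API): round one collects the parent UIDs referenced by matching child documents, and the fixed round two only tests documents accepted by a parent-type filter instead of every document. The has_parent case mirrors this with the roles swapped, restricting the second round to child documents (everything that is not a parent type).

    import java.util.HashSet;
    import java.util.Set;

    // Hypothetical stand-in for the two-round has_child lookup described in the commit message.
    class SecondRoundSketch {

        // Round one: remember the parent UIDs referenced by child documents that match the inner query.
        static Set<String> collectParentUids(String[] childParentUid, boolean[] childMatches) {
            Set<String> collected = new HashSet<String>();
            for (int doc = 0; doc < childMatches.length; doc++) {
                if (childMatches[doc]) {
                    collected.add(childParentUid[doc]);
                }
            }
            return collected;
        }

        // Round two: before the fix every document was tested against the collected set;
        // now documents that are not of the parent type are skipped up front.
        static boolean[] matchParents(String[] docUid, boolean[] isParentType, Set<String> collectedParentUids) {
            boolean[] matches = new boolean[docUid.length];
            for (int doc = 0; doc < docUid.length; doc++) {
                if (isParentType[doc] && collectedParentUids.contains(docUid[doc])) {
                    matches[doc] = true;
                }
            }
            return matches;
        }
    }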
HasChildFilterParser.java

@@ -133,7 +133,9 @@ public class HasChildFilterParser implements FilterParser {
             throw new ElasticSearchIllegalStateException("[has_child] Can't execute, search context not set.");
         }
 
-        HasChildFilter childFilter = HasChildFilter.create(query, parentType, childType, searchContext);
+        DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
+        Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
+        HasChildFilter childFilter = new HasChildFilter(query, parentType, childType, parentFilter, searchContext);
         searchContext.addRewrite(childFilter);
         Filter filter = childFilter;
 
HasChildQueryParser.java

@@ -19,12 +19,12 @@
 
 package org.elasticsearch.index.query;
 
-import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.search.child.ChildrenQuery;

@@ -127,16 +127,17 @@ public class HasChildQueryParser implements QueryParser {
         if (searchContext == null) {
             throw new ElasticSearchIllegalStateException("[has_child] Can't execute, search context not set.");
         }
+
         Query query;
+        Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
         if (scoreType != null) {
-            Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
             ChildrenQuery childrenQuery = new ChildrenQuery(searchContext, parentType, childType, parentFilter, innerQuery, scoreType);
             searchContext.addRewrite(childrenQuery);
             query = childrenQuery;
         } else {
-            HasChildFilter hasChildFilter = HasChildFilter.create(innerQuery, parentType, childType, searchContext);
+            HasChildFilter hasChildFilter = new HasChildFilter(innerQuery, parentType, childType, parentFilter, searchContext);
             searchContext.addRewrite(hasChildFilter);
-            query = new ConstantScoreQuery(hasChildFilter);
+            query = new XConstantScoreQuery(hasChildFilter);
         }
         query.setBoost(boost);
         return query;
HasParentFilterParser.java

@@ -19,20 +19,26 @@
 
 package org.elasticsearch.index.query;
 
+import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lucene.search.NotFilter;
+import org.elasticsearch.common.lucene.search.XBooleanFilter;
 import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
 import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
 import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.search.child.HasParentFilter;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 /**
  *

@@ -130,9 +136,31 @@ public class HasParentFilterParser implements FilterParser {
             throw new ElasticSearchIllegalStateException("[has_parent] Can't execute, search context not set");
         }
 
-        HasParentFilter parentFilter = HasParentFilter.create(query, parentType, searchContext);
-        searchContext.addRewrite(parentFilter);
-        Filter filter = parentFilter;
+        List<String> parentTypes = new ArrayList<String>(2);
+        for (DocumentMapper documentMapper : parseContext.mapperService()) {
+            ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
+            if (parentFieldMapper != null) {
+                parentTypes.add(parentFieldMapper.type());
+            }
+        }
+
+        Filter parentFilter;
+        if (parentTypes.size() == 1) {
+            DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.get(0));
+            parentFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
+        } else {
+            XBooleanFilter parentsFilter = new XBooleanFilter();
+            for (String parentTypeStr : parentTypes) {
+                DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr);
+                Filter filter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
+                parentsFilter.add(filter, BooleanClause.Occur.SHOULD);
+            }
+            parentFilter = parentsFilter;
+        }
+        Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);
+        HasParentFilter hasParentFilter = new HasParentFilter(query, parentType, searchContext, childrenFilter);
+        searchContext.addRewrite(hasParentFilter);
+        Filter filter = hasParentFilter;
 
         if (cache) {
             filter = parseContext.cacheFilter(filter, cacheKey);
HasParentQueryParser.java

@@ -20,13 +20,14 @@
 package org.elasticsearch.index.query;
 
 import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lucene.search.NotFilter;
 import org.elasticsearch.common.lucene.search.XBooleanFilter;
+import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
 import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.DocumentMapper;

@@ -122,32 +123,6 @@ public class HasParentQueryParser implements QueryParser {
             throw new QueryParsingException(parseContext.index(), "[has_parent] query configured 'parent_type' [" + parentType + "] is not a valid type");
         }
 
-        List<String> childTypes = new ArrayList<String>(2);
-        for (DocumentMapper documentMapper : parseContext.mapperService()) {
-            ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
-            if (parentFieldMapper == null) {
-                continue;
-            }
-
-            if (parentDocMapper.type().equals(parentFieldMapper.type())) {
-                childTypes.add(documentMapper.type());
-            }
-        }
-
-        Filter childFilter;
-        if (childTypes.size() == 1) {
-            DocumentMapper documentMapper = parseContext.mapperService().documentMapper(childTypes.get(0));
-            childFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
-        } else {
-            XBooleanFilter childrenFilter = new XBooleanFilter();
-            for (String childType : childTypes) {
-                DocumentMapper documentMapper = parseContext.mapperService().documentMapper(childType);
-                Filter filter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
-                childrenFilter.add(filter, BooleanClause.Occur.SHOULD);
-            }
-            childFilter = childrenFilter;
-        }
-
         innerQuery.setBoost(boost);
         // wrap the query with type query
         innerQuery = new XFilteredQuery(innerQuery, parseContext.cacheFilter(parentDocMapper.typeFilter(), null));

@@ -156,15 +131,38 @@ public class HasParentQueryParser implements QueryParser {
             throw new ElasticSearchIllegalStateException("[has_parent] Can't execute, search context not set.");
         }
 
+        List<String> parentTypes = new ArrayList<String>(2);
+        for (DocumentMapper documentMapper : parseContext.mapperService()) {
+            ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
+            if (parentFieldMapper != null) {
+                parentTypes.add(parentFieldMapper.type());
+            }
+        }
+
+        Filter parentFilter;
+        if (parentTypes.size() == 1) {
+            DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.get(0));
+            parentFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
+        } else {
+            XBooleanFilter parentsFilter = new XBooleanFilter();
+            for (String parentTypeStr : parentTypes) {
+                DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr);
+                Filter filter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
+                parentsFilter.add(filter, BooleanClause.Occur.SHOULD);
+            }
+            parentFilter = parentsFilter;
+        }
+        Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);
+
         Query query;
         if (score) {
-            ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childTypes, childFilter);
+            ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childrenFilter);
             searchContext.addRewrite(parentQuery);
             query = parentQuery;
         } else {
-            HasParentFilter hasParentFilter = HasParentFilter.create(innerQuery, parentType, searchContext);
+            HasParentFilter hasParentFilter = new HasParentFilter(innerQuery, parentType, searchContext, childrenFilter);
             searchContext.addRewrite(hasParentFilter);
-            query = new ConstantScoreQuery(hasParentFilter);
+            query = new XConstantScoreQuery(hasParentFilter);
         }
         query.setBoost(boost);
         return query;
HasChildFilter.java

@@ -28,8 +28,8 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.Bits;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.CacheRecycler;
-import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.bytes.HashedBytesArray;
+import org.elasticsearch.common.lucene.docset.DocIdSets;
 import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
 import org.elasticsearch.index.cache.id.IdReaderTypeCache;
 import org.elasticsearch.search.internal.SearchContext;

@@ -39,14 +39,18 @@ import java.io.IOException;
 /**
  *
  */
-public abstract class HasChildFilter extends Filter implements SearchContext.Rewrite {
+public class HasChildFilter extends Filter implements SearchContext.Rewrite {
 
     final Query childQuery;
     final String parentType;
     final String childType;
+    final Filter parentFilter;
     final SearchContext searchContext;
 
-    protected HasChildFilter(Query childQuery, String parentType, String childType, SearchContext searchContext) {
+    THashSet<HashedBytesArray> collectedUids;
+
+    public HasChildFilter(Query childQuery, String parentType, String childType, Filter parentFilter, SearchContext searchContext) {
+        this.parentFilter = parentFilter;
         this.searchContext = searchContext;
         this.parentType = parentType;
         this.childType = childType;

@@ -90,79 +94,74 @@ public abstract class HasChildFilter extends Filter implements SearchContext.Rewrite {
         return sb.toString();
     }
 
-    public static HasChildFilter create(Query childQuery, String parentType, String childType, SearchContext searchContext) {
-        return new Uid(childQuery, parentType, childType, searchContext);
-    }
-
-    static class Uid extends HasChildFilter {
-
-        THashSet<HashedBytesArray> collectedUids;
-
-        Uid(Query childQuery, String parentType, String childType, SearchContext searchContext) {
-            super(childQuery, parentType, childType, searchContext);
+    public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
+        if (collectedUids == null) {
+            throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
         }
 
-        public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
-            if (collectedUids == null) {
-                throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
+        DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, null);
+        if (DocIdSets.isEmpty(parentDocIdSet)) {
+            return null;
         }
 
-            IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
-            if (idReaderTypeCache != null) {
-                return new ParentDocSet(context.reader(), acceptDocs, collectedUids, idReaderTypeCache);
-            } else {
-                return null;
-            }
+        Bits parentsBits = DocIdSets.toSafeBits(context.reader(), parentDocIdSet);
+        IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
+        if (idReaderTypeCache != null) {
+            return new ParentDocSet(context.reader(), parentsBits, collectedUids, idReaderTypeCache);
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public void contextRewrite(SearchContext searchContext) throws Exception {
+        searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
+        collectedUids = CacheRecycler.popHashSet();
+        UidCollector collector = new UidCollector(parentType, searchContext, collectedUids);
+        searchContext.searcher().search(childQuery, collector);
+    }
+
+    @Override
+    public void contextClear() {
+        if (collectedUids != null) {
+            CacheRecycler.pushHashSet(collectedUids);
+        }
+        collectedUids = null;
+    }
+
+    final static class ParentDocSet extends MatchDocIdSet {
+
+        final IndexReader reader;
+        final THashSet<HashedBytesArray> parents;
+        final IdReaderTypeCache typeCache;
+
+        ParentDocSet(IndexReader reader, Bits acceptDocs, THashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
+            super(reader.maxDoc(), acceptDocs);
+            this.reader = reader;
+            this.parents = parents;
+            this.typeCache = typeCache;
         }
 
         @Override
-        public void contextRewrite(SearchContext searchContext) throws Exception {
-            searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
-            collectedUids = CacheRecycler.popHashSet();
-            UidCollector collector = new UidCollector(parentType, searchContext, collectedUids);
-            searchContext.searcher().search(childQuery, collector);
+        protected boolean matchDoc(int doc) {
+            return parents.contains(typeCache.idByDoc(doc));
+        }
+    }
+
+    final static class UidCollector extends ParentIdCollector {
+
+        final THashSet<HashedBytesArray> collectedUids;
+
+        UidCollector(String parentType, SearchContext context, THashSet<HashedBytesArray> collectedUids) {
+            super(parentType, context);
+            this.collectedUids = collectedUids;
         }
 
         @Override
-        public void contextClear() {
-            if (collectedUids != null) {
-                CacheRecycler.pushHashSet(collectedUids);
-            }
-            collectedUids = null;
+        public void collect(int doc, HashedBytesArray parentIdByDoc){
+            collectedUids.add(parentIdByDoc);
         }
-
-        final static class ParentDocSet extends MatchDocIdSet {
-
-            final IndexReader reader;
-            final THashSet<HashedBytesArray> parents;
-            final IdReaderTypeCache typeCache;
-
-            ParentDocSet(IndexReader reader, @Nullable Bits acceptDocs, THashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
-                super(reader.maxDoc(), acceptDocs);
-                this.reader = reader;
-                this.parents = parents;
-                this.typeCache = typeCache;
-            }
-
-            @Override
-            protected boolean matchDoc(int doc) {
-                return parents.contains(typeCache.idByDoc(doc));
-            }
-        }
-
-        final static class UidCollector extends ParentIdCollector {
-
-            private final THashSet<HashedBytesArray> collectedUids;
-
-            UidCollector(String parentType, SearchContext context, THashSet<HashedBytesArray> collectedUids) {
-                super(parentType, context);
-                this.collectedUids = collectedUids;
-            }
-
-            @Override
-            public void collect(int doc, HashedBytesArray parentIdByDoc){
-                collectedUids.add(parentIdByDoc);
-            }
-
-        }
     }
 
 }
HasParentFilter.java

@@ -28,8 +28,8 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.Bits;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.CacheRecycler;
-import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.bytes.HashedBytesArray;
+import org.elasticsearch.common.lucene.docset.DocIdSets;
 import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.index.cache.id.IdReaderTypeCache;

@@ -40,16 +40,20 @@ import java.io.IOException;
 /**
  * A filter that only return child documents that are linked to the parent documents that matched with the inner query.
  */
-public abstract class HasParentFilter extends Filter implements SearchContext.Rewrite {
+public class HasParentFilter extends Filter implements SearchContext.Rewrite {
 
     final Query parentQuery;
     final String parentType;
     final SearchContext context;
+    final Filter childrenFilter;
 
-    HasParentFilter(Query parentQuery, String parentType, SearchContext context) {
+    THashSet<HashedBytesArray> parents;
+
+    public HasParentFilter(Query parentQuery, String parentType, SearchContext context, Filter childrenFilter) {
         this.parentQuery = parentQuery;
         this.parentType = parentType;
         this.context = context;
+        this.childrenFilter = childrenFilter;
     }
 
     @Override

@@ -71,6 +75,89 @@ public abstract class HasParentFilter extends Filter implements SearchContext.Rewrite {
         return true;
     }
 
+    public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
+        if (parents == null) {
+            throw new ElasticSearchIllegalStateException("has_parent filter hasn't executed properly");
+        }
+
+        DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(readerContext, null);
+        if (DocIdSets.isEmpty(childrenDocIdSet)) {
+            return null;
+        }
+
+        Bits childrenBits = DocIdSets.toSafeBits(readerContext.reader(), childrenDocIdSet);
+        IdReaderTypeCache idReaderTypeCache = context.idCache().reader(readerContext.reader()).type(parentType);
+        if (idReaderTypeCache != null) {
+            return new ChildrenDocSet(readerContext.reader(), childrenBits, parents, idReaderTypeCache);
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public void contextRewrite(SearchContext searchContext) throws Exception {
+        searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
+        parents = CacheRecycler.popHashSet();
+        ParentUidsCollector collector = new ParentUidsCollector(parents, context, parentType);
+        searchContext.searcher().search(parentQuery, collector);
+        parents = collector.collectedUids;
+    }
+
+    @Override
+    public void contextClear() {
+        if (parents != null) {
+            CacheRecycler.pushHashSet(parents);
+        }
+        parents = null;
+    }
+
+    final static class ChildrenDocSet extends MatchDocIdSet {
+
+        final IndexReader reader;
+        final THashSet<HashedBytesArray> parents;
+        final IdReaderTypeCache idReaderTypeCache;
+
+        ChildrenDocSet(IndexReader reader, Bits acceptDocs, THashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
+            super(reader.maxDoc(), acceptDocs);
+            this.reader = reader;
+            this.parents = parents;
+            this.idReaderTypeCache = idReaderTypeCache;
+        }
+
+        @Override
+        protected boolean matchDoc(int doc) {
+            return parents.contains(idReaderTypeCache.parentIdByDoc(doc));
+        }
+
+    }
+
+    final static class ParentUidsCollector extends NoopCollector {
+
+        final THashSet<HashedBytesArray> collectedUids;
+        final SearchContext context;
+        final String parentType;
+
+        IdReaderTypeCache typeCache;
+
+        ParentUidsCollector(THashSet<HashedBytesArray> collectedUids, SearchContext context, String parentType) {
+            this.collectedUids = collectedUids;
+            this.context = context;
+            this.parentType = parentType;
+        }
+
+        public void collect(int doc) throws IOException {
+            // It can happen that for particular segment no document exist for an specific type. This prevents NPE
+            if (typeCache != null) {
+                collectedUids.add(typeCache.idByDoc(doc));
+            }
+        }
+
+        @Override
+        public void setNextReader(AtomicReaderContext readerContext) throws IOException {
+            typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
+        }
+    }
+
     @Override
     public int hashCode() {
         int result = parentQuery.hashCode();

@@ -85,96 +172,5 @@ public abstract class HasParentFilter extends Filter implements SearchContext.Rewrite {
         return sb.toString();
     }
 
-    public static HasParentFilter create(Query query, String parentType, SearchContext context) {
-        return new Uid(query, parentType, context);
-    }
-
-    static class Uid extends HasParentFilter {
-
-        THashSet<HashedBytesArray> parents;
-
-        Uid(Query query, String parentType, SearchContext context) {
-            super(query, parentType, context);
-        }
-
-        public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
-            if (parents == null) {
-                throw new ElasticSearchIllegalStateException("has_parent filter hasn't executed properly");
-            }
-
-            IdReaderTypeCache idReaderTypeCache = context.idCache().reader(readerContext.reader()).type(parentType);
-            if (idReaderTypeCache != null) {
-                return new ChildrenDocSet(readerContext.reader(), acceptDocs, parents, idReaderTypeCache);
-            } else {
-                return null;
-            }
-        }
-
-        @Override
-        public void contextRewrite(SearchContext searchContext) throws Exception {
-            searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
-            parents = CacheRecycler.popHashSet();
-            ParentUidsCollector collector = new ParentUidsCollector(parents, context, parentType);
-            searchContext.searcher().search(parentQuery, collector);
-            parents = collector.collectedUids;
-        }
-
-        @Override
-        public void contextClear() {
-            if (parents != null) {
-                CacheRecycler.pushHashSet(parents);
-            }
-            parents = null;
-        }
-
-        static class ChildrenDocSet extends MatchDocIdSet {
-
-            final IndexReader reader;
-            final THashSet<HashedBytesArray> parents;
-            final IdReaderTypeCache idReaderTypeCache;
-
-            ChildrenDocSet(IndexReader reader, @Nullable Bits acceptDocs, THashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
-                super(reader.maxDoc(), acceptDocs);
-                this.reader = reader;
-                this.parents = parents;
-                this.idReaderTypeCache = idReaderTypeCache;
-            }
-
-            @Override
-            protected boolean matchDoc(int doc) {
-                return parents.contains(idReaderTypeCache.parentIdByDoc(doc));
-            }
-
-        }
-
-        static class ParentUidsCollector extends NoopCollector {
-
-            final THashSet<HashedBytesArray> collectedUids;
-            final SearchContext context;
-            final String parentType;
-
-            IdReaderTypeCache typeCache;
-
-            ParentUidsCollector(THashSet<HashedBytesArray> collectedUids, SearchContext context, String parentType) {
-                this.collectedUids = collectedUids;
-                this.context = context;
-                this.parentType = parentType;
-            }
-
-            public void collect(int doc) throws IOException {
-                // It can happen that for particular segment no document exist for an specific type. This prevents NPE
-                if (typeCache != null) {
-                    collectedUids.add(typeCache.idByDoc(doc));
-                }
-            }
-
-            @Override
-            public void setNextReader(AtomicReaderContext readerContext) throws IOException {
-                typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
-            }
-        }
-
-    }
-
 }
 
ParentQuery.java

@@ -35,7 +35,6 @@ import org.elasticsearch.index.cache.id.IdReaderTypeCache;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
-import java.util.List;
 import java.util.Set;
 
 /**

@@ -50,16 +49,14 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
     private final Query originalParentQuery;
     private final String parentType;
     private final Filter childrenFilter;
-    private final List<String> childTypes;
 
     private Query rewrittenParentQuery;
     private TObjectFloatHashMap<HashedBytesArray> uidToScore;
 
-    public ParentQuery(SearchContext searchContext, Query parentQuery, String parentType, List<String> childTypes, Filter childrenFilter) {
+    public ParentQuery(SearchContext searchContext, Query parentQuery, String parentType, Filter childrenFilter) {
         this.searchContext = searchContext;
         this.originalParentQuery = parentQuery;
         this.parentType = parentType;
-        this.childTypes = childTypes;
         this.childrenFilter = childrenFilter;
     }
 

@@ -68,7 +65,6 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
         this.originalParentQuery = unwritten.originalParentQuery;
         this.parentType = unwritten.parentType;
         this.childrenFilter = unwritten.childrenFilter;
-        this.childTypes = unwritten.childTypes;
 
         this.rewrittenParentQuery = rewrittenParentQuery;
         this.uidToScore = unwritten.uidToScore;

@@ -125,8 +121,8 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
     @Override
     public String toString(String field) {
         StringBuilder sb = new StringBuilder();
-        sb.append("ParentQuery[").append(parentType).append("/").append(childTypes)
-                .append("](").append(originalParentQuery.toString(field)).append(')')
+        sb.append("ParentQuery[").append(parentType).append("](")
+                .append(originalParentQuery.toString(field)).append(')')
                 .append(ToStringUtils.boost(getBoost()));
         return sb.toString();
     }

@@ -235,6 +231,10 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
                 return null;
             }
             IdReaderTypeCache idTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
+            if (idTypeCache == null) {
+                return null;
+            }
+
             return new ChildScorer(this, uidToScore, childrenDocSet.iterator(), idTypeCache);
         }
     }