Make sure only relevant documents are evaluated in the second round lookup phase.
Both the has_parent and has_child filters are internally executed in two rounds. Previously, in the second round all documents were evaluated, while only specific documents actually need to be checked: in the has_child case only documents belonging to the specified parent type, and in the has_parent case only child documents. Closes #3034
This commit is contained in:
parent
ae6c1b345f
commit
906f278896
|
@ -133,7 +133,9 @@ public class HasChildFilterParser implements FilterParser {
|
|||
throw new ElasticSearchIllegalStateException("[has_child] Can't execute, search context not set.");
|
||||
}
|
||||
|
||||
HasChildFilter childFilter = HasChildFilter.create(query, parentType, childType, searchContext);
|
||||
DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
|
||||
Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
|
||||
HasChildFilter childFilter = new HasChildFilter(query, parentType, childType, parentFilter, searchContext);
|
||||
searchContext.addRewrite(childFilter);
|
||||
Filter filter = childFilter;
|
||||
|
||||
|
|
|
@ -19,12 +19,12 @@
|
|||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.ConstantScoreQuery;
|
||||
import org.apache.lucene.search.Filter;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.search.child.ChildrenQuery;
|
||||
|
@ -127,16 +127,17 @@ public class HasChildQueryParser implements QueryParser {
|
|||
if (searchContext == null) {
|
||||
throw new ElasticSearchIllegalStateException("[has_child] Can't execute, search context not set.");
|
||||
}
|
||||
|
||||
Query query;
|
||||
if (scoreType != null) {
|
||||
Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
|
||||
if (scoreType != null) {
|
||||
ChildrenQuery childrenQuery = new ChildrenQuery(searchContext, parentType, childType, parentFilter, innerQuery, scoreType);
|
||||
searchContext.addRewrite(childrenQuery);
|
||||
query = childrenQuery;
|
||||
} else {
|
||||
HasChildFilter hasChildFilter = HasChildFilter.create(innerQuery, parentType, childType, searchContext);
|
||||
HasChildFilter hasChildFilter = new HasChildFilter(innerQuery, parentType, childType, parentFilter, searchContext);
|
||||
searchContext.addRewrite(hasChildFilter);
|
||||
query = new ConstantScoreQuery(hasChildFilter);
|
||||
query = new XConstantScoreQuery(hasChildFilter);
|
||||
}
|
||||
query.setBoost(boost);
|
||||
return query;
|
||||
|
|
|
@ -19,20 +19,26 @@
|
|||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.Filter;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.NotFilter;
|
||||
import org.elasticsearch.common.lucene.search.XBooleanFilter;
|
||||
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
|
||||
import org.elasticsearch.common.lucene.search.XFilteredQuery;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
||||
import org.elasticsearch.index.search.child.HasParentFilter;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -130,9 +136,31 @@ public class HasParentFilterParser implements FilterParser {
|
|||
throw new ElasticSearchIllegalStateException("[has_parent] Can't execute, search context not set");
|
||||
}
|
||||
|
||||
HasParentFilter parentFilter = HasParentFilter.create(query, parentType, searchContext);
|
||||
searchContext.addRewrite(parentFilter);
|
||||
Filter filter = parentFilter;
|
||||
List<String> parentTypes = new ArrayList<String>(2);
|
||||
for (DocumentMapper documentMapper : parseContext.mapperService()) {
|
||||
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
|
||||
if (parentFieldMapper != null) {
|
||||
parentTypes.add(parentFieldMapper.type());
|
||||
}
|
||||
}
|
||||
|
||||
Filter parentFilter;
|
||||
if (parentTypes.size() == 1) {
|
||||
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.get(0));
|
||||
parentFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
|
||||
} else {
|
||||
XBooleanFilter parentsFilter = new XBooleanFilter();
|
||||
for (String parentTypeStr : parentTypes) {
|
||||
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr);
|
||||
Filter filter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
|
||||
parentsFilter.add(filter, BooleanClause.Occur.SHOULD);
|
||||
}
|
||||
parentFilter = parentsFilter;
|
||||
}
|
||||
Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);
|
||||
HasParentFilter hasParentFilter = new HasParentFilter(query, parentType, searchContext, childrenFilter);
|
||||
searchContext.addRewrite(hasParentFilter);
|
||||
Filter filter = hasParentFilter;
|
||||
|
||||
if (cache) {
|
||||
filter = parseContext.cacheFilter(filter, cacheKey);
|
||||
|
|
|
@ -20,13 +20,14 @@
|
|||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.ConstantScoreQuery;
|
||||
import org.apache.lucene.search.Filter;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.NotFilter;
|
||||
import org.elasticsearch.common.lucene.search.XBooleanFilter;
|
||||
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
|
||||
import org.elasticsearch.common.lucene.search.XFilteredQuery;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
|
@ -122,32 +123,6 @@ public class HasParentQueryParser implements QueryParser {
|
|||
throw new QueryParsingException(parseContext.index(), "[has_parent] query configured 'parent_type' [" + parentType + "] is not a valid type");
|
||||
}
|
||||
|
||||
List<String> childTypes = new ArrayList<String>(2);
|
||||
for (DocumentMapper documentMapper : parseContext.mapperService()) {
|
||||
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
|
||||
if (parentFieldMapper == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (parentDocMapper.type().equals(parentFieldMapper.type())) {
|
||||
childTypes.add(documentMapper.type());
|
||||
}
|
||||
}
|
||||
|
||||
Filter childFilter;
|
||||
if (childTypes.size() == 1) {
|
||||
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(childTypes.get(0));
|
||||
childFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
|
||||
} else {
|
||||
XBooleanFilter childrenFilter = new XBooleanFilter();
|
||||
for (String childType : childTypes) {
|
||||
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(childType);
|
||||
Filter filter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
|
||||
childrenFilter.add(filter, BooleanClause.Occur.SHOULD);
|
||||
}
|
||||
childFilter = childrenFilter;
|
||||
}
|
||||
|
||||
innerQuery.setBoost(boost);
|
||||
// wrap the query with type query
|
||||
innerQuery = new XFilteredQuery(innerQuery, parseContext.cacheFilter(parentDocMapper.typeFilter(), null));
|
||||
|
@ -156,15 +131,38 @@ public class HasParentQueryParser implements QueryParser {
|
|||
throw new ElasticSearchIllegalStateException("[has_parent] Can't execute, search context not set.");
|
||||
}
|
||||
|
||||
List<String> parentTypes = new ArrayList<String>(2);
|
||||
for (DocumentMapper documentMapper : parseContext.mapperService()) {
|
||||
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
|
||||
if (parentFieldMapper != null) {
|
||||
parentTypes.add(parentFieldMapper.type());
|
||||
}
|
||||
}
|
||||
|
||||
Filter parentFilter;
|
||||
if (parentTypes.size() == 1) {
|
||||
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.get(0));
|
||||
parentFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
|
||||
} else {
|
||||
XBooleanFilter parentsFilter = new XBooleanFilter();
|
||||
for (String parentTypeStr : parentTypes) {
|
||||
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr);
|
||||
Filter filter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
|
||||
parentsFilter.add(filter, BooleanClause.Occur.SHOULD);
|
||||
}
|
||||
parentFilter = parentsFilter;
|
||||
}
|
||||
Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);
|
||||
|
||||
Query query;
|
||||
if (score) {
|
||||
ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childTypes, childFilter);
|
||||
ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childrenFilter);
|
||||
searchContext.addRewrite(parentQuery);
|
||||
query = parentQuery;
|
||||
} else {
|
||||
HasParentFilter hasParentFilter = HasParentFilter.create(innerQuery, parentType, searchContext);
|
||||
HasParentFilter hasParentFilter = new HasParentFilter(innerQuery, parentType, searchContext, childrenFilter);
|
||||
searchContext.addRewrite(hasParentFilter);
|
||||
query = new ConstantScoreQuery(hasParentFilter);
|
||||
query = new XConstantScoreQuery(hasParentFilter);
|
||||
}
|
||||
query.setBoost(boost);
|
||||
return query;
|
||||
|
|
|
@ -28,8 +28,8 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.util.Bits;
|
||||
import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||
import org.elasticsearch.common.CacheRecycler;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.bytes.HashedBytesArray;
|
||||
import org.elasticsearch.common.lucene.docset.DocIdSets;
|
||||
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
|
||||
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
@ -39,14 +39,18 @@ import java.io.IOException;
|
|||
/**
|
||||
*
|
||||
*/
|
||||
public abstract class HasChildFilter extends Filter implements SearchContext.Rewrite {
|
||||
public class HasChildFilter extends Filter implements SearchContext.Rewrite {
|
||||
|
||||
final Query childQuery;
|
||||
final String parentType;
|
||||
final String childType;
|
||||
final Filter parentFilter;
|
||||
final SearchContext searchContext;
|
||||
|
||||
protected HasChildFilter(Query childQuery, String parentType, String childType, SearchContext searchContext) {
|
||||
THashSet<HashedBytesArray> collectedUids;
|
||||
|
||||
public HasChildFilter(Query childQuery, String parentType, String childType, Filter parentFilter, SearchContext searchContext) {
|
||||
this.parentFilter = parentFilter;
|
||||
this.searchContext = searchContext;
|
||||
this.parentType = parentType;
|
||||
this.childType = childType;
|
||||
|
@ -90,26 +94,20 @@ public abstract class HasChildFilter extends Filter implements SearchContext.Rew
|
|||
return sb.toString();
|
||||
}
|
||||
|
||||
public static HasChildFilter create(Query childQuery, String parentType, String childType, SearchContext searchContext) {
|
||||
return new Uid(childQuery, parentType, childType, searchContext);
|
||||
}
|
||||
|
||||
static class Uid extends HasChildFilter {
|
||||
|
||||
THashSet<HashedBytesArray> collectedUids;
|
||||
|
||||
Uid(Query childQuery, String parentType, String childType, SearchContext searchContext) {
|
||||
super(childQuery, parentType, childType, searchContext);
|
||||
}
|
||||
|
||||
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
|
||||
if (collectedUids == null) {
|
||||
throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
|
||||
}
|
||||
|
||||
DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, null);
|
||||
if (DocIdSets.isEmpty(parentDocIdSet)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Bits parentsBits = DocIdSets.toSafeBits(context.reader(), parentDocIdSet);
|
||||
IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
|
||||
if (idReaderTypeCache != null) {
|
||||
return new ParentDocSet(context.reader(), acceptDocs, collectedUids, idReaderTypeCache);
|
||||
return new ParentDocSet(context.reader(), parentsBits, collectedUids, idReaderTypeCache);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
@ -137,7 +135,7 @@ public abstract class HasChildFilter extends Filter implements SearchContext.Rew
|
|||
final THashSet<HashedBytesArray> parents;
|
||||
final IdReaderTypeCache typeCache;
|
||||
|
||||
ParentDocSet(IndexReader reader, @Nullable Bits acceptDocs, THashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
|
||||
ParentDocSet(IndexReader reader, Bits acceptDocs, THashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
|
||||
super(reader.maxDoc(), acceptDocs);
|
||||
this.reader = reader;
|
||||
this.parents = parents;
|
||||
|
@ -151,7 +149,8 @@ public abstract class HasChildFilter extends Filter implements SearchContext.Rew
|
|||
}
|
||||
|
||||
final static class UidCollector extends ParentIdCollector {
|
||||
private final THashSet<HashedBytesArray> collectedUids;
|
||||
|
||||
final THashSet<HashedBytesArray> collectedUids;
|
||||
|
||||
UidCollector(String parentType, SearchContext context, THashSet<HashedBytesArray> collectedUids) {
|
||||
super(parentType, context);
|
||||
|
@ -164,5 +163,5 @@ public abstract class HasChildFilter extends Filter implements SearchContext.Rew
|
|||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -28,8 +28,8 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.util.Bits;
|
||||
import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||
import org.elasticsearch.common.CacheRecycler;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.bytes.HashedBytesArray;
|
||||
import org.elasticsearch.common.lucene.docset.DocIdSets;
|
||||
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
|
||||
import org.elasticsearch.common.lucene.search.NoopCollector;
|
||||
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
|
||||
|
@ -40,16 +40,20 @@ import java.io.IOException;
|
|||
/**
|
||||
* A filter that only return child documents that are linked to the parent documents that matched with the inner query.
|
||||
*/
|
||||
public abstract class HasParentFilter extends Filter implements SearchContext.Rewrite {
|
||||
public class HasParentFilter extends Filter implements SearchContext.Rewrite {
|
||||
|
||||
final Query parentQuery;
|
||||
final String parentType;
|
||||
final SearchContext context;
|
||||
final Filter childrenFilter;
|
||||
|
||||
HasParentFilter(Query parentQuery, String parentType, SearchContext context) {
|
||||
THashSet<HashedBytesArray> parents;
|
||||
|
||||
public HasParentFilter(Query parentQuery, String parentType, SearchContext context, Filter childrenFilter) {
|
||||
this.parentQuery = parentQuery;
|
||||
this.parentType = parentType;
|
||||
this.context = context;
|
||||
this.childrenFilter = childrenFilter;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -71,40 +75,20 @@ public abstract class HasParentFilter extends Filter implements SearchContext.Re
|
|||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = parentQuery.hashCode();
|
||||
result = 31 * result + parentType.hashCode();
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("parent_filter[").append(parentType).append("](").append(parentQuery).append(')');
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
public static HasParentFilter create(Query query, String parentType, SearchContext context) {
|
||||
return new Uid(query, parentType, context);
|
||||
}
|
||||
|
||||
static class Uid extends HasParentFilter {
|
||||
|
||||
THashSet<HashedBytesArray> parents;
|
||||
|
||||
Uid(Query query, String parentType, SearchContext context) {
|
||||
super(query, parentType, context);
|
||||
}
|
||||
|
||||
public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
|
||||
if (parents == null) {
|
||||
throw new ElasticSearchIllegalStateException("has_parent filter hasn't executed properly");
|
||||
}
|
||||
|
||||
DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(readerContext, null);
|
||||
if (DocIdSets.isEmpty(childrenDocIdSet)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Bits childrenBits = DocIdSets.toSafeBits(readerContext.reader(), childrenDocIdSet);
|
||||
IdReaderTypeCache idReaderTypeCache = context.idCache().reader(readerContext.reader()).type(parentType);
|
||||
if (idReaderTypeCache != null) {
|
||||
return new ChildrenDocSet(readerContext.reader(), acceptDocs, parents, idReaderTypeCache);
|
||||
return new ChildrenDocSet(readerContext.reader(), childrenBits, parents, idReaderTypeCache);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
@ -127,13 +111,13 @@ public abstract class HasParentFilter extends Filter implements SearchContext.Re
|
|||
parents = null;
|
||||
}
|
||||
|
||||
static class ChildrenDocSet extends MatchDocIdSet {
|
||||
final static class ChildrenDocSet extends MatchDocIdSet {
|
||||
|
||||
final IndexReader reader;
|
||||
final THashSet<HashedBytesArray> parents;
|
||||
final IdReaderTypeCache idReaderTypeCache;
|
||||
|
||||
ChildrenDocSet(IndexReader reader, @Nullable Bits acceptDocs, THashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
|
||||
ChildrenDocSet(IndexReader reader, Bits acceptDocs, THashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
|
||||
super(reader.maxDoc(), acceptDocs);
|
||||
this.reader = reader;
|
||||
this.parents = parents;
|
||||
|
@ -147,7 +131,7 @@ public abstract class HasParentFilter extends Filter implements SearchContext.Re
|
|||
|
||||
}
|
||||
|
||||
static class ParentUidsCollector extends NoopCollector {
|
||||
final static class ParentUidsCollector extends NoopCollector {
|
||||
|
||||
final THashSet<HashedBytesArray> collectedUids;
|
||||
final SearchContext context;
|
||||
|
@ -174,6 +158,18 @@ public abstract class HasParentFilter extends Filter implements SearchContext.Re
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = parentQuery.hashCode();
|
||||
result = 31 * result + parentType.hashCode();
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("parent_filter[").append(parentType).append("](").append(parentQuery).append(')');
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -35,7 +35,6 @@ import org.elasticsearch.index.cache.id.IdReaderTypeCache;
|
|||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
|
@ -50,16 +49,14 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
|
|||
private final Query originalParentQuery;
|
||||
private final String parentType;
|
||||
private final Filter childrenFilter;
|
||||
private final List<String> childTypes;
|
||||
|
||||
private Query rewrittenParentQuery;
|
||||
private TObjectFloatHashMap<HashedBytesArray> uidToScore;
|
||||
|
||||
public ParentQuery(SearchContext searchContext, Query parentQuery, String parentType, List<String> childTypes, Filter childrenFilter) {
|
||||
public ParentQuery(SearchContext searchContext, Query parentQuery, String parentType, Filter childrenFilter) {
|
||||
this.searchContext = searchContext;
|
||||
this.originalParentQuery = parentQuery;
|
||||
this.parentType = parentType;
|
||||
this.childTypes = childTypes;
|
||||
this.childrenFilter = childrenFilter;
|
||||
}
|
||||
|
||||
|
@ -68,7 +65,6 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
|
|||
this.originalParentQuery = unwritten.originalParentQuery;
|
||||
this.parentType = unwritten.parentType;
|
||||
this.childrenFilter = unwritten.childrenFilter;
|
||||
this.childTypes = unwritten.childTypes;
|
||||
|
||||
this.rewrittenParentQuery = rewrittenParentQuery;
|
||||
this.uidToScore = unwritten.uidToScore;
|
||||
|
@ -125,8 +121,8 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
|
|||
@Override
|
||||
public String toString(String field) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("ParentQuery[").append(parentType).append("/").append(childTypes)
|
||||
.append("](").append(originalParentQuery.toString(field)).append(')')
|
||||
sb.append("ParentQuery[").append(parentType).append("](")
|
||||
.append(originalParentQuery.toString(field)).append(')')
|
||||
.append(ToStringUtils.boost(getBoost()));
|
||||
return sb.toString();
|
||||
}
|
||||
|
@ -235,6 +231,10 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
|
|||
return null;
|
||||
}
|
||||
IdReaderTypeCache idTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
|
||||
if (idTypeCache == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return new ChildScorer(this, uidToScore, childrenDocSet.iterator(), idTypeCache);
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue