Added scoring support to `has_child` and `has_parent` queries.

Added score support to `has_child` and `has_parent` queries. Both queries support a `score_type` option. The `has_child` query supports the same score type options as the `top_children` query, plus a `none` option, which is the default and yields the current behaviour. The `has_parent` query supports the score type options `score` and `none`; the latter is the default and yields the current behaviour.

If `score_type` is set to a value other than `none`, the `has_parent` query maps the score of the matching parent document into its child documents, and the `has_child` query maps the scores of the matching child documents into the related parent document; on both queries, `score_type` defines how the child document scores are combined in the parent document. Both queries are executed in two phases. The first phase collects the parent uid values of the matching documents, with an aggregated score per parent uid value. In the second phase, the child or parent typed documents that have a parent uid value collected during the first phase are emitted as hits, with the score computed in the first phase used as the hit score.
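For illustration, a minimal usage sketch with the Java query builders touched by this commit (the index, type and field names mirror the tests and benchmark below and are otherwise hypothetical; assumes an existing client and the static imports from QueryBuilders):

// Parents scored by the max score of their matching children:
SearchResponse parentHits = client.prepareSearch("test")
        .setQuery(hasChildQuery("child", termQuery("tag", "tag1")).scoreType("max"))
        .execute().actionGet();

// Children inheriting the score of their matching parent:
SearchResponse childHits = client.prepareSearch("test")
        .setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1")).scoreType("score"))
        .execute().actionGet();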

Closes #2502
Martijn van Groningen 2012-12-24 11:39:43 +01:00
parent bb9c7172b0
commit d57d89937f
14 changed files with 1222 additions and 111 deletions

View File

@@ -44,6 +44,7 @@ public class CacheRecycler {
shortIntHashMap.clear();
longIntHashMap.clear();
objectIntHashMap.clear();
objectFloatHashMap.clear();
objectArray.clear();
intArray.clear();
}
@@ -71,6 +72,7 @@ public class CacheRecycler {
private static SoftWrapper<Queue<ExtTHashMap>> hashMap = new SoftWrapper<Queue<ExtTHashMap>>();
@SuppressWarnings("unchecked")
public static <K, V> ExtTHashMap<K, V> popHashMap() {
Queue<ExtTHashMap> ref = hashMap.get();
if (ref == null) {
@@ -97,6 +99,7 @@ public class CacheRecycler {
private static SoftWrapper<Queue<THashSet>> hashSet = new SoftWrapper<Queue<THashSet>>();
@SuppressWarnings("unchecked")
public static <T> THashSet<T> popHashSet() {
Queue<THashSet> ref = hashSet.get();
if (ref == null) {
@@ -123,6 +126,7 @@ public class CacheRecycler {
private static SoftWrapper<Queue<ExtTDoubleObjectHashMap>> doubleObjectHashMap = new SoftWrapper<Queue<ExtTDoubleObjectHashMap>>();
@SuppressWarnings("unchecked")
public static <T> ExtTDoubleObjectHashMap<T> popDoubleObjectMap() {
Queue<ExtTDoubleObjectHashMap> ref = doubleObjectHashMap.get();
if (ref == null) {
@@ -149,6 +153,7 @@ public class CacheRecycler {
private static SoftWrapper<Queue<ExtTLongObjectHashMap>> longObjectHashMap = new SoftWrapper<Queue<ExtTLongObjectHashMap>>();
@SuppressWarnings("unchecked")
public static <T> ExtTLongObjectHashMap<T> popLongObjectMap() {
Queue<ExtTLongObjectHashMap> ref = longObjectHashMap.get();
if (ref == null) {
@@ -391,6 +396,33 @@ public class CacheRecycler {
ref.add(map);
}
// ------ TObjectFloatHashMap -----
private static SoftWrapper<Queue<TObjectFloatHashMap>> objectFloatHashMap = new SoftWrapper<Queue<TObjectFloatHashMap>>();
@SuppressWarnings({"unchecked"})
public static <T> TObjectFloatHashMap<T> popObjectFloatMap() {
Queue<TObjectFloatHashMap> ref = objectFloatHashMap.get();
if (ref == null) {
return new TObjectFloatHashMap();
}
TObjectFloatHashMap map = ref.poll();
if (map == null) {
return new TObjectFloatHashMap();
}
return map;
}
public static <T> void pushObjectFloatMap(TObjectFloatHashMap<T> map) {
Queue<TObjectFloatHashMap> ref = objectFloatHashMap.get();
if (ref == null) {
ref = ConcurrentCollections.newQueue();
objectFloatHashMap.set(ref);
}
map.clear();
ref.add(map);
}
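The new section follows the same pop / use / push recycling pattern as the other collections in this class; a minimal sketch of the intended cycle (the uid bytes are hypothetical), mirroring how the ChildrenQuery added later in this commit acquires the map in collector() and releases it in clear():

TObjectFloatHashMap<HashedBytesArray> uidToScore = CacheRecycler.popObjectFloatMap();
try {
    // accumulate a score keyed by parent uid
    uidToScore.put(new HashedBytesArray("parent#p1".getBytes()), 1.0f);
} finally {
    // pushing clears the map and returns it to the soft-referenced queue for reuse
    CacheRecycler.pushObjectFloatMap(uidToScore);
}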
// ----- Object[] -----
private static SoftWrapper<Queue<Object[]>> objectArray = new SoftWrapper<Queue<Object[]>>();

View File

@@ -36,6 +36,8 @@ public class HasChildQueryBuilder extends BaseQueryBuilder implements BoostableQ
private float boost = 1.0f;
private String scoreType;
private String executionType;
public HasChildQueryBuilder(String type, QueryBuilder queryBuilder) {
@@ -61,9 +63,18 @@ public class HasChildQueryBuilder extends BaseQueryBuilder implements BoostableQ
return this;
}
/**
* Defines how the scores from the matching child documents are mapped into the parent document.
*/
public HasChildQueryBuilder scoreType(String scoreType) {
this.scoreType = scoreType;
return this;
}
/**
* Expert: Sets the low level child to parent filtering implementation. Can be: 'bitset' or 'uid'
*
* Only applicable when score_type is set to none.
* <p/>
* This option is experimental and will be removed.
*/
public HasChildQueryBuilder executionType(String executionType) {
@@ -83,8 +94,8 @@ public class HasChildQueryBuilder extends BaseQueryBuilder implements BoostableQ
if (boost != 1.0f) {
builder.field("boost", boost);
}
if (executionType != null) {
builder.field("execution_type", executionType);
if (scoreType != null) {
builder.field("score_type", scoreType);
}
builder.endObject();
}
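For reference, a sketch of the JSON this builder now serializes and HasChildQueryParser accepts, built with the jsonBuilder API used elsewhere in this commit (the inner term query and its values are hypothetical):

XContentBuilder hasChildJson = jsonBuilder().startObject()
        .startObject("has_child")
        .startObject("query").startObject("term").field("tag", "tag1").endObject().endObject()
        .field("type", "child")
        .field("score_type", "max")
        .endObject()
        .endObject();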

View File

@@ -20,13 +20,15 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.search.child.ChildrenQuery;
import org.elasticsearch.index.search.child.HasChildFilter;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -51,13 +53,14 @@ public class HasChildQueryParser implements QueryParser {
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
Query query = null;
Query innerQuery = null;
boolean queryFound = false;
float boost = 1.0f;
String childType = null;
String scope = null;
ScoreType scoreType = null;
String executionType = "uid";
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -69,7 +72,7 @@ public class HasChildQueryParser implements QueryParser {
// since we switch types, make sure we change the context
String[] origTypes = QueryParseContext.setTypesWithPrevious(childType == null ? null : new String[]{childType});
try {
query = parseContext.parseInnerQuery();
innerQuery = parseContext.parseInnerQuery();
queryFound = true;
} finally {
QueryParseContext.setTypes(origTypes);
@@ -82,10 +85,15 @@ public class HasChildQueryParser implements QueryParser {
childType = parser.text();
} else if ("_scope".equals(currentFieldName)) {
scope = parser.text();
} else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) {
scoreType = ScoreType.fromString(parser.text());
} else if ("score_type".equals(currentFieldName) || "scoreType".equals(currentFieldName)) {
String scoreTypeValue = parser.text();
if (!"none".equals(scoreTypeValue)) {
scoreType = ScoreType.fromString(scoreTypeValue);
}
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) {// This option is experimental and will most likely be removed.
executionType = parser.text();
} else {
throw new QueryParsingException(parseContext.index(), "[has_child] query does not support [" + currentFieldName + "]");
}
@@ -94,12 +102,13 @@ public class HasChildQueryParser implements QueryParser {
if (!queryFound) {
throw new QueryParsingException(parseContext.index(), "[has_child] requires 'query' field");
}
if (query == null) {
if (innerQuery == null) {
return null;
}
if (childType == null) {
throw new QueryParsingException(parseContext.index(), "[has_child] requires 'type' field");
}
innerQuery.setBoost(boost);
DocumentMapper childDocMapper = parseContext.mapperService().documentMapper(childType);
if (childDocMapper == null) {
@@ -109,17 +118,22 @@ public class HasChildQueryParser implements QueryParser {
throw new QueryParsingException(parseContext.index(), "[has_child] Type [" + childType + "] does not have parent mapping");
}
String parentType = childDocMapper.parentFieldMapper().type();
DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
query.setBoost(boost);
// wrap the query with type query
query = new XFilteredQuery(query, parseContext.cacheFilter(childDocMapper.typeFilter(), null));
SearchContext searchContext = SearchContext.current();
HasChildFilter childFilter = HasChildFilter.create(query, scope, parentType, childType, searchContext, executionType);
// we don't need DeletionAwareConstantScore, since we filter deleted parent docs in the filter
ConstantScoreQuery childQuery = new ConstantScoreQuery(childFilter);
childQuery.setBoost(boost);
searchContext.addScopePhase(childFilter);
return childQuery;
Query query;
if (scoreType != null) {
Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
ChildrenQuery childrenQuery = new ChildrenQuery(searchContext, parentType, childType, parentFilter, scope, innerQuery, scoreType);
searchContext.addScopePhase(childrenQuery);
query = childrenQuery;
} else {
HasChildFilter hasChildFilter = HasChildFilter.create(innerQuery, scope, parentType, childType, searchContext, executionType);
searchContext.addScopePhase(hasChildFilter);
query = new ConstantScoreQuery(hasChildFilter);
}
query.setBoost(boost);
return query;
}
}

View File

@@ -30,12 +30,13 @@ public class HasParentQueryBuilder extends BaseQueryBuilder implements Boostable
private final QueryBuilder queryBuilder;
private final String parentType;
private String scope;
private String scoreType;
private String executionType;
private String scope;
private float boost = 1.0f;
/**
* @param parentType The parent type
* @param parentQuery The query that will be matched with parent documents
*/
public HasParentQueryBuilder(String parentType, QueryBuilder parentQuery) {
@@ -48,9 +49,23 @@ public class HasParentQueryBuilder extends BaseQueryBuilder implements Boostable
return this;
}
public HasParentQueryBuilder boost(float boost) {
this.boost = boost;
return this;
}
/**
* Expert: Sets the low level parent to child filtering implementation. Can be: 'bitset' or 'uid'
*
* Defines how the parent score is mapped into the child documents.
*/
public HasParentQueryBuilder scoreType(String scoreType) {
this.scoreType = scoreType;
return this;
}
/**
* Expert: Sets the low level child to parent filtering implementation. Can be: 'bitset' or 'uid'
* <p/>
* Only applicable when score_type is set to none.
* This option is experimental and will be removed.
*/
public HasParentQueryBuilder executionType(String executionType) {
@@ -58,11 +73,6 @@ public class HasParentQueryBuilder extends BaseQueryBuilder implements Boostable
return this;
}
public HasParentQueryBuilder boost(float boost) {
this.boost = boost;
return this;
}
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(HasParentQueryParser.NAME);
builder.field("query");
@@ -71,6 +81,9 @@ public class HasParentQueryBuilder extends BaseQueryBuilder implements Boostable
if (scope != null) {
builder.field("_scope", scope);
}
if (scoreType != null) {
builder.field("score_type", scoreType);
}
if (executionType != null) {
builder.field("execution_type", executionType);
}

View File

@@ -19,19 +19,25 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XBooleanFilter;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.search.child.HasParentFilter;
import org.elasticsearch.index.search.child.ParentQuery;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
// Same parse logic as HasParentQueryFilter, but also parses boost and wraps the filter in a constant score query
public class HasParentQueryParser implements QueryParser {
public static final String NAME = "has_parent";
@@ -49,12 +55,13 @@ public class HasParentQueryParser implements QueryParser {
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
Query query = null;
Query innerQuery = null;
boolean queryFound = false;
float boost = 1.0f;
String parentType = null;
String executionType = "uid";
String scope = null;
boolean score = false;
String executionType = "uid";
String currentFieldName = null;
XContentParser.Token token;
@@ -66,7 +73,7 @@ public class HasParentQueryParser implements QueryParser {
// TODO handle `query` element before `type` element...
String[] origTypes = QueryParseContext.setTypesWithPrevious(parentType == null ? null : new String[]{parentType});
try {
query = parseContext.parseInnerQuery();
innerQuery = parseContext.parseInnerQuery();
queryFound = true;
} finally {
QueryParseContext.setTypes(origTypes);
@@ -79,8 +86,15 @@ public class HasParentQueryParser implements QueryParser {
parentType = parser.text();
} else if ("_scope".equals(currentFieldName)) {
scope = parser.text();
} else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) { // This option is experimental and will most likely be removed.
} else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) {
executionType = parser.text();
} else if ("score_type".equals(currentFieldName) || "scoreType".equals(currentFieldName)) {
String scoreTypeValue = parser.text();
if ("score".equals(scoreTypeValue)) {
score = true;
} else if ("none".equals(scoreTypeValue)) {
score = false;
}
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
@@ -91,7 +105,7 @@ public class HasParentQueryParser implements QueryParser {
if (!queryFound) {
throw new QueryParsingException(parseContext.index(), "[parent] query requires 'query' field");
}
if (query == null) {
if (innerQuery == null) {
return null;
}
@@ -104,16 +118,48 @@ public class HasParentQueryParser implements QueryParser {
throw new QueryParsingException(parseContext.index(), "[parent] query configured 'parent_type' [" + parentType + "] is not a valid type");
}
query.setBoost(boost);
// wrap the query with type query
query = new XFilteredQuery(query, parseContext.cacheFilter(parentDocMapper.typeFilter(), null));
SearchContext searchContext = SearchContext.current();
HasParentFilter parentFilter = HasParentFilter.create(executionType, query, scope, parentType, searchContext);
List<String> childTypes = new ArrayList<String>(2);
for (DocumentMapper documentMapper : parseContext.mapperService()) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper == null) {
continue;
}
ConstantScoreQuery parentQuery = new ConstantScoreQuery(parentFilter);
parentQuery.setBoost(boost);
searchContext.addScopePhase(parentFilter);
return parentQuery;
if (parentDocMapper.type().equals(parentFieldMapper.type())) {
childTypes.add(documentMapper.type());
}
}
Filter childFilter;
if (childTypes.size() == 1) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(childTypes.get(0));
childFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
} else {
XBooleanFilter childrenFilter = new XBooleanFilter();
for (String childType : childTypes) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(childType);
Filter filter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
childrenFilter.add(filter, BooleanClause.Occur.SHOULD);
}
childFilter = childrenFilter;
}
innerQuery.setBoost(boost);
// wrap the query with type query
innerQuery = new XFilteredQuery(innerQuery, parseContext.cacheFilter(parentDocMapper.typeFilter(), null));
SearchContext searchContext = SearchContext.current();
Query query;
if (score) {
ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childTypes, childFilter, scope);
searchContext.addScopePhase(parentQuery);
query = parentQuery;
} else {
HasParentFilter hasParentFilter = HasParentFilter.create(executionType, innerQuery, scope, parentType, searchContext);
searchContext.addScopePhase(hasParentFilter);
query = new ConstantScoreQuery(hasParentFilter);
}
query.setBoost(boost);
return query;
}
}

View File

@@ -25,6 +25,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.index.search.child.TopChildrenQuery;
import org.elasticsearch.search.internal.SearchContext;
@@ -55,7 +56,7 @@ public class TopChildrenQueryParser implements QueryParser {
float boost = 1.0f;
String childType = null;
String scope = null;
TopChildrenQuery.ScoreType scoreType = TopChildrenQuery.ScoreType.MAX;
ScoreType scoreType = ScoreType.MAX;
int factor = 5;
int incrementalFactor = 2;
@@ -84,7 +85,7 @@ public class TopChildrenQueryParser implements QueryParser {
} else if ("_scope".equals(currentFieldName)) {
scope = parser.text();
} else if ("score".equals(currentFieldName)) {
scoreType = TopChildrenQuery.ScoreType.fromString(parser.text());
scoreType = ScoreType.fromString(parser.text());
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if ("factor".equals(currentFieldName)) {

View File

@@ -0,0 +1,381 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import gnu.trove.map.TObjectFloatMap;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TObjectFloatHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.search.internal.ScopePhase;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Set;
/**
* A query implementation that executes the wrapped child query and
* connects the matching child docs to the related parent documents
* using the {@link IdReaderTypeCache}.
*/
public class ChildrenQuery extends Query implements ScopePhase.CollectorPhase {
private final SearchContext searchContext;
private final String parentType;
private final String childType;
private final Filter parentFilter;
private final ScoreType scoreType;
private final String scope;
private final Query childQuery;
private TObjectFloatHashMap<HashedBytesArray> uidToScore;
private TObjectIntHashMap<HashedBytesArray> uidToCount;
public ChildrenQuery(SearchContext searchContext, String parentType, String childType, Filter parentFilter, String scope, Query childQuery, ScoreType scoreType) {
this.searchContext = searchContext;
this.parentType = parentType;
this.childType = childType;
this.parentFilter = parentFilter;
this.scope = scope;
this.childQuery = childQuery;
this.scoreType = scoreType;
}
private ChildrenQuery(ChildrenQuery unProcessedQuery, Query rewrittenChildQuery) {
this.searchContext = unProcessedQuery.searchContext;
this.parentType = unProcessedQuery.parentType;
this.childType = unProcessedQuery.childType;
this.parentFilter = unProcessedQuery.parentFilter;
this.scope = unProcessedQuery.scope;
this.scoreType = unProcessedQuery.scoreType;
this.childQuery = rewrittenChildQuery;
this.uidToScore = unProcessedQuery.uidToScore;
this.uidToCount = unProcessedQuery.uidToCount;
}
@Override
public String toString(String field) {
StringBuilder sb = new StringBuilder();
sb.append("ChildrenQuery[").append(childType).append("/").append(parentType).append("](").append(childQuery
.toString(field)).append(')').append(ToStringUtils.boost(getBoost()));
return sb.toString();
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query rewrittenChildQuery = childQuery.rewrite(reader);
if (rewrittenChildQuery == childQuery) {
return this;
}
int index = searchContext.scopePhases().indexOf(this);
ChildrenQuery rewrite = new ChildrenQuery(this, rewrittenChildQuery);
searchContext.scopePhases().set(index, rewrite);
return rewrite;
}
@Override
public void extractTerms(Set<Term> terms) {
childQuery.extractTerms(terms);
}
@Override
public boolean requiresProcessing() {
return uidToScore == null;
}
@Override
public Collector collector() {
uidToScore = CacheRecycler.popObjectFloatMap();
switch (scoreType) {
case AVG:
uidToCount = CacheRecycler.popObjectIntMap();
return new AvgChildUidCollector(scoreType, searchContext, parentType, uidToScore, uidToCount);
default:
return new ChildUidCollector(scoreType, searchContext, parentType, uidToScore);
}
}
@Override
public void processCollector(Collector collector) {
// Do nothing, we already have the references to the child scores and optionally the child count.
}
@Override
public String scope() {
return scope;
}
@Override
public void clear() {
if (uidToScore != null) {
CacheRecycler.pushObjectFloatMap(uidToScore);
}
uidToScore = null;
if (uidToCount != null) {
CacheRecycler.pushObjectIntMap(uidToCount);
}
uidToCount = null;
}
@Override
public Query query() {
return childQuery;
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
if (uidToScore == null) {
throw new ElasticSearchIllegalStateException("has_child query hasn't executed properly");
}
return new ParentWeight(childQuery.createWeight(searcher));
}
class ParentWeight extends Weight {
final Weight childWeight;
public ParentWeight(Weight childWeight) {
this.childWeight = childWeight;
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
return new Explanation(getBoost(), "not implemented yet...");
}
@Override
public Query getQuery() {
return ChildrenQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
float sum = childWeight.getValueForNormalization();
sum *= getBoost() * getBoost();
return sum;
}
@Override
public void normalize(float norm, float topLevelBoost) {
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
DocIdSet parentsSet = parentFilter.getDocIdSet(context, acceptDocs);
if (parentsSet == null || parentsSet == DocIdSet.EMPTY_DOCIDSET) {
return null;
}
IdReaderTypeCache idTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
DocIdSetIterator parentsIterator = parentsSet.iterator();
switch (scoreType) {
case AVG:
return new AvgParentScorer(this, idTypeCache, uidToScore, uidToCount, parentsIterator);
default:
return new ParentScorer(this, idTypeCache, uidToScore, parentsIterator);
}
}
}
static class ParentScorer extends Scorer {
final IdReaderTypeCache idTypeCache;
final TObjectFloatMap<HashedBytesArray> uidToScore;
final DocIdSetIterator parentsIterator;
int currentDocId = -1;
float currentScore;
ParentScorer(Weight weight, IdReaderTypeCache idTypeCache, TObjectFloatMap<HashedBytesArray> uidToScore, DocIdSetIterator parentsIterator) {
super(weight);
this.idTypeCache = idTypeCache;
this.uidToScore = uidToScore;
this.parentsIterator = parentsIterator;
}
@Override
public float score() throws IOException {
return currentScore;
}
@Override
public float freq() throws IOException {
// We don't have the original child query hit info here...
// But the freq of the children could be collected and returned here, though that would make this Scorer more expensive.
return 1;
}
@Override
public int docID() {
return currentDocId;
}
@Override
public int nextDoc() throws IOException {
while (true) {
currentDocId = parentsIterator.nextDoc();
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
HashedBytesArray uid = idTypeCache.idByDoc(currentDocId);
currentScore = uidToScore.get(uid);
if (Float.compare(currentScore, 0) > 0) {
return currentDocId;
}
}
}
@Override
public int advance(int target) throws IOException {
currentDocId = parentsIterator.advance(target);
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
HashedBytesArray uid = idTypeCache.idByDoc(currentDocId);
currentScore = uidToScore.get(uid);
if (Float.compare(currentScore, 0) > 0) {
return currentDocId;
} else {
return nextDoc();
}
}
}
static class AvgParentScorer extends ParentScorer {
final TObjectIntMap<HashedBytesArray> uidToCount;
HashedBytesArray currentUid;
AvgParentScorer(Weight weight, IdReaderTypeCache idTypeCache, TObjectFloatMap<HashedBytesArray> uidToScore, TObjectIntMap<HashedBytesArray> uidToCount, DocIdSetIterator parentsIterator) {
super(weight, idTypeCache, uidToScore, parentsIterator);
this.uidToCount = uidToCount;
}
@Override
public int nextDoc() throws IOException {
while (true) {
currentDocId = parentsIterator.nextDoc();
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
currentUid = idTypeCache.idByDoc(currentDocId);
currentScore = uidToScore.get(currentUid);
if (Float.compare(currentScore, 0) > 0) {
currentScore /= uidToCount.get(currentUid);
return currentDocId;
}
}
}
}
static class ChildUidCollector extends NoopCollector {
final TObjectFloatHashMap<HashedBytesArray> uidToScore;
final ScoreType scoreType;
final SearchContext searchContext;
final String childType;
Scorer scorer;
IdReaderTypeCache typeCache;
ChildUidCollector(ScoreType scoreType, SearchContext searchContext, String childType, TObjectFloatHashMap<HashedBytesArray> uidToScore) {
this.uidToScore = uidToScore;
this.scoreType = scoreType;
this.searchContext = searchContext;
this.childType = childType;
}
@Override
public void collect(int doc) throws IOException {
HashedBytesArray parentUid = typeCache.parentIdByDoc(doc);
float previousScore = uidToScore.get(parentUid);
float currentScore = scorer.score();
if (Float.compare(previousScore, 0) == 0) {
uidToScore.put(parentUid, currentScore);
} else {
switch (scoreType) {
case SUM:
uidToScore.adjustValue(parentUid, currentScore);
break;
case MAX:
if (Float.compare(previousScore, currentScore) < 0) {
uidToScore.put(parentUid, currentScore);
}
break;
}
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
this.scorer = scorer;
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
typeCache = searchContext.idCache().reader(context.reader()).type(childType);
}
}
static class AvgChildUidCollector extends ChildUidCollector {
final TObjectIntHashMap<HashedBytesArray> uidToCount;
AvgChildUidCollector(ScoreType scoreType, SearchContext searchContext, String childType, TObjectFloatHashMap<HashedBytesArray> uidToScore, TObjectIntHashMap<HashedBytesArray> uidToCount) {
super(scoreType, searchContext, childType, uidToScore);
this.uidToCount = uidToCount;
assert scoreType == ScoreType.AVG;
}
@Override
public void collect(int doc) throws IOException {
HashedBytesArray parentUid = typeCache.parentIdByDoc(doc);
float previousScore = uidToScore.get(parentUid);
float currentScore = scorer.score();
if (Float.compare(previousScore, 0) == 0) {
uidToScore.put(parentUid, currentScore);
uidToCount.put(parentUid, 1);
} else {
uidToScore.adjustValue(parentUid, currentScore);
uidToCount.increment(parentUid);
}
}
}
}
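A worked trace of the avg bookkeeping above, with hypothetical scores: a parent uid whose two matching children score 2.0 and 4.0 ends up with a summed score of 6.0 and a count of 2, so AvgParentScorer emits 6.0 / 2 = 3.0.

uidToScore.put(parentUid, 2.0f);         // first matching child: running sum 2.0
uidToCount.put(parentUid, 1);            // count 1
uidToScore.adjustValue(parentUid, 4.0f); // second child: running sum 6.0
uidToCount.increment(parentUid);         // count 2
float score = uidToScore.get(parentUid) / uidToCount.get(parentUid); // 3.0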

View File

@@ -111,7 +111,7 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
if (parentDocs == null) {
throw new ElasticSearchIllegalStateException("has_child filter/query hasn't executed properly");
throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
}
// no need to use acceptDocs, since the parentDocs were collected with a collector, which means those
@@ -145,7 +145,7 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
if (collectedUids == null) {
throw new ElasticSearchIllegalStateException("has_child filter/query hasn't executed properly");
throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
}
IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);

View File

@@ -110,7 +110,7 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
if (parents == null) {
throw new ElasticSearchIllegalStateException("has_parent filter/query hasn't executed properly");
throw new ElasticSearchIllegalStateException("has_parent filter hasn't executed properly");
}
IdReaderTypeCache idReaderTypeCache = context.idCache().reader(readerContext.reader()).type(parentType);
@@ -196,7 +196,7 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
if (parentDocs == null) {
throw new ElasticSearchIllegalStateException("has_parent filter/query hasn't executed properly");
throw new ElasticSearchIllegalStateException("has_parent filter hasn't executed properly");
}
return new ChildrenDocSet(readerContext.reader(), acceptDocs, parentDocs, context, parentType);

View File

@@ -0,0 +1,287 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import gnu.trove.map.hash.TObjectFloatHashMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.search.internal.ScopePhase;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.List;
import java.util.Set;
/**
* A query implementation that executes the wrapped parent query and
* connects the matching parent docs to the related child documents
* using the {@link IdReaderTypeCache}.
*/
public class ParentQuery extends Query implements ScopePhase.CollectorPhase {
private final SearchContext searchContext;
private final Query parentQuery;
private final String parentType;
private final Filter childrenFilter;
private final List<String> childTypes;
private final String scope;
private TObjectFloatHashMap<HashedBytesArray> uidToScore;
public ParentQuery(SearchContext searchContext, Query parentQuery, String parentType, List<String> childTypes, Filter childrenFilter, String scope) {
this.searchContext = searchContext;
this.parentQuery = parentQuery;
this.parentType = parentType;
this.childTypes = childTypes;
this.childrenFilter = childrenFilter;
this.scope = scope;
}
private ParentQuery(ParentQuery unwritten, Query rewrittenParentQuery) {
this.searchContext = unwritten.searchContext;
this.parentQuery = rewrittenParentQuery;
this.parentType = unwritten.parentType;
this.childrenFilter = unwritten.childrenFilter;
this.childTypes = unwritten.childTypes;
this.scope = unwritten.scope;
this.uidToScore = unwritten.uidToScore;
}
@Override
public boolean requiresProcessing() {
return uidToScore == null;
}
@Override
public Collector collector() {
uidToScore = CacheRecycler.popObjectFloatMap();
return new ParentUidCollector(uidToScore, searchContext, parentType);
}
@Override
public void processCollector(Collector collector) {
// Do nothing, we already have the references to the parent scores.
}
@Override
public String scope() {
return scope;
}
@Override
public void clear() {
if (uidToScore != null) {
CacheRecycler.pushObjectFloatMap(uidToScore);
}
uidToScore = null;
}
@Override
public Query query() {
return parentQuery;
}
@Override
public String toString(String field) {
StringBuilder sb = new StringBuilder();
sb.append("ParentQuery[").append(parentType).append("/").append(childTypes)
.append("](").append(parentQuery.toString(field)).append(')')
.append(ToStringUtils.boost(getBoost()));
return sb.toString();
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query rewrittenParentQuery = parentQuery.rewrite(reader);
if (rewrittenParentQuery == parentQuery) {
return this;
}
ParentQuery rewrite = new ParentQuery(this, rewrittenParentQuery);
int index = searchContext.scopePhases().indexOf(this);
searchContext.scopePhases().set(index, rewrite);
return rewrite;
}
@Override
public void extractTerms(Set<Term> terms) {
parentQuery.extractTerms(terms);
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
if (uidToScore == null) {
throw new ElasticSearchIllegalStateException("has_parent query hasn't executed properly");
}
return new ChildWeight(parentQuery.createWeight(searcher));
}
static class ParentUidCollector extends NoopCollector {
final TObjectFloatHashMap<HashedBytesArray> uidToScore;
final SearchContext searchContext;
final String parentType;
Scorer scorer;
IdReaderTypeCache typeCache;
ParentUidCollector(TObjectFloatHashMap<HashedBytesArray> uidToScore, SearchContext searchContext, String parentType) {
this.uidToScore = uidToScore;
this.searchContext = searchContext;
this.parentType = parentType;
}
@Override
public void collect(int doc) throws IOException {
HashedBytesArray parentUid = typeCache.idByDoc(doc);
uidToScore.put(parentUid, scorer.score());
}
@Override
public void setScorer(Scorer scorer) throws IOException {
this.scorer = scorer;
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
typeCache = searchContext.idCache().reader(context.reader()).type(parentType);
}
}
class ChildWeight extends Weight {
private final Weight parentWeight;
ChildWeight(Weight parentWeight) {
this.parentWeight = parentWeight;
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
return new Explanation(getBoost(), "not implemented yet...");
}
@Override
public Query getQuery() {
return ParentQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
float sum = parentWeight.getValueForNormalization();
sum *= getBoost() * getBoost();
return sum;
}
@Override
public void normalize(float norm, float topLevelBoost) {
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
DocIdSet childrenDocSet = childrenFilter.getDocIdSet(context, acceptDocs);
if (childrenDocSet == null || childrenDocSet == DocIdSet.EMPTY_DOCIDSET) {
return null;
}
IdReaderTypeCache idTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
return new ChildScorer(this, uidToScore, childrenDocSet.iterator(), idTypeCache);
}
}
static class ChildScorer extends Scorer {
final TObjectFloatHashMap<HashedBytesArray> uidToScore;
final DocIdSetIterator childrenIterator;
final IdReaderTypeCache typeCache;
int currentChildDoc = -1;
float currentScore;
ChildScorer(Weight weight, TObjectFloatHashMap<HashedBytesArray> uidToScore, DocIdSetIterator childrenIterator, IdReaderTypeCache typeCache) {
super(weight);
this.uidToScore = uidToScore;
this.childrenIterator = childrenIterator;
this.typeCache = typeCache;
}
@Override
public float score() throws IOException {
return currentScore;
}
@Override
public float freq() throws IOException {
// We don't have the original child query hit info here...
// But the freq of the children could be collected and returned here, though that would make this Scorer more expensive.
return 1;
}
@Override
public int docID() {
return currentChildDoc;
}
@Override
public int nextDoc() throws IOException {
while (true) {
currentChildDoc = childrenIterator.nextDoc();
if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
return currentChildDoc;
}
BytesReference uid = typeCache.parentIdByDoc(currentChildDoc);
if (uid == null) {
continue;
}
currentScore = uidToScore.get(uid);
if (Float.compare(currentScore, 0) != 0) {
return currentChildDoc;
}
}
}
@Override
public int advance(int target) throws IOException {
currentChildDoc = childrenIterator.advance(target);
if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
return currentChildDoc;
}
BytesReference uid = typeCache.parentIdByDoc(currentChildDoc);
if (uid == null) {
return nextDoc();
}
currentScore = uidToScore.get(uid);
if (Float.compare(currentScore, 0) == 0) {
return nextDoc();
}
return currentChildDoc;
}
}
}

View File

@@ -0,0 +1,55 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
/**
* Defines how scores from child documents are mapped into the parent document.
*/
public enum ScoreType {
/**
* Only the highest score of all matching child documents is mapped into the parent.
*/
MAX,
/**
* The average score of all matching child documents is mapped into the parent.
*/
AVG,
/**
* The scores of the matching child documents are summed up and mapped into the parent.
*/
SUM;
public static ScoreType fromString(String type) {
if ("max".equals(type)) {
return MAX;
} else if ("avg".equals(type)) {
return AVG;
} else if ("sum".equals(type)) {
return SUM;
}
throw new ElasticSearchIllegalArgumentException("No score type for child query [" + type + "] found");
}
}
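A quick sketch of the parsing contract (note that the query parsers handle the "none" value themselves before calling fromString):

ScoreType max = ScoreType.fromString("max"); // ScoreType.MAX
ScoreType avg = ScoreType.fromString("avg"); // ScoreType.AVG
// Any other value, e.g. "product", throws ElasticSearchIllegalArgumentException.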

View File

@@ -24,7 +24,6 @@ import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.search.EmptyScorer;
@@ -39,23 +38,6 @@ import java.util.*;
*/
public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
public static enum ScoreType {
MAX,
AVG,
SUM;
public static ScoreType fromString(String type) {
if ("max".equals(type)) {
return MAX;
} else if ("avg".equals(type)) {
return AVG;
} else if ("sum".equals(type)) {
return SUM;
}
throw new ElasticSearchIllegalArgumentException("No score type for child query [" + type + "] found");
}
}
private Query query;
private String scope;

View File

@@ -20,6 +20,7 @@
package org.elasticsearch.benchmark.search.child;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.SearchResponse;
@@ -30,8 +31,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.SizeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.node.Node;
import java.io.IOException;
@@ -42,6 +41,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.FilterBuilders.hasChildFilter;
import static org.elasticsearch.index.query.FilterBuilders.hasParentFilter;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
@@ -78,7 +78,7 @@ public class ChildSearchBenchmark {
StopWatch stopWatch = new StopWatch().start();
System.out.println("--> Indexing [" + COUNT + "] ...");
System.out.println("--> Indexing [" + COUNT + "] parent document and [" + (COUNT * CHILD_COUNT) + " child documents");
long ITERS = COUNT / BATCH;
long i = 1;
int counter = 0;
@@ -136,14 +136,22 @@ public class ChildSearchBenchmark {
}
System.out.println("--> Just Child Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
String[] executionTypes = new String[]{"uid", "bitset"};// either uid (faster, in general a bit more memory) or bitset (slower, but in general a bit less memory)
NodesStatsResponse statsResponse = client.admin().cluster().prepareNodesStats()
.setJvm(true).execute().actionGet();
System.out.println("--> Committed heap size: " + statsResponse.nodes()[0].getJvm().getMem().getHeapCommitted());
System.out.println("--> Used heap size: " + statsResponse.nodes()[0].getJvm().getMem().getHeapUsed());
String[] executionTypes = new String[]{"uid"/*, "bitset"*/};// either uid (faster, in general a bit more memory) or bitset (slower, but in general a bit less memory)
for (String executionType : executionTypes) {
System.out.println("--> Running has_child query with execution type " + executionType);
System.out.println("--> Running has_child filter with execution type " + executionType);
// run parent child constant query
for (int j = 0; j < QUERY_WARMUP; j++) {
SearchResponse searchResponse = client.prepareSearch()
.setQuery(
hasChildQuery("child", termQuery("tag", "tag1")).executionType(executionType)
filteredQuery(
matchAllQuery(),
hasChildFilter("child", termQuery("tag", "tag1")).executionType(executionType)
)
)
.execute().actionGet();
if (searchResponse.failedShards() > 0) {
@@ -158,7 +166,10 @@ public class ChildSearchBenchmark {
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch()
.setQuery(
hasChildQuery("child", termQuery("tag", "tag1")).executionType(executionType)
filteredQuery(
matchAllQuery(),
hasChildFilter("child", termQuery("tag", "tag1")).executionType(executionType)
)
)
.execute().actionGet();
if (searchResponse.failedShards() > 0) {
@@ -169,14 +180,17 @@ public class ChildSearchBenchmark {
}
totalQueryTime += searchResponse.tookInMillis();
}
System.out.println("--> has_child[" + executionType + "] Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
System.out.println("--> has_child[" + executionType + "] filter Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
System.out.println("--> Running has_child[" + executionType + "] filter with match_all child query");
totalQueryTime = 0;
for (int j = 1; j <= QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch()
.setQuery(
hasChildQuery("child", matchAllQuery()).executionType(executionType)
filteredQuery(
matchAllQuery(),
hasChildFilter("child", matchAllQuery()).executionType(executionType)
)
)
.execute().actionGet();
if (searchResponse.failedShards() > 0) {
@@ -188,7 +202,7 @@ public class ChildSearchBenchmark {
}
totalQueryTime += searchResponse.tookInMillis();
}
System.out.println("--> has_child[" + executionType + "] with match_all child query, Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
System.out.println("--> has_child[" + executionType + "] filter with match_all child query, Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
}
for (String executionType : executionTypes) {
@@ -223,7 +237,7 @@ public class ChildSearchBenchmark {
}
totalQueryTime += searchResponse.tookInMillis();
}
System.out.println("--> has_parent[" + executionType + "] Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
System.out.println("--> has_parent[" + executionType + "] filter Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
System.out.println("--> Running has_parent[" + executionType + "] filter with match_all parent query ");
totalQueryTime = 0;
@@ -241,7 +255,7 @@ public class ChildSearchBenchmark {
}
totalQueryTime += searchResponse.tookInMillis();
}
System.out.println("--> has_parent[" + executionType + "] with match_all parent query, Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
System.out.println("--> has_parent[" + executionType + "] filter with match_all parent query, Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
}
System.out.println("--> Running top_children query");
// run parent child score query
@@ -264,6 +278,78 @@ public class ChildSearchBenchmark {
}
System.out.println("--> top_children Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
statsResponse = client.admin().cluster().prepareNodesStats()
.setJvm(true).setIndices(true).execute().actionGet();
System.out.println("--> Id cache size: " + statsResponse.nodes()[0].getIndices().cache().getIdCacheSize());
System.out.println("--> Used heap size: " + statsResponse.nodes()[0].getJvm().getMem().getHeapUsed());
System.out.println("--> Running has_child query");
// run parent child score query
for (int j = 0; j < QUERY_WARMUP; j++) {
SearchResponse searchResponse = client.prepareSearch().setQuery(hasChildQuery("child", termQuery("tag", "tag1")).scoreType("max")).execute().actionGet();
if (searchResponse.hits().totalHits() != COUNT) {
System.err.println("mismatch on hits");
}
}
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch().setQuery(hasChildQuery("child", termQuery("tag", "tag1")).scoreType("max")).execute().actionGet();
if (searchResponse.hits().totalHits() != COUNT) {
System.err.println("mismatch on hits");
}
totalQueryTime += searchResponse.tookInMillis();
}
System.out.println("--> has_child Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch().setQuery(hasChildQuery("child", matchAllQuery()).scoreType("max")).execute().actionGet();
long expected = (COUNT / BATCH) * BATCH;
if (searchResponse.hits().totalHits() != expected) {
System.err.println("mismatch on hits");
}
totalQueryTime += searchResponse.tookInMillis();
}
System.out.println("--> has_child query with match_all Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
System.out.println("--> Running has_parent query");
// run parent child score query
for (int j = 0; j < QUERY_WARMUP; j++) {
SearchResponse searchResponse = client.prepareSearch().setQuery(hasParentQuery("parent", termQuery("name", "test1")).scoreType("score")).execute().actionGet();
if (searchResponse.hits().totalHits() != CHILD_COUNT) {
System.err.println("mismatch on hits");
}
}
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch().setQuery(hasParentQuery("parent", termQuery("name", "test1")).scoreType("score")).execute().actionGet();
if (searchResponse.hits().totalHits() != CHILD_COUNT) {
System.err.println("mismatch on hits");
}
totalQueryTime += searchResponse.tookInMillis();
}
System.out.println("--> has_parent Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch().setQuery(hasParentQuery("parent", matchAllQuery()).scoreType("score")).execute().actionGet();
if (searchResponse.hits().totalHits() != 5000000) {
System.err.println("mismatch on hits");
}
totalQueryTime += searchResponse.tookInMillis();
}
System.out.println("--> has_parent query with match_all Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
statsResponse = client.admin().cluster().prepareNodesStats()
.setJvm(true).setIndices(true).execute().actionGet();
System.out.println("--> Id cache size: " + statsResponse.nodes()[0].getIndices().cache().getIdCacheSize());
System.out.println("--> Used heap size: " + statsResponse.nodes()[0].getJvm().getMem().getHeapUsed());
client.close();
node1.close();
}

View File

@@ -25,8 +25,9 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.facet.terms.TermsFacet;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.test.integration.AbstractNodesTests;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
@@ -38,6 +39,7 @@ import java.util.List;
import java.util.Map;
import static com.google.common.collect.Maps.newHashMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.FilterBuilders.hasChildFilter;
import static org.elasticsearch.index.query.FilterBuilders.hasParentFilter;
import static org.elasticsearch.index.query.QueryBuilders.*;
@@ -84,10 +86,10 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("grandchild").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("grandchild").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "child").endObject()
.endObject().endObject()).execute().actionGet();
@@ -123,7 +125,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
@@ -216,7 +218,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
// HAS CHILD QUERY
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"))).execute().actionGet();
if (searchResponse.failedShards() > 0) {
logger.warn("Failed shards:");
for (ShardSearchFailure shardSearchFailure : searchResponse.shardFailures()) {
@@ -227,13 +229,13 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"))).execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.failedShards(), equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p2"));
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"))).execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.failedShards(), equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(2l));
@@ -282,14 +284,14 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
assertThat(searchResponse.hits().getAt(1).id(), equalTo("c2"));
// HAS PARENT QUERY
searchResponse = client.prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value2")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value2"))).execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.failedShards(), equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(2l));
assertThat(searchResponse.hits().getAt(0).id(), equalTo("c3"));
assertThat(searchResponse.hits().getAt(1).id(), equalTo("c4"));
searchResponse = client.prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"))).execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.failedShards(), equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(2l));
@@ -307,7 +309,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
@ -369,7 +371,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
@ -417,19 +419,19 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
// HAS CHILD QUERY
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"))).execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.failedShards(), equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"))).execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.failedShards(), equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p2"));
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"))).execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.failedShards(), equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(2l));
@ -468,7 +470,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
@ -516,19 +518,19 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
// HAS CHILD QUERY
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"))).execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.failedShards(), equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"))).execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.failedShards(), equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p2"));
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red")).executionType(getExecutionMethod())).execute().actionGet();
searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"))).execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.failedShards(), equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(2l));
@ -568,7 +570,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
@ -612,7 +614,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
@ -677,7 +679,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
@ -692,12 +694,12 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
client.admin().indices().prepareRefresh().execute().actionGet();
SearchResponse searchResponse = client.prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
.setQuery(boolQuery().mustNot(hasChildQuery("child", boolQuery().should(queryString("c_field:*"))).executionType(getExecutionMethod())))
.setQuery(boolQuery().mustNot(hasChildQuery("child", boolQuery().should(queryString("c_field:*")))))
.execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
searchResponse = client.prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
.setQuery(boolQuery().mustNot(hasParentQuery("parent", boolQuery().should(queryString("p_field:*"))).executionType(getExecutionMethod())))
.setQuery(boolQuery().mustNot(hasParentQuery("parent", boolQuery().should(queryString("p_field:*")))))
.execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
@ -708,12 +710,12 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
}
@Test
public void testFixAOBEIfTopChildrenIswrappedInMusNotClause() throws Exception {
public void testFixAOBEIfTopChildrenIsWrappedInMustNotClause() throws Exception {
client.admin().indices().prepareDelete().execute().actionGet();
client.admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
@ -744,7 +746,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
@ -798,7 +800,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(
XContentFactory.jsonBuilder()
jsonBuilder()
.startObject()
.startObject("type")
.startObject("_parent")
@ -841,7 +843,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.admin().indices().preparePutMapping("test").setType("child").setSource(XContentFactory.jsonBuilder().startObject().startObject("type")
client.admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
@ -862,16 +864,217 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
assertThat(countResponse.shardFailures().get(0).reason().contains("top_children query hasn't executed properly"), equalTo(true));
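// Hedged note: the scoreType variants below presumably depend on the two-phase
// execution (collect parent uids and aggregated scores, then emit hits) that only
// a search request performs; the count API skips that first phase, so each shard
// is expected to report an "hasn't executed properly" failure here.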
countResponse = client.prepareCount("test")
.setQuery(hasChildQuery("child", termQuery("c_field1", "2")).executionType(getExecutionMethod()))
.setQuery(hasChildQuery("child", termQuery("c_field1", "2")).scoreType("max"))
.execute().actionGet();
assertThat(countResponse.failedShards(), equalTo(1));
assertThat(countResponse.shardFailures().get(0).reason().contains("has_child filter/query hasn't executed properly"), equalTo(true));
assertThat(countResponse.shardFailures().get(0).reason().contains("has_child query hasn't executed properly"), equalTo(true));
countResponse = client.prepareCount("test")
.setQuery(hasParentQuery("parent", termQuery("p_field1", "1")).executionType(getExecutionMethod()))
.setQuery(hasParentQuery("parent", termQuery("p_field1", "1")).scoreType("score"))
.execute().actionGet();
assertThat(countResponse.failedShards(), equalTo(1));
assertThat(countResponse.shardFailures().get(0).reason().contains("has_parent filter/query hasn't executed properly"), equalTo(true));
assertThat(countResponse.shardFailures().get(0).reason().contains("has_parent query hasn't executed properly"), equalTo(true));
countResponse = client.prepareCount("test")
.setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field1", "2")).executionType(getExecutionMethod())))
.execute().actionGet();
assertThat(countResponse.failedShards(), equalTo(1));
assertThat(countResponse.shardFailures().get(0).reason().contains("has_child filter hasn't executed properly"), equalTo(true));
countResponse = client.prepareCount("test")
.setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field1", "1")).executionType(getExecutionMethod())))
.execute().actionGet();
assertThat(countResponse.failedShards(), equalTo(1));
assertThat(countResponse.shardFailures().get(0).reason().contains("has_parent filter hasn't executed properly"), equalTo(true));
}
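// For reference, a sketch of the presumed REST equivalent of the Java builders
// exercised in the test below; the exact field names ("score_type", "parent_type")
// are assumptions based on the commit message, not verified against the parser:
// { "has_child" : { "type" : "child", "score_type" : "sum", "query" : { ... } } }
// { "has_parent" : { "parent_type" : "parent", "score_type" : "score", "query" : { ... } } }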
@Test
public void testScoreForParentChildQueries() throws Exception {
client.admin().indices().prepareDelete().execute().actionGet();
client.admin().indices().prepareCreate("test")
.addMapping("child", jsonBuilder()
.startObject()
.startObject("type")
.startObject("_parent")
.field("type", "parent")
.endObject()
.endObject()
.endObject()
).addMapping("child1", jsonBuilder()
.startObject()
.startObject("type")
.startObject("_parent")
.field("type", "parent")
.endObject()
.endObject()
.endObject()
).setSettings(
ImmutableSettings.settingsBuilder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 0)
).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
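// Fixture: three parents, each with several children; the queries below score a
// child by its c_field1 value, so every aggregated parent score asserted later
// can be derived by hand from the documents indexed here.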
// Parent 1 and its children
client.prepareIndex("test", "parent", "1")
.setSource("p_field", "p_value1")
.execute().actionGet();
client.prepareIndex("test", "child", "1")
.setSource("c_field1", 1, "c_field2", 0)
.setParent("1").execute().actionGet();
client.prepareIndex("test", "child", "2")
.setSource("c_field1", 1, "c_field2", 0)
.setParent("1").execute().actionGet();
client.prepareIndex("test", "child", "3")
.setSource("c_field1", 2, "c_field2", 0)
.setParent("1").execute().actionGet();
client.prepareIndex("test", "child", "4")
.setSource("c_field1", 2, "c_field2", 0)
.setParent("1").execute().actionGet();
client.prepareIndex("test", "child", "5")
.setSource("c_field1", 1, "c_field2", 1)
.setParent("1").execute().actionGet();
client.prepareIndex("test", "child", "6")
.setSource("c_field1", 1, "c_field2", 2)
.setParent("1").execute().actionGet();
// Parent 2 and its children
client.prepareIndex("test", "parent", "2")
.setSource("p_field", "p_value2")
.execute().actionGet();
client.prepareIndex("test", "child", "7")
.setSource("c_field1", 3, "c_field2", 0)
.setParent("2").execute().actionGet();
client.prepareIndex("test", "child", "8")
.setSource("c_field1", 1, "c_field2", 1)
.setParent("2").execute().actionGet();
client.prepareIndex("test", "child", "9")
.setSource("c_field1", 1, "c_field2", 1)
.setParent("p").execute().actionGet();
client.prepareIndex("test", "child", "10")
.setSource("c_field1", 1, "c_field2", 1)
.setParent("2").execute().actionGet();
client.prepareIndex("test", "child", "11")
.setSource("c_field1", 1, "c_field2", 1)
.setParent("2").execute().actionGet();
client.prepareIndex("test", "child", "12")
.setSource("c_field1", 1, "c_field2", 2)
.setParent("2").execute().actionGet();
// Parent 3 and its children
client.prepareIndex("test", "parent", "3")
.setSource("p_field1", "p_value3", "p_field2", 5)
.execute().actionGet();
client.prepareIndex("test", "child", "13")
.setSource("c_field1", 4, "c_field2", 0, "c_field3", 0)
.setParent("3").execute().actionGet();
client.prepareIndex("test", "child", "14")
.setSource("c_field1", 1, "c_field2", 1, "c_field3", 1)
.setParent("3").execute().actionGet();
client.prepareIndex("test", "child", "15")
.setSource("c_field1", 1, "c_field2", 2, "c_field3", 2)
.setParent("3").execute().actionGet();
client.prepareIndex("test", "child", "16")
.setSource("c_field1", 1, "c_field2", 2, "c_field3", 3)
.setParent("3").execute().actionGet();
client.prepareIndex("test", "child", "17")
.setSource("c_field1", 1, "c_field2", 2, "c_field3", 4)
.setParent("3").execute().actionGet();
client.prepareIndex("test", "child", "18")
.setSource("c_field1", 1, "c_field2", 2, "c_field3", 5)
.setParent("3").execute().actionGet();
client.prepareIndex("test", "child1", "1")
.setSource("c_field1", 1, "c_field2", 2, "c_field3", 6)
.setParent("3").execute().actionGet();
client.admin().indices().prepareRefresh().execute().actionGet();
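// The customScoreQuery gives every child matching c_field2 == 0 a score equal to
// its c_field1 value; scoreType then controls how those child scores are folded
// into the parent score (sum, max or avg).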
SearchResponse response = client.prepareSearch("test")
.setQuery(
QueryBuilders.hasChildQuery(
"child",
QueryBuilders.customScoreQuery(
matchQuery("c_field2", 0)
).script("doc['c_field1'].value")
).scoreType("sum")
)
.execute().actionGet();
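// Expected sums over the matching children (c_field2 == 0), scored by c_field1:
// parent 1 -> 1 + 1 + 2 + 2 = 6, parent 3 -> 4, parent 2 -> 3.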
assertThat(response.hits().totalHits(), equalTo(3l));
assertThat(response.hits().hits()[0].id(), equalTo("1"));
assertThat(response.hits().hits()[0].score(), equalTo(6f));
assertThat(response.hits().hits()[1].id(), equalTo("3"));
assertThat(response.hits().hits()[1].score(), equalTo(4f));
assertThat(response.hits().hits()[2].id(), equalTo("2"));
assertThat(response.hits().hits()[2].score(), equalTo(3f));
response = client.prepareSearch("test")
.setQuery(
QueryBuilders.hasChildQuery(
"child",
QueryBuilders.customScoreQuery(
matchQuery("c_field2", 0)
).script("doc['c_field1'].value")
).scoreType("max")
)
.execute().actionGet();
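// Same matching children, but the maximum child score wins:
// parent 3 -> max(4) = 4, parent 2 -> max(3) = 3, parent 1 -> max(1, 1, 2, 2) = 2.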
assertThat(response.hits().totalHits(), equalTo(3l));
assertThat(response.hits().hits()[0].id(), equalTo("3"));
assertThat(response.hits().hits()[0].score(), equalTo(4f));
assertThat(response.hits().hits()[1].id(), equalTo("2"));
assertThat(response.hits().hits()[1].score(), equalTo(3f));
assertThat(response.hits().hits()[2].id(), equalTo("1"));
assertThat(response.hits().hits()[2].score(), equalTo(2f));
response = client.prepareSearch("test")
.setQuery(
QueryBuilders.hasChildQuery(
"child",
QueryBuilders.customScoreQuery(
matchQuery("c_field2", 0)
).script("doc['c_field1'].value")
).scoreType("avg")
)
.execute().actionGet();
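// Average of the matching child scores per parent:
// parent 3 -> 4, parent 2 -> 3, parent 1 -> (1 + 1 + 2 + 2) / 4 = 1.5.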
assertThat(response.hits().totalHits(), equalTo(3l));
assertThat(response.hits().hits()[0].id(), equalTo("3"));
assertThat(response.hits().hits()[0].score(), equalTo(4f));
assertThat(response.hits().hits()[1].id(), equalTo("2"));
assertThat(response.hits().hits()[1].score(), equalTo(3f));
assertThat(response.hits().hits()[2].id(), equalTo("1"));
assertThat(response.hits().hits()[2].score(), equalTo(1.5f));
response = client.prepareSearch("test")
.setQuery(
QueryBuilders.hasParentQuery(
"parent",
QueryBuilders.customScoreQuery(
matchQuery("p_field1", "p_value3")
).script("doc['p_field2'].value")
).scoreType("score")
)
.addSort(SortBuilders.fieldSort("c_field3"))
.addSort(SortBuilders.scoreSort())
.execute().actionGet();
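// Only parent 3 matches p_value3; its script score, doc['p_field2'].value = 5, is
// mapped onto all seven of its children (types child and child1), so the primary
// c_field3 sort decides the hit order while every score stays 5.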
assertThat(response.hits().totalHits(), equalTo(7l));
assertThat(response.hits().hits()[0].id(), equalTo("13"));
assertThat(response.hits().hits()[0].score(), equalTo(5f));
assertThat(response.hits().hits()[1].id(), equalTo("14"));
assertThat(response.hits().hits()[1].score(), equalTo(5f));
assertThat(response.hits().hits()[2].id(), equalTo("15"));
assertThat(response.hits().hits()[2].score(), equalTo(5f));
assertThat(response.hits().hits()[3].id(), equalTo("16"));
assertThat(response.hits().hits()[3].score(), equalTo(5f));
assertThat(response.hits().hits()[4].id(), equalTo("17"));
assertThat(response.hits().hits()[4].score(), equalTo(5f));
assertThat(response.hits().hits()[5].id(), equalTo("18"));
assertThat(response.hits().hits()[5].score(), equalTo(5f));
assertThat(response.hits().hits()[6].id(), equalTo("1"));
assertThat(response.hits().hits()[6].score(), equalTo(5f));
}
}