Split HasChildQueryParser into toQuery and fromXContent
This is an initial commit that splits HasChildQueryParser / Builder into the two separate steps. This one is particularly nasty since it transports a pretty wild InnerHits object that needs heavy refactoring. Yet, this commit still has some nocommits and needs more tests and maybe another cleanup, but it's a start to get the code out there.
parent f2605b34d6
commit 18bec264f9
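For context, the split described above follows the parse-then-build pattern used across this query refactoring: fromXContent only reads the request body into an intermediate HasChildQueryBuilder, and toQuery later turns that builder into a Lucene query against the shard context. The sketch below illustrates that shape with simplified stand-in types; the class and method names mirror the diff, but the Map-based input, the String result, and the main method are purely illustrative and not part of the actual Elasticsearch API.

// Illustrative sketch only: simplified stand-ins for the real Elasticsearch types,
// showing the parse-then-build split that this commit introduces.
import java.util.Map;

final class HasChildQueryBuilderSketch {

    private final String childType;
    private final Map<String, Object> innerQuerySource;

    HasChildQueryBuilderSketch(String childType, Map<String, Object> innerQuerySource) {
        this.childType = childType;
        this.innerQuerySource = innerQuerySource;
    }

    // Phase 1 (parser): turn the request body into an intermediate builder.
    // In the real code this role is played by HasChildQueryParser.fromXContent(QueryParseContext).
    static HasChildQueryBuilderSketch fromXContent(Map<String, Object> body) {
        String type = (String) body.get("type");
        @SuppressWarnings("unchecked")
        Map<String, Object> query = (Map<String, Object>) body.get("query");
        return new HasChildQueryBuilderSketch(type, query);
    }

    // Phase 2 (builder): turn the intermediate representation into the executable query.
    // In the real code this role is played by HasChildQueryBuilder.doToQuery(QueryShardContext).
    String toQuery() {
        return "join(child_type=" + childType + ", inner=" + innerQuerySource + ")";
    }

    public static void main(String[] args) {
        Map<String, Object> body = Map.of("type", "comment", "query", Map.of("match_all", Map.of()));
        HasChildQueryBuilderSketch builder = fromXContent(body);
        System.out.println(builder.toQuery());
    }
}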
@@ -715,8 +715,13 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
        return this;
    }

    public SearchRequestBuilder addInnerHit(String name, InnerHitsBuilder.InnerHit innerHit) {
        innerHitsBuilder().addInnerHit(name, innerHit);
    public SearchRequestBuilder addParentChildInnerHits(String name, String type, InnerHitsBuilder.InnerHit innerHit) {
        innerHitsBuilder().addParentChildInnerHits(name, type, innerHit);
        return this;
    }

    public SearchRequestBuilder addNestedInnerHits(String name, String path, InnerHitsBuilder.InnerHit innerHit) {
        innerHitsBuilder().addNestedInnerHits(name, path, innerHit);
        return this;
    }
@@ -18,40 +18,103 @@
 */
package org.elasticsearch.index.query;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.support.QueryInnerHitBuilder;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.JoinUtil;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
import org.elasticsearch.index.search.child.ChildrenQuery;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Locale;
import java.util.Objects;

/**
 * A query builder for <tt>has_child</tt> queries.
 */
public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuilder> {

    /**
     * The queries name
     */
    public static final String NAME = "has_child";

    private final QueryBuilder queryBuilder;
    /**
     * The default cut off point only to evaluate parent documents that contain the matching parent id terms
     * instead of evaluating all parent docs.
     */
    public static final int DEFAULT_SHORT_CIRCUIT_CUTOFF = 8192;
    /**
     * The default maximum number of children that are required to match for the parent to be considered a match.
     */
    public static final int DEFAULT_MAX_CHILDREN = Integer.MAX_VALUE;
    /**
     * The default minimum number of children that are required to match for the parent to be considered a match.
     */
    public static final int DEFAULT_MIN_CHILDREN = 0;

    private String childType;
    private final QueryBuilder query;

    private String scoreType;
    private final String type;

    private Integer minChildren;
    private ScoreType scoreType = ScoreType.NONE;

    private Integer maxChildren;
    private int minChildren = DEFAULT_MIN_CHILDREN;

    private Integer shortCircuitCutoff;
    private int maxChildren = DEFAULT_MAX_CHILDREN;

    private QueryInnerHitBuilder innerHit = null;
    private int shortCircuitCutoff = DEFAULT_SHORT_CIRCUIT_CUTOFF;

    static final HasChildQueryBuilder PROTOTYPE = new HasChildQueryBuilder(null, null);
    private QueryInnerHits queryInnerHits;

    public HasChildQueryBuilder(String type, QueryBuilder queryBuilder) {
        this.childType = type;
        this.queryBuilder = queryBuilder;
    static final HasChildQueryBuilder PROTOTYPE = new HasChildQueryBuilder("", EmptyQueryBuilder.PROTOTYPE);

    public HasChildQueryBuilder(String type, QueryBuilder query, Integer maxChildren, Integer minChildren, Integer shortCircuitCutoff, ScoreType scoreType, QueryInnerHits queryInnerHits) {
        this(type, query);
        scoreType(scoreType);
        this.maxChildren = maxChildren;
        this.minChildren = minChildren;
        this.shortCircuitCutoff = shortCircuitCutoff;
        this.queryInnerHits = queryInnerHits;
    }

    public HasChildQueryBuilder(String type, QueryBuilder query) {
        if (type == null) {
            throw new IllegalArgumentException("[" + NAME + "] requires 'type' field");
        }
        if (query == null) {
            throw new IllegalArgumentException("[" + NAME + "] requires 'query' field");
        }
        this.type = type;
        this.query = query;
    }

    /**
     * Defines how the scores from the matching child documents are mapped into the parent document.
     */
    public HasChildQueryBuilder scoreType(String scoreType) {
    public HasChildQueryBuilder scoreType(ScoreType scoreType) {
        if (scoreType == null) {
            throw new IllegalArgumentException("[" + NAME + "] requires 'score_type' field");
        }
        this.scoreType = scoreType;
        return this;
    }
@@ -60,6 +123,9 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
     * Defines the minimum number of children that are required to match for the parent to be considered a match.
     */
    public HasChildQueryBuilder minChildren(int minChildren) {
        if (minChildren < 0) {
            throw new IllegalArgumentException("[" + NAME + "] requires non-negative 'min_children' field");
        }
        this.minChildren = minChildren;
        return this;
    }
@@ -68,6 +134,9 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
     * Defines the maximum number of children that are required to match for the parent to be considered a match.
     */
    public HasChildQueryBuilder maxChildren(int maxChildren) {
        if (maxChildren < 0) {
            throw new IllegalArgumentException("[" + NAME + "] requires non-negative 'max_children' field");
        }
        this.maxChildren = maxChildren;
        return this;
    }
@@ -76,7 +145,10 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
     * Configures at what cut off point only to evaluate parent documents that contain the matching parent id terms
     * instead of evaluating all parent docs.
     */
    public HasChildQueryBuilder setShortCircuitCutoff(int shortCircuitCutoff) {
    public HasChildQueryBuilder shortCircuitCutoff(int shortCircuitCutoff) {
        if (shortCircuitCutoff < 0) {
            throw new IllegalArgumentException("[" + NAME + "] requires non-negative 'short_circuit_cutoff' field");
        }
        this.shortCircuitCutoff = shortCircuitCutoff;
        return this;
    }
@@ -84,34 +156,74 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
    /**
     * Sets inner hit definition in the scope of this query and reusing the defined type and query.
     */
    public HasChildQueryBuilder innerHit(QueryInnerHitBuilder innerHit) {
        this.innerHit = innerHit;
    public HasChildQueryBuilder innerHit(QueryInnerHits queryInnerHits) {
        this.queryInnerHits = queryInnerHits;
        return this;
    }

    /**
     * Returns inner hit definition in the scope of this query and reusing the defined type and query.
     */
    public QueryInnerHits innerHit() {
        return queryInnerHits;
    }

    /**
     * Returns the children query to execute.
     */
    public QueryBuilder query() {
        return query;
    }

    /**
     * Returns the child type
     */
    public String childType() {
        return type;
    }

    /**
     * Returns how the scores from the matching child documents are mapped into the parent document.
     */
    public ScoreType scoreType() {
        return scoreType;
    }

    /**
     * Returns the minimum number of children that are required to match for the parent to be considered a match.
     * The default is {@value #DEFAULT_MAX_CHILDREN}
     */
    public int minChildren() {
        return minChildren;
    }

    /**
     * Returns the maximum number of children that are required to match for the parent to be considered a match.
     * The default is {@value #DEFAULT_MIN_CHILDREN}
     */
    public int maxChildren() { return maxChildren; }

    /**
     * Returns what cut off point only to evaluate parent documents that contain the matching parent id terms
     * instead of evaluating all parent docs. The default is {@value #DEFAULT_SHORT_CIRCUIT_CUTOFF}
     */
    public int shortCircuitCutoff() {
        return shortCircuitCutoff;
    }

    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
        builder.field("query");
        queryBuilder.toXContent(builder, params);
        builder.field("child_type", childType);
        if (scoreType != null) {
            builder.field("score_type", scoreType);
        }
        if (minChildren != null) {
            builder.field("min_children", minChildren);
        }
        if (maxChildren != null) {
            builder.field("max_children", maxChildren);
        }
        if (shortCircuitCutoff != null) {
            builder.field("short_circuit_cutoff", shortCircuitCutoff);
        }
        query.toXContent(builder, params);
        builder.field("child_type", type);
        builder.field("score_type", scoreType.name().toLowerCase(Locale.ROOT));
        builder.field("min_children", minChildren);
        builder.field("max_children", maxChildren);
        builder.field("short_circuit_cutoff", shortCircuitCutoff);
        printBoostAndQueryName(builder);
        if (innerHit != null) {
            builder.startObject("inner_hits");
            builder.value(innerHit);
            builder.endObject();
        if (queryInnerHits != null) {
            queryInnerHits.toXContent(builder, params);
        }
        builder.endObject();
    }
@@ -120,4 +232,233 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
    public String getWriteableName() {
        return NAME;
    }

    @Override
    protected Query doToQuery(QueryShardContext context) throws IOException {
        Query innerQuery = query.toQuery(context);
        if (innerQuery == null) {
            return null;
        }
        innerQuery.setBoost(boost);

        DocumentMapper childDocMapper = context.mapperService().documentMapper(type);
        if (childDocMapper == null) {
            throw new QueryShardException(context, "[" + NAME + "] no mapping for for type [" + type + "]");
        }
        ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
        if (parentFieldMapper.active() == false) {
            throw new QueryShardException(context, "[" + NAME + "] _parent field has no parent type configured");
        }
        if (queryInnerHits != null) {
            try (XContentParser parser = queryInnerHits.getXcontentParser()) {
                XContentParser.Token token = parser.nextToken();
                if (token != XContentParser.Token.START_OBJECT) {
                    throw new IllegalStateException("start object expected but was: [" + token + "]");
                }
                InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser);
                if (innerHits != null) {
                    ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
                    InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), childDocMapper);
                    String name = innerHits.getName() != null ? innerHits.getName() : type;
                    context.addInnerHits(name, parentChildInnerHits);
                }
            }
        }

        String parentType = parentFieldMapper.type();
        DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType);
        if (parentDocMapper == null) {
            throw new QueryShardException(context, "[" + NAME + "] Type [" + type + "] points to a non existent parent type ["
                    + parentType + "]");
        }

        if (maxChildren > 0 && maxChildren < minChildren) {
            throw new QueryShardException(context, "[" + NAME + "] 'max_children' is less than 'min_children'");
        }

        BitSetProducer nonNestedDocsFilter = null;
        if (parentDocMapper.hasNestedObjects()) {
            nonNestedDocsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
        }

        // wrap the query with type query
        innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter());

        final Query query;
        final ParentChildIndexFieldData parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType());
        if (context.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
            int maxChildren = maxChildren();
            // 0 in pre 2.x p/c impl means unbounded
            if (maxChildren == 0) {
                maxChildren = Integer.MAX_VALUE;
            }
            query = new LateParsingQuery(parentDocMapper.typeFilter(), innerQuery, minChildren(), maxChildren, parentType, scoreTypeToScoreMode(scoreType), parentChildIndexFieldData);
        } else {
            // TODO: use the query API
            Filter parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter());
            if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) {
                query = new ChildrenQuery(parentChildIndexFieldData, parentType, type, parentFilter, innerQuery, scoreType, minChildren,
                        maxChildren, shortCircuitCutoff, nonNestedDocsFilter);
            } else {
                query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentType, type, parentFilter,
                        shortCircuitCutoff, nonNestedDocsFilter);
            }
        }
        return query;
    }

    static ScoreMode scoreTypeToScoreMode(ScoreType scoreType) {
        ScoreMode scoreMode;
        // TODO: move entirely over from ScoreType to org.apache.lucene.join.ScoreMode, when we drop the 1.x parent child code.
        switch (scoreType) {
            case NONE:
                scoreMode = ScoreMode.None;
                break;
            case MIN:
                scoreMode = ScoreMode.Min;
                break;
            case MAX:
                scoreMode = ScoreMode.Max;
                break;
            case SUM:
                scoreMode = ScoreMode.Total;
                break;
            case AVG:
                scoreMode = ScoreMode.Avg;
                break;
            default:
                throw new IllegalArgumentException("score type [" + scoreType + "] not supported");
        }
        return scoreMode;
    }

    final static class LateParsingQuery extends Query {

        private final Query toQuery;
        private final Query innerQuery;
        private final int minChildren;
        private final int maxChildren;
        private final String parentType;
        private final ScoreMode scoreMode;
        private final ParentChildIndexFieldData parentChildIndexFieldData;

        LateParsingQuery(Query toQuery, Query innerQuery, int minChildren, int maxChildren, String parentType, ScoreMode scoreMode, ParentChildIndexFieldData parentChildIndexFieldData) {
            this.toQuery = toQuery;
            this.innerQuery = innerQuery;
            this.minChildren = minChildren;
            this.maxChildren = maxChildren;
            this.parentType = parentType;
            this.scoreMode = scoreMode;
            this.parentChildIndexFieldData = parentChildIndexFieldData;
        }

        @Override
        public Query rewrite(IndexReader reader) throws IOException {
            SearchContext searchContext = SearchContext.current();
            if (searchContext == null) {
                throw new IllegalArgumentException("Search context is required to be set");
            }

            IndexSearcher indexSearcher = searchContext.searcher();
            String joinField = ParentFieldMapper.joinField(parentType);
            IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexSearcher.getIndexReader());
            MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);
            return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            if (!super.equals(o)) return false;

            LateParsingQuery that = (LateParsingQuery) o;

            if (minChildren != that.minChildren) return false;
            if (maxChildren != that.maxChildren) return false;
            if (!toQuery.equals(that.toQuery)) return false;
            if (!innerQuery.equals(that.innerQuery)) return false;
            if (!parentType.equals(that.parentType)) return false;
            return scoreMode == that.scoreMode;
        }

        @Override
        public int hashCode() {
            int result = super.hashCode();
            result = 31 * result + toQuery.hashCode();
            result = 31 * result + innerQuery.hashCode();
            result = 31 * result + minChildren;
            result = 31 * result + maxChildren;
            result = 31 * result + parentType.hashCode();
            result = 31 * result + scoreMode.hashCode();
            return result;
        }

        @Override
        public String toString(String s) {
            return "LateParsingQuery {parentType=" + parentType + "}";
        }

        public int getMinChildren() {
            return minChildren;
        }

        public int getMaxChildren() {
            return maxChildren;
        }

        public ScoreMode getScoreMode() {
            return scoreMode;
        }
    }

    @Override
    protected boolean doEquals(HasChildQueryBuilder that) {
        return Objects.equals(query, that.query)
                && Objects.equals(type, that.type)
                && Objects.equals(scoreType, that.scoreType)
                && Objects.equals(minChildren, that.minChildren)
                && Objects.equals(maxChildren, that.maxChildren)
                && Objects.equals(shortCircuitCutoff, that.shortCircuitCutoff)
                && Objects.equals(queryInnerHits, that.queryInnerHits);
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(query, type, scoreType, minChildren, maxChildren, shortCircuitCutoff, queryInnerHits);
    }

    protected HasChildQueryBuilder(StreamInput in) throws IOException {
        type = in.readString();
        minChildren = in.readInt();
        maxChildren = in.readInt();
        shortCircuitCutoff = in.readInt();
        final int ordinal = in.readVInt();
        scoreType = ScoreType.values()[ordinal];
        query = in.readQuery();
        if (in.readBoolean()) {
            queryInnerHits = new QueryInnerHits(in);
        }
    }

    @Override
    protected HasChildQueryBuilder doReadFrom(StreamInput in) throws IOException {
        return new HasChildQueryBuilder(in);
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeString(type);
        out.writeInt(minChildren());
        out.writeInt(maxChildren());
        out.writeInt(shortCircuitCutoff());
        out.writeVInt(scoreType.ordinal());
        out.writeQuery(query);
        if (queryInnerHits != null) {
            out.writeBoolean(true);
            queryInnerHits.writeTo(out);
        } else {
            out.writeBoolean(false);
        }
    }
}
@@ -19,88 +19,50 @@

package org.elasticsearch.index.query;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.JoinUtil;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
import org.elasticsearch.index.search.child.ChildrenQuery;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;

/**
 *
 * A query parser for <tt>has_child</tt> queries.
 */
public class HasChildQueryParser extends BaseQueryParserTemp {
public class HasChildQueryParser extends BaseQueryParser {

    private static final ParseField QUERY_FIELD = new ParseField("query", "filter");

    private final InnerHitsQueryParserHelper innerHitsQueryParserHelper;

    @Inject
    public HasChildQueryParser(InnerHitsQueryParserHelper innerHitsQueryParserHelper) {
        this.innerHitsQueryParserHelper = innerHitsQueryParserHelper;
    }

    @Override
    public String[] names() {
        return new String[] { HasChildQueryBuilder.NAME, Strings.toCamelCase(HasChildQueryBuilder.NAME) };
    }

    @Override
    public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
        QueryParseContext parseContext = context.parseContext();
    public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        boolean queryFound = false;
        float boost = AbstractQueryBuilder.DEFAULT_BOOST;
        String childType = null;
        ScoreType scoreType = ScoreType.NONE;
        int minChildren = 0;
        int maxChildren = 0;
        int shortCircuitParentDocSet = 8192;
        int minChildren = HasChildQueryBuilder.DEFAULT_MIN_CHILDREN;
        int maxChildren = HasChildQueryBuilder.DEFAULT_MAX_CHILDREN;
        int shortCircuitParentDocSet = HasChildQueryBuilder.DEFAULT_SHORT_CIRCUIT_CUTOFF;
        String queryName = null;
        InnerHitsSubSearchContext innerHits = null;

        QueryInnerHits queryInnerHits = null;
        String currentFieldName = null;
        XContentParser.Token token;
        XContentStructure.InnerQuery iq = null;
        QueryBuilder iqb = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (parseContext.isDeprecatedSetting(currentFieldName)) {
                // skip
            } else if (token == XContentParser.Token.START_OBJECT) {
                // Usually, the query would be parsed here, but the child
                // type may not have been extracted yet, so use the
                // XContentStructure.<type> facade to parse if available,
                // or delay parsing if not.
                if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
                    iq = new XContentStructure.InnerQuery(parseContext, childType == null ? null : new String[] { childType });
                    queryFound = true;
                    iqb = parseContext.parseInnerQueryBuilder();
                } else if ("inner_hits".equals(currentFieldName)) {
                    innerHits = innerHitsQueryParserHelper.parse(parseContext);
                    queryInnerHits = new QueryInnerHits(parser);
                } else {
                    throw new QueryParsingException(parseContext, "[has_child] query does not support [" + currentFieldName + "]");
                }
@@ -126,166 +88,10 @@ public class HasChildQueryParser extends BaseQueryParserTemp {
                }
            }
        }
        if (!queryFound) {
            throw new QueryParsingException(parseContext, "[has_child] requires 'query' field");
        }
        if (childType == null) {
            throw new QueryParsingException(parseContext, "[has_child] requires 'type' field");
        }

        Query innerQuery = iq.asQuery(childType);

        if (innerQuery == null) {
            return null;
        }
        innerQuery.setBoost(boost);

        DocumentMapper childDocMapper = context.mapperService().documentMapper(childType);
        if (childDocMapper == null) {
            throw new QueryParsingException(parseContext, "[has_child] No mapping for for type [" + childType + "]");
        }
        ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
        if (parentFieldMapper.active() == false) {
            throw new QueryParsingException(parseContext, "[has_child] _parent field has no parent type configured");
        }

        if (innerHits != null) {
            ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
            InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), childDocMapper);
            String name = innerHits.getName() != null ? innerHits.getName() : childType;
            context.addInnerHits(name, parentChildInnerHits);
        }

        String parentType = parentFieldMapper.type();
        DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType);
        if (parentDocMapper == null) {
            throw new QueryParsingException(parseContext, "[has_child] Type [" + childType + "] points to a non existent parent type ["
                    + parentType + "]");
        }

        if (maxChildren > 0 && maxChildren < minChildren) {
            throw new QueryParsingException(parseContext, "[has_child] 'max_children' is less than 'min_children'");
        }

        BitSetProducer nonNestedDocsFilter = null;
        if (parentDocMapper.hasNestedObjects()) {
            nonNestedDocsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
        }

        // wrap the query with type query
        innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter());

        final Query query;
        final ParentChildIndexFieldData parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType());
        if (context.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
            query = joinUtilHelper(parentType, parentChildIndexFieldData, parentDocMapper.typeFilter(), scoreType, innerQuery, minChildren, maxChildren);
        } else {
            // TODO: use the query API
            Filter parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter());
            if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) {
                query = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter, innerQuery, scoreType, minChildren,
                        maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter);
            } else {
                query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentType, childType, parentFilter,
                        shortCircuitParentDocSet, nonNestedDocsFilter);
            }
        }
        if (queryName != null) {
            context.addNamedQuery(queryName, query);
        }
        query.setBoost(boost);
        return query;
    }

    public static Query joinUtilHelper(String parentType, ParentChildIndexFieldData parentChildIndexFieldData, Query toQuery, ScoreType scoreType, Query innerQuery, int minChildren, int maxChildren) throws IOException {
        ScoreMode scoreMode;
        // TODO: move entirely over from ScoreType to org.apache.lucene.join.ScoreMode, when we drop the 1.x parent child code.
        switch (scoreType) {
            case NONE:
                scoreMode = ScoreMode.None;
                break;
            case MIN:
                scoreMode = ScoreMode.Min;
                break;
            case MAX:
                scoreMode = ScoreMode.Max;
                break;
            case SUM:
                scoreMode = ScoreMode.Total;
                break;
            case AVG:
                scoreMode = ScoreMode.Avg;
                break;
            default:
                throw new UnsupportedOperationException("score type [" + scoreType + "] not supported");
        }
        // 0 in pre 2.x p/c impl means unbounded
        if (maxChildren == 0) {
            maxChildren = Integer.MAX_VALUE;
        }
        return new LateParsingQuery(toQuery, innerQuery, minChildren, maxChildren, parentType, scoreMode, parentChildIndexFieldData);
    }

    final static class LateParsingQuery extends Query {

        private final Query toQuery;
        private final Query innerQuery;
        private final int minChildren;
        private final int maxChildren;
        private final String parentType;
        private final ScoreMode scoreMode;
        private final ParentChildIndexFieldData parentChildIndexFieldData;
        private final Object identity = new Object();

        LateParsingQuery(Query toQuery, Query innerQuery, int minChildren, int maxChildren, String parentType, ScoreMode scoreMode, ParentChildIndexFieldData parentChildIndexFieldData) {
            this.toQuery = toQuery;
            this.innerQuery = innerQuery;
            this.minChildren = minChildren;
            this.maxChildren = maxChildren;
            this.parentType = parentType;
            this.scoreMode = scoreMode;
            this.parentChildIndexFieldData = parentChildIndexFieldData;
        }

        @Override
        public Query rewrite(IndexReader reader) throws IOException {
            SearchContext searchContext = SearchContext.current();
            if (searchContext == null) {
                throw new IllegalArgumentException("Search context is required to be set");
            }

            IndexSearcher indexSearcher = searchContext.searcher();
            String joinField = ParentFieldMapper.joinField(parentType);
            IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexSearcher.getIndexReader());
            MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);
            return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren);
        }

        // Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals():

        // We can't check for actually equality here, since we need to IndexReader for this, but
        // that isn't available on all cases during query parse time, so instead rely on identity:
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            if (!super.equals(o)) return false;

            LateParsingQuery that = (LateParsingQuery) o;
            return identity.equals(that.identity);
        }

        @Override
        public int hashCode() {
            int result = super.hashCode();
            result = 31 * result + identity.hashCode();
            return result;
        }

        @Override
        public String toString(String s) {
            return "LateParsingQuery {parentType=" + parentType + "}";
        }
        HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(childType, iqb, maxChildren, minChildren, shortCircuitParentDocSet, scoreType, queryInnerHits);
        hasChildQueryBuilder.queryName(queryName);
        hasChildQueryBuilder.boost(boost);
        return hasChildQueryBuilder;
    }

    @Override
@@ -19,7 +19,7 @@
package org.elasticsearch.index.query;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.support.QueryInnerHitBuilder;
import org.elasticsearch.index.query.support.QueryInnerHits;

import java.io.IOException;

@@ -32,7 +32,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
    private final QueryBuilder queryBuilder;
    private final String parentType;
    private String scoreType;
    private QueryInnerHitBuilder innerHit = null;
    private QueryInnerHits innerHit = null;
    static final HasParentQueryBuilder PROTOTYPE = new HasParentQueryBuilder(null, null);

    /**
@@ -55,7 +55,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
    /**
     * Sets inner hit definition in the scope of this query and reusing the defined type and query.
     */
    public HasParentQueryBuilder innerHit(QueryInnerHitBuilder innerHit) {
    public HasParentQueryBuilder innerHit(QueryInnerHits innerHit) {
        this.innerHit = innerHit;
        return this;
    }
@@ -71,9 +71,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
        }
        printBoostAndQueryName(builder);
        if (innerHit != null) {
            builder.startObject("inner_hits");
            builder.value(innerHit);
            builder.endObject();
            innerHit.toXContent(builder, params);
        }
        builder.endObject();
    }
@@ -19,6 +19,7 @@
package org.elasticsearch.index.query;

import org.apache.lucene.search.*;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@@ -40,7 +41,6 @@ import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import static org.elasticsearch.index.query.HasChildQueryParser.joinUtilHelper;

public class HasParentQueryParser extends BaseQueryParserTemp {

@@ -85,7 +85,7 @@ public class HasParentQueryParser extends BaseQueryParserTemp {
                    iq = new XContentStructure.InnerQuery(parseContext, parentType == null ? null : new String[] {parentType});
                    queryFound = true;
                } else if ("inner_hits".equals(currentFieldName)) {
                    innerHits = innerHitsQueryParserHelper.parse(parseContext);
                    innerHits = innerHitsQueryParserHelper.parse(parser);
                } else {
                    throw new QueryParsingException(parseContext, "[has_parent] query does not support [" + currentFieldName + "]");
                }
@@ -198,8 +198,8 @@ public class HasParentQueryParser extends BaseQueryParserTemp {
        innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter());
        Filter childrenFilter = new QueryWrapperFilter(Queries.not(parentFilter));
        if (context.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
            ScoreType scoreMode = score ? ScoreType.MAX : ScoreType.NONE;
            return joinUtilHelper(parentType, parentChildIndexFieldData, childrenFilter, scoreMode, innerQuery, 0, Integer.MAX_VALUE);
            ScoreMode scoreMode = score ? ScoreMode.Max : ScoreMode.None;
            return new HasChildQueryBuilder.LateParsingQuery(childrenFilter, innerQuery, HasChildQueryBuilder.DEFAULT_MIN_CHILDREN, HasChildQueryBuilder.DEFAULT_MAX_CHILDREN, parentType, scoreMode, parentChildIndexFieldData);
        } else {
            if (score) {
                return new ParentQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter);
@@ -43,6 +43,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.search.termslookup.TermsLookupFetchService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.similarity.SimilarityService;
@@ -59,6 +60,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
    public static final String QUERY_STRING_LENIENT = "index.query_string.lenient";
    public static final String PARSE_STRICT = "index.query.parse.strict";
    public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields";
    private final InnerHitsQueryParserHelper innerHitsQueryParserHelper;

    private CloseableThreadLocal<QueryShardContext> cache = new CloseableThreadLocal<QueryShardContext>() {
        @Override
@@ -101,7 +103,8 @@ public class IndexQueryParserService extends AbstractIndexComponent {
                                  MapperService mapperService, IndexCache indexCache, IndexFieldDataService fieldDataService,
                                  BitsetFilterCache bitsetFilterCache,
                                  @Nullable SimilarityService similarityService, ClusterService clusterService,
                                  IndexNameExpressionResolver indexNameExpressionResolver) {
                                  IndexNameExpressionResolver indexNameExpressionResolver,
                                  InnerHitsQueryParserHelper innerHitsQueryParserHelper) {
        super(index, indexSettings);
        this.scriptService = scriptService;
        this.analysisService = analysisService;
@@ -118,6 +121,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
        this.parseFieldMatcher = new ParseFieldMatcher(indexSettings);
        this.defaultAllowUnmappedFields = indexSettings.getAsBoolean(ALLOW_UNMAPPED, true);
        this.indicesQueriesRegistry = indicesQueriesRegistry;
        this.innerHitsQueryParserHelper = innerHitsQueryParserHelper;
    }

    @Inject(optional=true)
@@ -347,4 +351,8 @@ public class IndexQueryParserService extends AbstractIndexComponent {
    public List<Object> handleTermsLookup(TermsLookup termsLookup) {
        return this.termsLookupFetchService.fetch(termsLookup);
    }

    public InnerHitsQueryParserHelper getInnerHitsQueryParserHelper() {
        return innerHitsQueryParserHelper;
    }
}
@@ -20,7 +20,7 @@
package org.elasticsearch.index.query;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.support.QueryInnerHitBuilder;
import org.elasticsearch.index.query.support.QueryInnerHits;

import java.io.IOException;
import java.util.Objects;
@@ -35,7 +35,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>

    private String scoreMode;

    private QueryInnerHitBuilder innerHit;
    private QueryInnerHits innerHit;

    static final NestedQueryBuilder PROTOTYPE = new NestedQueryBuilder();

@@ -63,7 +63,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
    /**
     * Sets inner hit definition in the scope of this nested query and reusing the defined path and query.
     */
    public NestedQueryBuilder innerHit(QueryInnerHitBuilder innerHit) {
    public NestedQueryBuilder innerHit(QueryInnerHits innerHit) {
        this.innerHit = innerHit;
        return this;
    }
@@ -79,9 +79,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
        }
        printBoostAndQueryName(builder);
        if (innerHit != null) {
            builder.startObject("inner_hits");
            builder.value(innerHit);
            builder.endObject();
            innerHit.toXContent(builder, params);
        }
        builder.endObject();
    }
@@ -73,7 +73,7 @@ public class NestedQueryParser extends BaseQueryParserTemp {
                } else if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) {
                    builder.filter();
                } else if ("inner_hits".equals(currentFieldName)) {
                    builder.setInnerHits(innerHitsQueryParserHelper.parse(parseContext));
                    builder.setInnerHits(innerHitsQueryParserHelper.parse(parser));
                } else {
                    throw new QueryParsingException(parseContext, "[nested] query does not support [" + currentFieldName + "]");
                }
@@ -1,376 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query.support;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortOrder;

import java.io.IOException;
import java.util.Map;

/**
 */
@SuppressWarnings("unchecked")
public abstract class BaseInnerHitBuilder<T extends BaseInnerHitBuilder> implements ToXContent {

    protected SearchSourceBuilder sourceBuilder;

    /**
     * The index to start to return hits from. Defaults to <tt>0</tt>.
     */
    public T setFrom(int from) {
        sourceBuilder().from(from);
        return (T) this;
    }

    /**
     * The number of search hits to return. Defaults to <tt>10</tt>.
     */
    public T setSize(int size) {
        sourceBuilder().size(size);
        return (T) this;
    }

    /**
     * Applies when sorting, and controls if scores will be tracked as well. Defaults to
     * <tt>false</tt>.
     */
    public T setTrackScores(boolean trackScores) {
        sourceBuilder().trackScores(trackScores);
        return (T) this;
    }

    /**
     * Should each {@link org.elasticsearch.search.SearchHit} be returned with an
     * explanation of the hit (ranking).
     */
    public T setExplain(boolean explain) {
        sourceBuilder().explain(explain);
        return (T) this;
    }

    /**
     * Should each {@link org.elasticsearch.search.SearchHit} be returned with its
     * version.
     */
    public T setVersion(boolean version) {
        sourceBuilder().version(version);
        return (T) this;
    }

    /**
     * Add a stored field to be loaded and returned with the inner hit.
     */
    public T field(String name) {
        sourceBuilder().field(name);
        return (T) this;
    }

    /**
     * Sets no fields to be loaded, resulting in only id and type to be returned per field.
     */
    public T setNoFields() {
        sourceBuilder().noFields();
        return (T) this;
    }

    /**
     * Indicates whether the response should contain the stored _source for every hit
     */
    public T setFetchSource(boolean fetch) {
        sourceBuilder().fetchSource(fetch);
        return (T) this;
    }

    /**
     * Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard
     * elements.
     *
     * @param include An optional include (optionally wildcarded) pattern to filter the returned _source
     * @param exclude An optional exclude (optionally wildcarded) pattern to filter the returned _source
     */
    public T setFetchSource(@Nullable String include, @Nullable String exclude) {
        sourceBuilder().fetchSource(include, exclude);
        return (T) this;
    }

    /**
     * Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard
     * elements.
     *
     * @param includes An optional list of include (optionally wildcarded) pattern to filter the returned _source
     * @param excludes An optional list of exclude (optionally wildcarded) pattern to filter the returned _source
     */
    public T setFetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
        sourceBuilder().fetchSource(includes, excludes);
        return (T) this;
    }

    /**
     * Adds a field data based field to load and return. The field does not have to be stored,
     * but its recommended to use non analyzed or numeric fields.
     *
     * @param name The field to get from the field data cache
     */
    public T addFieldDataField(String name) {
        sourceBuilder().fieldDataField(name);
        return (T) this;
    }

    /**
     * Adds a script based field to load and return. The field does not have to be stored,
     * but its recommended to use non analyzed or numeric fields.
     *
     * @param name   The name that will represent this value in the return hit
     * @param script The script to use
     */
    public T addScriptField(String name, Script script) {
        sourceBuilder().scriptField(name, script);
        return (T) this;
    }

    /**
     * Adds a sort against the given field name and the sort ordering.
     *
     * @param field The name of the field
     * @param order The sort ordering
     */
    public T addSort(String field, SortOrder order) {
        sourceBuilder().sort(field, order);
        return (T) this;
    }

    /**
     * Adds a generic sort builder.
     *
     * @see org.elasticsearch.search.sort.SortBuilders
     */
    public T addSort(SortBuilder sort) {
        sourceBuilder().sort(sort);
        return (T) this;
    }

    public HighlightBuilder highlightBuilder() {
        return sourceBuilder().highlighter();
    }

    /**
     * Adds a field to be highlighted with default fragment size of 100 characters, and
     * default number of fragments of 5.
     *
     * @param name The field to highlight
     */
    public T addHighlightedField(String name) {
        highlightBuilder().field(name);
        return (T) this;
    }

    /**
     * Adds a field to be highlighted with a provided fragment size (in characters), and
     * default number of fragments of 5.
     *
     * @param name         The field to highlight
     * @param fragmentSize The size of a fragment in characters
     */
    public T addHighlightedField(String name, int fragmentSize) {
        highlightBuilder().field(name, fragmentSize);
        return (T) this;
    }

    /**
     * Adds a field to be highlighted with a provided fragment size (in characters), and
     * a provided (maximum) number of fragments.
     *
     * @param name              The field to highlight
     * @param fragmentSize      The size of a fragment in characters
     * @param numberOfFragments The (maximum) number of fragments
     */
    public T addHighlightedField(String name, int fragmentSize, int numberOfFragments) {
        highlightBuilder().field(name, fragmentSize, numberOfFragments);
        return (T) this;
    }

    /**
     * Adds a field to be highlighted with a provided fragment size (in characters),
     * a provided (maximum) number of fragments and an offset for the highlight.
     *
     * @param name              The field to highlight
     * @param fragmentSize      The size of a fragment in characters
     * @param numberOfFragments The (maximum) number of fragments
     */
    public T addHighlightedField(String name, int fragmentSize, int numberOfFragments,
                                 int fragmentOffset) {
        highlightBuilder().field(name, fragmentSize, numberOfFragments, fragmentOffset);
        return (T) this;
    }

    /**
     * Adds a highlighted field.
     */
    public T addHighlightedField(HighlightBuilder.Field field) {
        highlightBuilder().field(field);
        return (T) this;
    }

    /**
     * Set a tag scheme that encapsulates a built in pre and post tags. The allows schemes
     * are <tt>styled</tt> and <tt>default</tt>.
     *
     * @param schemaName The tag scheme name
     */
    public T setHighlighterTagsSchema(String schemaName) {
        highlightBuilder().tagsSchema(schemaName);
        return (T) this;
    }

    public T setHighlighterFragmentSize(Integer fragmentSize) {
        highlightBuilder().fragmentSize(fragmentSize);
        return (T) this;
    }

    public T setHighlighterNumOfFragments(Integer numOfFragments) {
        highlightBuilder().numOfFragments(numOfFragments);
        return (T) this;
    }

    public T setHighlighterFilter(Boolean highlightFilter) {
        highlightBuilder().highlightFilter(highlightFilter);
        return (T) this;
    }

    /**
     * The encoder to set for highlighting
     */
    public T setHighlighterEncoder(String encoder) {
        highlightBuilder().encoder(encoder);
        return (T) this;
    }

    /**
     * Explicitly set the pre tags that will be used for highlighting.
     */
    public T setHighlighterPreTags(String... preTags) {
        highlightBuilder().preTags(preTags);
        return (T) this;
    }

    /**
     * Explicitly set the post tags that will be used for highlighting.
     */
    public T setHighlighterPostTags(String... postTags) {
        highlightBuilder().postTags(postTags);
        return (T) this;
    }

    /**
     * The order of fragments per field. By default, ordered by the order in the
     * highlighted text. Can be <tt>score</tt>, which then it will be ordered
     * by score of the fragments.
     */
    public T setHighlighterOrder(String order) {
        highlightBuilder().order(order);
        return (T) this;
    }

    public T setHighlighterRequireFieldMatch(boolean requireFieldMatch) {
        highlightBuilder().requireFieldMatch(requireFieldMatch);
        return (T) this;
    }

    public T setHighlighterBoundaryMaxScan(Integer boundaryMaxScan) {
        highlightBuilder().boundaryMaxScan(boundaryMaxScan);
        return (T) this;
    }

    public T setHighlighterBoundaryChars(char[] boundaryChars) {
        highlightBuilder().boundaryChars(boundaryChars);
        return (T) this;
    }

    /**
     * The highlighter type to use.
     */
    public T setHighlighterType(String type) {
        highlightBuilder().highlighterType(type);
        return (T) this;
    }

    public T setHighlighterFragmenter(String fragmenter) {
        highlightBuilder().fragmenter(fragmenter);
        return (T) this;
    }

    /**
     * Sets a query to be used for highlighting all fields instead of the search query.
     */
    public T setHighlighterQuery(QueryBuilder highlightQuery) {
        highlightBuilder().highlightQuery(highlightQuery);
        return (T) this;
    }

    /**
     * Sets the size of the fragment to return from the beginning of the field if there are no matches to
     * highlight and the field doesn't also define noMatchSize.
     * @param noMatchSize integer to set or null to leave out of request. default is null.
     * @return this builder for chaining
     */
    public T setHighlighterNoMatchSize(Integer noMatchSize) {
        highlightBuilder().noMatchSize(noMatchSize);
        return (T) this;
    }

    /**
     * Sets the maximum number of phrases the fvh will consider if the field doesn't also define phraseLimit.
     */
    public T setHighlighterPhraseLimit(Integer phraseLimit) {
        highlightBuilder().phraseLimit(phraseLimit);
        return (T) this;
    }

    public T setHighlighterOptions(Map<String, Object> options) {
        highlightBuilder().options(options);
        return (T) this;
    }

    protected SearchSourceBuilder sourceBuilder() {
        if (sourceBuilder == null) {
            sourceBuilder = new SearchSourceBuilder();
        }
        return sourceBuilder;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (sourceBuilder != null) {
            sourceBuilder.innerToXContent(builder, params);
        }
        return builder;
    }

}
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query.support;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
@@ -52,13 +53,12 @@ public class InnerHitsQueryParserHelper {
        this.fieldDataFieldsParseElement = fieldDataFieldsParseElement;
    }

    public InnerHitsSubSearchContext parse(QueryParseContext parserContext) throws IOException, QueryParsingException {
    public InnerHitsSubSearchContext parse(XContentParser parser) throws IOException {
        String fieldName = null;
        XContentParser.Token token;
        String innerHitName = null;
        SubSearchContext subSearchContext = new SubSearchContext(SearchContext.current());
        try {
            XContentParser parser = parserContext.parser();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    fieldName = parser.currentName();
@@ -73,7 +73,7 @@ public class InnerHitsQueryParserHelper {
                }
            }
        } catch (Exception e) {
            throw new QueryParsingException(parserContext, "Failed to parse [_inner_hits]", e);
            throw new IOException("Failed to parse [_inner_hits]");
        }
        return new InnerHitsSubSearchContext(innerHitName, subSearchContext);
    }
@@ -1,51 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query.support;

import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;

/**
 */
public class QueryInnerHitBuilder extends BaseInnerHitBuilder<QueryInnerHitBuilder> {

    private String name;

    /**
     * Set the key name to be used in the response.
     *
     * Defaults to the path if used in nested query, child type if used in has_child query and parent type if used in has_parent.
     */
    public QueryInnerHitBuilder setName(String name) {
        this.name = name;
        return this;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        super.toXContent(builder, params);
        if (name != null) {
            builder.field("name", name);
        }
        return builder;
    }

}
@ -0,0 +1,110 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.query.support;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.support.ToXContentToBytes;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.*;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class QueryInnerHits extends ToXContentToBytes implements Writeable<QueryInnerHits> {
|
||||
private final BytesReference queryInnerHitsSearchSource;
|
||||
|
||||
public QueryInnerHits(StreamInput input) throws IOException {
|
||||
queryInnerHitsSearchSource = input.readBytesReference();
|
||||
}
|
||||
|
||||
public QueryInnerHits(XContentParser parser) throws IOException {
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
try (XContentBuilder builder = XContentFactory.cborBuilder(out)) {
|
||||
builder.copyCurrentStructure(parser);
|
||||
queryInnerHitsSearchSource = builder.bytes();
|
||||
}
|
||||
}
|
||||
|
||||
public QueryInnerHits() {
|
||||
this(null, null);
|
||||
}
|
||||
|
||||
public QueryInnerHits(String name, InnerHitsBuilder.InnerHit innerHit) {
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
try (XContentBuilder builder = XContentFactory.cborBuilder(out)) {
|
||||
builder.startObject();
|
||||
if (name != null) {
|
||||
builder.field("name", name);
|
||||
}
|
||||
if (innerHit != null) {
|
||||
innerHit.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
}
|
||||
builder.endObject();
|
||||
this.queryInnerHitsSearchSource = builder.bytes();
|
||||
} catch (IOException e) {
|
||||
throw new ElasticsearchException("failed to build xcontent", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryInnerHits readFrom(StreamInput in) throws IOException {
|
||||
return new QueryInnerHits(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field("inner_hits");
|
||||
try (XContentParser parser = XContentType.CBOR.xContent().createParser(queryInnerHitsSearchSource)) {
|
||||
builder.copyCurrentStructure(parser);
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeBytesReference(queryInnerHitsSearchSource);
|
||||
}
|
||||
|
||||
public XContentParser getXcontentParser() throws IOException {
|
||||
return XContentType.CBOR.xContent().createParser(queryInnerHitsSearchSource);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
QueryInnerHits that = (QueryInnerHits) o;
|
||||
|
||||
return queryInnerHitsSearchSource.equals(that.queryInnerHitsSearchSource);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return queryInnerHitsSearchSource.hashCode();
|
||||
}
|
||||
}
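QueryInnerHits deliberately stores the inner hits definition as an opaque CBOR-encoded byte blob, so the same object can be built from a parser during parsing or programmatically from an InnerHit, and then shipped over the transport layer unchanged. A hedged sketch of the programmatic path and the stream round trip (the name and sort field are illustrative):

// Sketch: build a QueryInnerHits programmatically and round-trip it through the stream API.
InnerHitsBuilder.InnerHit definition = new InnerHitsBuilder.InnerHit()
        .setSize(3)
        .addSort("timestamp", SortOrder.ASC);            // "timestamp" is an assumed field
QueryInnerHits innerHits = new QueryInnerHits("recent_children", definition);

BytesStreamOutput out = new BytesStreamOutput();
innerHits.writeTo(out);                                  // serialize the CBOR blob
QueryInnerHits copy = new QueryInnerHits(StreamInput.wrap(out.bytes()));
assert innerHits.equals(copy);                           // equality is defined on the blob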
|
@ -721,4 +721,20 @@ public final class ChildrenQuery extends IndexCacheableQuery {
|
||||
}
|
||||
}
|
||||
|
||||
    public int getMinChildren() {
        return minChildren;
    }

    public int getShortCircuitParentDocSet() {
        return shortCircuitParentDocSet;
    }

    public int getMaxChildren() {
        return maxChildren;
    }

    public ScoreType getScoreType() {
        return scoreType;
    }
}
|
||||
|
@ -66,6 +66,7 @@ import org.elasticsearch.index.deletionpolicy.SnapshotDeletionPolicy;
|
||||
import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
|
||||
import org.elasticsearch.index.engine.*;
|
||||
import org.elasticsearch.index.fielddata.FieldDataStats;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
||||
import org.elasticsearch.index.fielddata.ShardFieldData;
|
||||
import org.elasticsearch.index.flush.FlushStats;
|
||||
|
@ -19,10 +19,15 @@
|
||||
|
||||
package org.elasticsearch.search.fetch.innerhits;
|
||||
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.support.BaseInnerHitBuilder;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
@ -32,12 +37,12 @@ import java.util.Map;
|
||||
*/
|
||||
public class InnerHitsBuilder implements ToXContent {
|
||||
|
||||
private Map<String, InnerHit> innerHits = new HashMap<>();
|
||||
private final Map<String, InnerHitsHolder> innerHits = new HashMap<>();
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject("inner_hits");
|
||||
for (Map.Entry<String, InnerHit> entry : innerHits.entrySet()) {
|
||||
for (Map.Entry<String, InnerHitsHolder> entry : innerHits.entrySet()) {
|
||||
builder.startObject(entry.getKey());
|
||||
entry.getValue().toXContent(builder, params);
|
||||
builder.endObject();
|
||||
@ -45,15 +50,408 @@ public class InnerHitsBuilder implements ToXContent {
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
public void addInnerHit(String name, InnerHit innerHit) {
|
||||
innerHits.put(name, innerHit);
|
||||
/**
|
||||
* For nested inner hits the path to collect child nested docs for.
|
||||
* @param name the name / key of the inner hits in the response
|
||||
* @param path the path into the nested to collect inner hits for
|
||||
* @param innerHit the inner hits definition
|
||||
*/
|
||||
public void addNestedInnerHits(String name, String path, InnerHit innerHit) {
|
||||
if (innerHits.containsKey(name)) {
|
||||
throw new IllegalArgumentException("inner hits for name: [" + name +"] is already registered");
|
||||
}
|
||||
innerHits.put(name, new NestedInnerHitsHolder(path, innerHit));
|
||||
}
|
||||
|
||||
public static class InnerHit extends BaseInnerHitBuilder<InnerHit> {
|
||||
/**
|
||||
* For parent/child inner hits the type to collect inner hits for.
|
||||
* @param name the name / key of the inner hits in the response
|
||||
* @param type the document type to collect inner hits for
|
||||
* @param innerHit the inner hits definition
|
||||
*/
|
||||
public void addParentChildInnerHits(String name, String type, InnerHit innerHit) {
|
||||
innerHits.put(name, new ParentChildInnerHitsHolder(type, innerHit));
|
||||
}
|
||||
|
||||
private static class InnerHitsHolder implements ToXContent {
|
||||
private final InnerHit hits;
|
||||
|
||||
private InnerHitsHolder(InnerHit hits) {
|
||||
this.hits = hits;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return hits.toXContent(builder, params);
|
||||
}
|
||||
}
|
||||
|
||||
private static class ParentChildInnerHitsHolder extends InnerHitsHolder {
|
||||
|
||||
private final String type;
|
||||
|
||||
private ParentChildInnerHitsHolder(String type, InnerHit hits) {
|
||||
super(hits);
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject("type").startObject(type);
|
||||
super.toXContent(builder, params);
|
||||
return builder.endObject().endObject();
|
||||
}
|
||||
}
|
||||
|
||||
private static class NestedInnerHitsHolder extends InnerHitsHolder {
|
||||
|
||||
private final String path;
|
||||
|
||||
private NestedInnerHitsHolder(String path, InnerHit hits) {
|
||||
super(hits);
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject("path").startObject(path);
|
||||
super.toXContent(builder, params);
|
||||
return builder.endObject().endObject();
|
||||
}
|
||||
}
|
||||
|
||||
public static class InnerHit implements ToXContent {
|
||||
|
||||
private SearchSourceBuilder sourceBuilder;
|
||||
private String path;
|
||||
private String type;
|
||||
|
||||
/**
|
||||
* The index to start to return hits from. Defaults to <tt>0</tt>.
|
||||
*/
|
||||
public InnerHit setFrom(int from) {
|
||||
sourceBuilder().from(from);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The number of search hits to return. Defaults to <tt>10</tt>.
|
||||
*/
|
||||
public InnerHit setSize(int size) {
|
||||
sourceBuilder().size(size);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies when sorting, and controls if scores will be tracked as well. Defaults to
|
||||
* <tt>false</tt>.
|
||||
*/
|
||||
public InnerHit setTrackScores(boolean trackScores) {
|
||||
sourceBuilder().trackScores(trackScores);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Should each {@link org.elasticsearch.search.SearchHit} be returned with an
|
||||
* explanation of the hit (ranking).
|
||||
*/
|
||||
public InnerHit setExplain(boolean explain) {
|
||||
sourceBuilder().explain(explain);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Should each {@link org.elasticsearch.search.SearchHit} be returned with its
|
||||
* version.
|
||||
*/
|
||||
public InnerHit setVersion(boolean version) {
|
||||
sourceBuilder().version(version);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a stored field to be loaded and returned with the inner hit.
|
||||
*/
|
||||
public InnerHit field(String name) {
|
||||
sourceBuilder().field(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets no fields to be loaded, resulting in only id and type to be returned per field.
|
||||
*/
|
||||
public InnerHit setNoFields() {
|
||||
sourceBuilder().noFields();
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicates whether the response should contain the stored _source for every hit
|
||||
*/
|
||||
public InnerHit setFetchSource(boolean fetch) {
|
||||
sourceBuilder().fetchSource(fetch);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard
|
||||
* elements.
|
||||
*
|
||||
* @param include An optional include (optionally wildcarded) pattern to filter the returned _source
|
||||
* @param exclude An optional exclude (optionally wildcarded) pattern to filter the returned _source
|
||||
*/
|
||||
public InnerHit setFetchSource(@Nullable String include, @Nullable String exclude) {
|
||||
sourceBuilder().fetchSource(include, exclude);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard
|
||||
* elements.
|
||||
*
|
||||
* @param includes An optional list of include (optionally wildcarded) patterns to filter the returned _source
* @param excludes An optional list of exclude (optionally wildcarded) patterns to filter the returned _source
|
||||
*/
|
||||
public InnerHit setFetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
|
||||
sourceBuilder().fetchSource(includes, excludes);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a field data based field to load and return. The field does not have to be stored,
|
||||
* but it is recommended to use non-analyzed or numeric fields.
|
||||
*
|
||||
* @param name The field to get from the field data cache
|
||||
*/
|
||||
public InnerHit addFieldDataField(String name) {
|
||||
sourceBuilder().fieldDataField(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a script based field to load and return. The field does not have to be stored,
|
||||
* but it is recommended to use non-analyzed or numeric fields.
|
||||
*
|
||||
* @param name The name that will represent this value in the return hit
|
||||
* @param script The script to use
|
||||
*/
|
||||
public InnerHit addScriptField(String name, Script script) {
|
||||
sourceBuilder().scriptField(name, script);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a sort against the given field name and the sort ordering.
|
||||
*
|
||||
* @param field The name of the field
|
||||
* @param order The sort ordering
|
||||
*/
|
||||
public InnerHit addSort(String field, SortOrder order) {
|
||||
sourceBuilder().sort(field, order);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a generic sort builder.
|
||||
*
|
||||
* @see org.elasticsearch.search.sort.SortBuilders
|
||||
*/
|
||||
public InnerHit addSort(SortBuilder sort) {
|
||||
sourceBuilder().sort(sort);
|
||||
return this;
|
||||
}
|
||||
|
||||
public HighlightBuilder highlightBuilder() {
|
||||
return sourceBuilder().highlighter();
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a field to be highlighted with default fragment size of 100 characters, and
|
||||
* default number of fragments of 5.
|
||||
*
|
||||
* @param name The field to highlight
|
||||
*/
|
||||
public InnerHit addHighlightedField(String name) {
|
||||
highlightBuilder().field(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Adds a field to be highlighted with a provided fragment size (in characters), and
|
||||
* default number of fragments of 5.
|
||||
*
|
||||
* @param name The field to highlight
|
||||
* @param fragmentSize The size of a fragment in characters
|
||||
*/
|
||||
public InnerHit addHighlightedField(String name, int fragmentSize) {
|
||||
highlightBuilder().field(name, fragmentSize);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a field to be highlighted with a provided fragment size (in characters), and
|
||||
* a provided (maximum) number of fragments.
|
||||
*
|
||||
* @param name The field to highlight
|
||||
* @param fragmentSize The size of a fragment in characters
|
||||
* @param numberOfFragments The (maximum) number of fragments
|
||||
*/
|
||||
public InnerHit addHighlightedField(String name, int fragmentSize, int numberOfFragments) {
|
||||
highlightBuilder().field(name, fragmentSize, numberOfFragments);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a field to be highlighted with a provided fragment size (in characters),
|
||||
* a provided (maximum) number of fragments and an offset for the highlight.
|
||||
*
|
||||
* @param name The field to highlight
|
||||
* @param fragmentSize The size of a fragment in characters
|
||||
* @param numberOfFragments The (maximum) number of fragments
|
||||
*/
|
||||
public InnerHit addHighlightedField(String name, int fragmentSize, int numberOfFragments,
|
||||
int fragmentOffset) {
|
||||
highlightBuilder().field(name, fragmentSize, numberOfFragments, fragmentOffset);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a highlighted field.
|
||||
*/
|
||||
public InnerHit addHighlightedField(HighlightBuilder.Field field) {
|
||||
highlightBuilder().field(field);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a tag scheme that encapsulates built-in pre and post tags. The allowed schemes
* are <tt>styled</tt> and <tt>default</tt>.
|
||||
*
|
||||
* @param schemaName The tag scheme name
|
||||
*/
|
||||
public InnerHit setHighlighterTagsSchema(String schemaName) {
|
||||
highlightBuilder().tagsSchema(schemaName);
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHit setHighlighterFragmentSize(Integer fragmentSize) {
|
||||
highlightBuilder().fragmentSize(fragmentSize);
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHit setHighlighterNumOfFragments(Integer numOfFragments) {
|
||||
highlightBuilder().numOfFragments(numOfFragments);
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHit setHighlighterFilter(Boolean highlightFilter) {
|
||||
highlightBuilder().highlightFilter(highlightFilter);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The encoder to set for highlighting
|
||||
*/
|
||||
public InnerHit setHighlighterEncoder(String encoder) {
|
||||
highlightBuilder().encoder(encoder);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Explicitly set the pre tags that will be used for highlighting.
|
||||
*/
|
||||
public InnerHit setHighlighterPreTags(String... preTags) {
|
||||
highlightBuilder().preTags(preTags);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Explicitly set the post tags that will be used for highlighting.
|
||||
*/
|
||||
public InnerHit setHighlighterPostTags(String... postTags) {
|
||||
highlightBuilder().postTags(postTags);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The order of fragments per field. By default, ordered by the order in the
|
||||
* highlighted text. Can be <tt>score</tt>, in which case fragments are ordered
|
||||
* by score of the fragments.
|
||||
*/
|
||||
public InnerHit setHighlighterOrder(String order) {
|
||||
highlightBuilder().order(order);
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHit setHighlighterRequireFieldMatch(boolean requireFieldMatch) {
|
||||
highlightBuilder().requireFieldMatch(requireFieldMatch);
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHit setHighlighterBoundaryMaxScan(Integer boundaryMaxScan) {
|
||||
highlightBuilder().boundaryMaxScan(boundaryMaxScan);
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHit setHighlighterBoundaryChars(char[] boundaryChars) {
|
||||
highlightBuilder().boundaryChars(boundaryChars);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The highlighter type to use.
|
||||
*/
|
||||
public InnerHit setHighlighterType(String type) {
|
||||
highlightBuilder().highlighterType(type);
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHit setHighlighterFragmenter(String fragmenter) {
|
||||
highlightBuilder().fragmenter(fragmenter);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets a query to be used for highlighting all fields instead of the search query.
|
||||
*/
|
||||
public InnerHit setHighlighterQuery(QueryBuilder highlightQuery) {
|
||||
highlightBuilder().highlightQuery(highlightQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the size of the fragment to return from the beginning of the field if there are no matches to
|
||||
* highlight and the field doesn't also define noMatchSize.
|
||||
*
|
||||
* @param noMatchSize integer to set or null to leave out of request. default is null.
|
||||
* @return this builder for chaining
|
||||
*/
|
||||
public InnerHit setHighlighterNoMatchSize(Integer noMatchSize) {
|
||||
highlightBuilder().noMatchSize(noMatchSize);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the maximum number of phrases the fvh will consider if the field doesn't also define phraseLimit.
|
||||
*/
|
||||
public InnerHit setHighlighterPhraseLimit(Integer phraseLimit) {
|
||||
highlightBuilder().phraseLimit(phraseLimit);
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHit setHighlighterOptions(Map<String, Object> options) {
|
||||
highlightBuilder().options(options);
|
||||
return this;
|
||||
}
|
||||
|
||||
protected SearchSourceBuilder sourceBuilder() {
|
||||
if (sourceBuilder == null) {
|
||||
sourceBuilder = new SearchSourceBuilder();
|
||||
}
|
||||
return sourceBuilder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the query to run for collecting the inner hits.
|
||||
*/
|
||||
@ -62,19 +460,15 @@ public class InnerHitsBuilder implements ToXContent {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* For parent/child inner hits the type to collect inner hits for.
|
||||
*/
|
||||
public InnerHit setPath(String path) {
|
||||
this.path = path;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* For nested inner hits the path to collect child nested docs for.
|
||||
* Adds a nested inner hit definition that collects inner hits for hits
|
||||
* on this inner hit level.
|
||||
*/
|
||||
public InnerHit setType(String type) {
|
||||
this.type = type;
|
||||
public InnerHit addNestedInnerHits(String name, String path, InnerHit innerHit) {
|
||||
sourceBuilder().innerHitsBuilder().addNestedInnerHits(name, path, innerHit);
|
||||
return this;
|
||||
}
|
||||
|
||||
@ -82,21 +476,17 @@ public class InnerHitsBuilder implements ToXContent {
|
||||
* Adds a nested inner hit definition that collects inner hits for hits
|
||||
* on this inner hit level.
|
||||
*/
|
||||
public InnerHit addInnerHit(String name, InnerHit innerHit) {
|
||||
sourceBuilder().innerHitsBuilder().addInnerHit(name, innerHit);
|
||||
public InnerHit addParentChildInnerHits(String name, String type, InnerHit innerHit) {
|
||||
sourceBuilder().innerHitsBuilder().addParentChildInnerHits(name, type, innerHit);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (path != null) {
|
||||
builder.startObject("path").startObject(path);
|
||||
} else {
|
||||
builder.startObject("type").startObject(type);
|
||||
if (sourceBuilder != null) {
|
||||
sourceBuilder.innerToXContent(builder, params);
|
||||
}
|
||||
super.toXContent(builder, params);
|
||||
return builder.endObject().endObject();
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
|
||||
}
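The old single addInnerHit entry point relied on the InnerHit itself carrying a path or type; the split into addNestedInnerHits and addParentChildInnerHits makes the join dimension an explicit argument and lets duplicate names be rejected up front. A usage sketch, assuming an illustrative "comment" child type and "comments" nested path:

// Sketch: registering both flavours of inner hits on the top-level builder.
InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
innerHitsBuilder.addParentChildInnerHits("top_comments", "comment",
        new InnerHitsBuilder.InnerHit().setSize(3).addSort("likes", SortOrder.DESC));
innerHitsBuilder.addNestedInnerHits("nested_comments", "comments",
        new InnerHitsBuilder.InnerHit().setFetchSource(true));
// Re-using a name throws an IllegalArgumentException (see addNestedInnerHits above).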
|
||||
|
@ -26,6 +26,7 @@ import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.SizeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.search.child.ScoreType;
|
||||
import org.elasticsearch.indices.IndexAlreadyExistsException;
|
||||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilders;
|
||||
@ -285,12 +286,12 @@ public class ChildSearchBenchmark {
|
||||
System.out.println("--> Running has_child query with score type");
|
||||
// run parent child score query
|
||||
for (int j = 0; j < QUERY_WARMUP; j++) {
|
||||
client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreType("max")).execute().actionGet();
|
||||
client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreType(ScoreType.MAX)).execute().actionGet();
|
||||
}
|
||||
|
||||
totalQueryTime = 0;
|
||||
for (int j = 0; j < QUERY_COUNT; j++) {
|
||||
SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreType("max")).execute().actionGet();
|
||||
SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreType(ScoreType.MAX)).execute().actionGet();
|
||||
if (j % 10 == 0) {
|
||||
System.out.println("--> hits [" + j + "], got [" + searchResponse.getHits().totalHits() + "]");
|
||||
}
|
||||
@ -300,7 +301,7 @@ public class ChildSearchBenchmark {
|
||||
|
||||
totalQueryTime = 0;
|
||||
for (int j = 0; j < QUERY_COUNT; j++) {
|
||||
SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", matchAllQuery()).scoreType("max")).execute().actionGet();
|
||||
SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", matchAllQuery()).scoreType(ScoreType.MAX)).execute().actionGet();
|
||||
if (j % 10 == 0) {
|
||||
System.out.println("--> hits [" + j + "], got [" + searchResponse.getHits().totalHits() + "]");
|
||||
}
|
||||
|
@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.SizeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.search.child.ScoreType;
|
||||
import org.elasticsearch.node.Node;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -178,7 +179,7 @@ public class ChildSearchShortCircuitBenchmark {
|
||||
for (int i = 1; i < PARENT_COUNT; i *= 2) {
|
||||
for (int j = 0; j < QUERY_COUNT; j++) {
|
||||
SearchResponse searchResponse = client.prepareSearch(indexName)
|
||||
.setQuery(hasChildQuery("child", matchQuery("field2", i)).scoreType("max"))
|
||||
.setQuery(hasChildQuery("child", matchQuery("field2", i)).scoreType(ScoreType.MAX))
|
||||
.execute().actionGet();
|
||||
if (searchResponse.getHits().totalHits() != i) {
|
||||
System.err.println("--> mismatch on hits");
|
||||
|
@ -118,7 +118,8 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
|
||||
*/
|
||||
@BeforeClass
|
||||
public static void init() throws IOException {
|
||||
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
|
||||
// we have to prefer CURRENT explicitly, since with the range of versions we support it is rather unlikely to pick the current one at random.
|
||||
Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
|
||||
Settings settings = Settings.settingsBuilder()
|
||||
.put("name", BaseQueryTestCase.class.toString())
|
||||
.put("path.home", createTempDir())
|
||||
@ -146,7 +147,7 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
|
||||
new AnalysisModule(indexSettings, new IndicesAnalysisService(indexSettings)),
|
||||
new SimilarityModule(indexSettings),
|
||||
new IndexNameModule(index),
|
||||
new AbstractModule() {
|
||||
new AbstractModule() {
|
||||
@Override
|
||||
protected void configure() {
|
||||
Multibinder.newSetBinder(binder(), ScoreFunctionParser.class);
|
||||
@ -197,12 +198,16 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
|
||||
if (randomBoolean()) {
|
||||
QueryShardContext.setTypes(types);
|
||||
} else {
|
||||
TestSearchContext testSearchContext = new TestSearchContext();
|
||||
testSearchContext.setTypes(types);
|
||||
SearchContext.setCurrent(testSearchContext);
|
||||
setSearchContext(types); // TODO should this be set after we parsed and before we build the query? it makes more sense?
|
||||
}
|
||||
}
|
||||
|
||||
protected void setSearchContext(String[] types) {
|
||||
TestSearchContext testSearchContext = new TestSearchContext();
|
||||
testSearchContext.setTypes(types);
|
||||
SearchContext.setCurrent(testSearchContext);
|
||||
}
|
||||
|
||||
@After
|
||||
public void afterTest() {
|
||||
QueryShardContext.removeTypes();
|
||||
@ -388,7 +393,7 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
|
||||
}
|
||||
|
||||
//we use the streaming infra to create a copy of the query provided as argument
|
||||
private QB copyQuery(QB query) throws IOException {
|
||||
protected QB copyQuery(QB query) throws IOException {
|
||||
try (BytesStreamOutput output = new BytesStreamOutput()) {
|
||||
query.writeTo(output);
|
||||
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
|
||||
|
@ -0,0 +1,221 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.xcontent.*;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.support.QueryInnerHits;
|
||||
import org.elasticsearch.index.search.child.ChildrenQuery;
|
||||
import org.elasticsearch.index.search.child.ScoreType;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
import org.elasticsearch.test.TestSearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
|
||||
public class HasChildQueryBuilderTests extends BaseQueryTestCase<HasChildQueryBuilder> {
|
||||
protected static final String PARENT_TYPE = "parent";
|
||||
protected static final String CHILD_TYPE = "child";
|
||||
|
||||
@Override
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
MapperService mapperService = queryParserService().mapperService;
|
||||
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
|
||||
STRING_FIELD_NAME, "type=string",
|
||||
INT_FIELD_NAME, "type=integer",
|
||||
DOUBLE_FIELD_NAME, "type=double",
|
||||
BOOLEAN_FIELD_NAME, "type=boolean",
|
||||
DATE_FIELD_NAME, "type=date",
|
||||
OBJECT_FIELD_NAME, "type=object"
|
||||
).string()), false, false);
|
||||
mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
|
||||
"_parent", "type=" + PARENT_TYPE,
|
||||
STRING_FIELD_NAME, "type=string",
|
||||
INT_FIELD_NAME, "type=integer",
|
||||
DOUBLE_FIELD_NAME, "type=double",
|
||||
BOOLEAN_FIELD_NAME, "type=boolean",
|
||||
DATE_FIELD_NAME, "type=date",
|
||||
OBJECT_FIELD_NAME, "type=object"
|
||||
).string()), false, false);
|
||||
}
|
||||
|
||||
@Override
protected void setSearchContext(String[] types) {
|
||||
final MapperService mapperService = queryParserService().mapperService;
|
||||
final IndexFieldDataService fieldData = queryParserService().fieldDataService;
|
||||
TestSearchContext testSearchContext = new TestSearchContext() {
|
||||
private InnerHitsContext context;
|
||||
|
||||
|
||||
@Override
|
||||
public void innerHits(InnerHitsContext innerHitsContext) {
|
||||
context = innerHitsContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InnerHitsContext innerHits() {
|
||||
return context;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MapperService mapperService() {
|
||||
return mapperService; // need to build / parse inner hits sort fields
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexFieldDataService fieldData() {
|
||||
return fieldData; // need to build / parse inner hits sort fields
|
||||
}
|
||||
};
|
||||
testSearchContext.setTypes(types);
|
||||
SearchContext.setCurrent(testSearchContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a {@link HasChildQueryBuilder} with random values all over the place
|
||||
*/
|
||||
@Override
|
||||
protected HasChildQueryBuilder doCreateTestQueryBuilder() {
|
||||
int min = randomIntBetween(0, Integer.MAX_VALUE / 2);
|
||||
int max = randomIntBetween(min, Integer.MAX_VALUE);
|
||||
InnerHitsBuilder.InnerHit innerHit = new InnerHitsBuilder.InnerHit().setSize(100).addSort(STRING_FIELD_NAME, SortOrder.ASC);
|
||||
return new HasChildQueryBuilder(CHILD_TYPE,
|
||||
RandomQueryBuilder.createQuery(random()), max, min, randomIntBetween(0, Integer.MAX_VALUE),
|
||||
RandomPicks.randomFrom(random(), ScoreType.values()),
|
||||
SearchContext.current() == null ? null : new QueryInnerHits("inner_hits_name", innerHit));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(HasChildQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
QueryBuilder innerQueryBuilder = queryBuilder.query();
|
||||
if (innerQueryBuilder instanceof EmptyQueryBuilder) {
|
||||
assertNull(query);
|
||||
} else if (context.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
|
||||
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
|
||||
assertEquals(queryBuilder.minChildren(), lpq.getMinChildren());
|
||||
assertEquals(queryBuilder.maxChildren(), lpq.getMaxChildren());
|
||||
assertEquals(HasChildQueryBuilder.scoreTypeToScoreMode(queryBuilder.scoreType()), lpq.getScoreMode()); // TODO: why do we carry both ScoreType and Lucene's ScoreMode?
|
||||
} else {
|
||||
assertThat(query, instanceOf(ChildrenQuery.class));
|
||||
ChildrenQuery lpq = (ChildrenQuery) query;
|
||||
assertEquals(queryBuilder.minChildren(), lpq.getMinChildren());
|
||||
assertEquals(queryBuilder.maxChildren(), lpq.getMaxChildren());
|
||||
assertEquals(queryBuilder.scoreType(), lpq.getScoreType());
|
||||
assertEquals(queryBuilder.shortCircuitCutoff(), lpq.getShortCircuitParentDocSet());
|
||||
}
|
||||
if (queryBuilder.innerHit() != null) {
|
||||
assertNotNull(SearchContext.current());
|
||||
if (query != null) {
|
||||
assertNotNull(SearchContext.current().innerHits());
|
||||
assertEquals(1, SearchContext.current().innerHits().getInnerHits().size());
|
||||
assertTrue(SearchContext.current().innerHits().getInnerHits().containsKey("inner_hits_name"));
|
||||
InnerHitsContext.BaseInnerHits innerHits = SearchContext.current().innerHits().getInnerHits().get("inner_hits_name");
|
||||
assertEquals(innerHits.size(), 100);
|
||||
assertEquals(innerHits.sort().getSort().length, 1);
|
||||
assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME);
|
||||
} else {
|
||||
assertNull(SearchContext.current().innerHits());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testIllegalValues() {
|
||||
QueryBuilder query = RandomQueryBuilder.createQuery(random());
|
||||
try {
|
||||
new HasChildQueryBuilder(null, query);
|
||||
fail("must not be null");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
|
||||
}
|
||||
|
||||
try {
|
||||
new HasChildQueryBuilder("foo", null);
|
||||
fail("must not be null");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
|
||||
}
|
||||
HasChildQueryBuilder foo = new HasChildQueryBuilder("foo", query);// all good
|
||||
try {
|
||||
foo.scoreType(null);
|
||||
fail("must not be null");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
|
||||
}
|
||||
final int positiveValue = randomIntBetween(0, Integer.MAX_VALUE);
|
||||
try {
|
||||
foo.shortCircuitCutoff(randomIntBetween(Integer.MIN_VALUE, -1));
|
||||
fail("must not be negative");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
|
||||
}
|
||||
|
||||
foo.shortCircuitCutoff(positiveValue);
|
||||
assertEquals(positiveValue, foo.shortCircuitCutoff());
|
||||
|
||||
try {
|
||||
foo.minChildren(randomIntBetween(Integer.MIN_VALUE, -1));
|
||||
fail("must not be negative");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
|
||||
}
|
||||
foo.minChildren(positiveValue);
|
||||
assertEquals(positiveValue, foo.minChildren());
|
||||
try {
|
||||
foo.maxChildren(randomIntBetween(Integer.MIN_VALUE, -1));
|
||||
fail("must not be negative");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
|
||||
}
|
||||
|
||||
foo.maxChildren(positiveValue);
|
||||
assertEquals(positiveValue, foo.maxChildren());
|
||||
}
|
||||
|
||||
public void testParseFromJSON() throws IOException {
|
||||
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/has-child-with-inner-hits.json").trim();
|
||||
HasChildQueryBuilder queryBuilder = (HasChildQueryBuilder) parseQuery(query);
|
||||
assertEquals(query, queryBuilder.maxChildren(), 1217235442);
|
||||
assertEquals(query, queryBuilder.minChildren(), 883170873);
|
||||
assertEquals(query, queryBuilder.shortCircuitCutoff(), 340606183);
|
||||
assertEquals(query, queryBuilder.boost(), 2.0f, 0.0f);
|
||||
assertEquals(query, queryBuilder.queryName(), "WNzYMJKRwePuRBh");
|
||||
assertEquals(query, queryBuilder.childType(), "child");
|
||||
assertEquals(query, queryBuilder.scoreType(), ScoreType.AVG);
|
||||
assertNotNull(query, queryBuilder.innerHit());
|
||||
assertEquals(query, queryBuilder.innerHit(), new QueryInnerHits("inner_hits_name", new InnerHitsBuilder.InnerHit().setSize(100).addSort("mapped_string", SortOrder.ASC)));
|
||||
// now assert that we actually generate the same JSON
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
|
||||
queryBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
assertEquals(query, builder.string());
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,31 @@
|
||||
{
  "has_child" : {
    "query" : {
      "range" : {
        "mapped_string" : {
          "from" : "agJhRET",
          "to" : "zvqIq",
          "include_lower" : true,
          "include_upper" : true,
          "boost" : 1.0
        }
      }
    },
    "child_type" : "child",
    "score_type" : "avg",
    "min_children" : 883170873,
    "max_children" : 1217235442,
    "short_circuit_cutoff" : 340606183,
    "boost" : 2.0,
    "_name" : "WNzYMJKRwePuRBh",
    "inner_hits" : {
      "name" : "inner_hits_name",
      "size" : 100,
      "sort" : [ {
        "mapped_string" : {
          "order" : "asc"
        }
      } ]
    }
  }
}
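The fixture above is what testParseFromJSON round-trips; the same query can be assembled with the fluent builder API exercised throughout this change. A hedged sketch (values copied from the fixture; the innerHit(...) setter on HasChildQueryBuilder is assumed to mirror the one used for nested queries elsewhere in this patch):

// Sketch: programmatic equivalent of the JSON fixture above.
HasChildQueryBuilder builder = QueryBuilders.hasChildQuery("child",
                QueryBuilders.rangeQuery("mapped_string").from("agJhRET").to("zvqIq"))
        .scoreType(ScoreType.AVG)
        .minChildren(883170873)
        .maxChildren(1217235442)
        .shortCircuitCutoff(340606183)
        .boost(2.0f)
        .queryName("WNzYMJKRwePuRBh")
        .innerHit(new QueryInnerHits("inner_hits_name",
                new InnerHitsBuilder.InnerHit().setSize(100).addSort("mapped_string", SortOrder.ASC)));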
|
@ -0,0 +1,80 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.query.support;
|
||||
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class QueryInnerHitsTests extends ESTestCase {
|
||||
|
||||
public void testSerialize() throws IOException {
|
||||
copyAndAssert(new QueryInnerHits());
|
||||
copyAndAssert(new QueryInnerHits("foo", new InnerHitsBuilder.InnerHit()));
|
||||
copyAndAssert(new QueryInnerHits("foo", null));
|
||||
copyAndAssert(new QueryInnerHits("foo", new InnerHitsBuilder.InnerHit().setSize(randomIntBetween(0, 100))));
|
||||
}
|
||||
|
||||
public void testToXContent() throws IOException {
|
||||
assertJson("{\"inner_hits\":{}}", new QueryInnerHits());
|
||||
assertJson("{\"inner_hits\":{\"name\":\"foo\"}}", new QueryInnerHits("foo", new InnerHitsBuilder.InnerHit()));
|
||||
assertJson("{\"inner_hits\":{\"name\":\"bar\"}}", new QueryInnerHits("bar", null));
|
||||
assertJson("{\"inner_hits\":{\"name\":\"foo\",\"size\":42}}", new QueryInnerHits("foo", new InnerHitsBuilder.InnerHit().setSize(42)));
|
||||
assertJson("{\"inner_hits\":{\"name\":\"boom\",\"from\":66,\"size\":666}}", new QueryInnerHits("boom", new InnerHitsBuilder.InnerHit().setFrom(66).setSize(666)));
|
||||
}
|
||||
|
||||
private void assertJson(String expected, QueryInnerHits hits) throws IOException {
|
||||
QueryInnerHits queryInnerHits = copyAndAssert(hits);
|
||||
String actual;
|
||||
if (randomBoolean()) {
|
||||
actual = oneLineJSON(queryInnerHits);
|
||||
} else {
|
||||
actual = oneLineJSON(hits);
|
||||
}
|
||||
assertEquals(expected, actual);
|
||||
XContentParser parser = hits.getXcontentParser();
|
||||
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
|
||||
QueryInnerHits other = copyAndAssert(new QueryInnerHits(parser));
|
||||
assertEquals(expected, oneLineJSON(other));
|
||||
}
|
||||
|
||||
public QueryInnerHits copyAndAssert(QueryInnerHits hits) throws IOException {
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
hits.writeTo(out);
|
||||
QueryInnerHits copy = randomBoolean() ? hits.readFrom(StreamInput.wrap(out.bytes())) : new QueryInnerHits(StreamInput.wrap(out.bytes()));
|
||||
assertEquals(copy.toString() + " vs. " + hits.toString(), copy, hits);
|
||||
return copy;
|
||||
}
|
||||
|
||||
private String oneLineJSON(QueryInnerHits hits) throws IOException {
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
hits.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
return builder.string().trim();
|
||||
}
|
||||
}
|
@ -243,11 +243,11 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
|
||||
QueryBuilder queryBuilder;
|
||||
if (random().nextBoolean()) {
|
||||
queryBuilder = hasChildQuery("child", termQuery("field1", childValue))
|
||||
.setShortCircuitCutoff(shortCircuitParentDocSet);
|
||||
.shortCircuitCutoff(shortCircuitParentDocSet);
|
||||
} else {
|
||||
queryBuilder = constantScoreQuery(
|
||||
hasChildQuery("child", termQuery("field1", childValue))
|
||||
.setShortCircuitCutoff(shortCircuitParentDocSet)
|
||||
.shortCircuitCutoff(shortCircuitParentDocSet)
|
||||
);
|
||||
}
|
||||
// Using a FQ, will invoke / test the Scorer#advance(..) and also let the Weight#scorer not get live docs as acceptedDocs
|
||||
|
@ -204,10 +204,10 @@ public class ChildrenQueryTests extends AbstractChildTestCase {
|
||||
int maxChildren = random().nextInt(2) * scaledRandomIntBetween(minChildren, 110);
|
||||
|
||||
QueryBuilder queryBuilder = hasChildQuery("child", constantScoreQuery(termQuery("field1", childValue)))
|
||||
.scoreType(scoreType.name().toLowerCase(Locale.ENGLISH))
|
||||
.scoreType(scoreType)
|
||||
.minChildren(minChildren)
|
||||
.maxChildren(maxChildren)
|
||||
.setShortCircuitCutoff(shortCircuitParentDocSet);
|
||||
.shortCircuitCutoff(shortCircuitParentDocSet);
|
||||
// Using a FQ, will invoke / test the Scorer#advance(..) and also let the Weight#scorer not get live docs as acceptedDocs
|
||||
queryBuilder = filteredQuery(queryBuilder, notQuery(termQuery("filter", "me")));
|
||||
Query query = parseQuery(queryBuilder);
|
||||
@ -365,8 +365,8 @@ public class ChildrenQueryTests extends AbstractChildTestCase {
|
||||
// child query that returns the score as the value of "childScore" for each child document, with the parent's score determined by the score type
|
||||
QueryBuilder childQueryBuilder = functionScoreQuery(typeQuery("child")).add(new FieldValueFactorFunctionBuilder(CHILD_SCORE_NAME));
|
||||
QueryBuilder queryBuilder = hasChildQuery("child", childQueryBuilder)
|
||||
.scoreType(scoreType.name().toLowerCase(Locale.ENGLISH))
|
||||
.setShortCircuitCutoff(parentDocs);
|
||||
.scoreType(scoreType)
|
||||
.shortCircuitCutoff(parentDocs);
|
||||
|
||||
// Perform the search for the documents using the selected score type
|
||||
Query query = parseQuery(queryBuilder);
|
||||
|
@ -41,12 +41,11 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.engine.DocumentMissingException;
|
||||
import org.elasticsearch.index.engine.VersionConflictEngineException;
|
||||
import org.elasticsearch.index.percolator.PercolatorException;
|
||||
import org.elasticsearch.index.query.MatchQueryBuilder;
|
||||
import org.elasticsearch.index.query.Operator;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.index.query.functionscore.factor.FactorBuilder;
|
||||
import org.elasticsearch.index.query.support.QueryInnerHitBuilder;
|
||||
import org.elasticsearch.index.query.support.QueryInnerHits;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
@ -2013,7 +2012,7 @@ public class PercolatorIT extends ESIntegTestCase {
|
||||
assertAcked(prepareCreate("index").addMapping("mapping", mapping));
|
||||
try {
|
||||
client().prepareIndex("index", PercolatorService.TYPE_NAME, "1")
|
||||
.setSource(jsonBuilder().startObject().field("query", nestedQuery("nested", matchQuery("nested.name", "value")).innerHit(new QueryInnerHitBuilder())).endObject())
|
||||
.setSource(jsonBuilder().startObject().field("query", nestedQuery("nested", matchQuery("nested.name", "value")).innerHit(new QueryInnerHits())).endObject())
|
||||
.execute().actionGet();
|
||||
fail("Expected a parse error, because inner_hits isn't supported in the percolate api");
|
||||
} catch (Exception e) {
|
||||
|
@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.fielddata.FieldDataType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.search.child.ScoreType;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
import org.elasticsearch.test.ESIntegTestCase.Scope;
|
||||
import org.junit.Test;
|
||||
@ -86,7 +87,7 @@ public class ChildQuerySearchBwcIT extends ChildQuerySearchIT {
|
||||
assertSearchHits(searchResponse, parentId);
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX))
|
||||
.get();
|
||||
assertHitCount(searchResponse, 1l);
|
||||
assertSearchHits(searchResponse, parentId);
|
||||
@ -125,7 +126,7 @@ public class ChildQuerySearchBwcIT extends ChildQuerySearchIT {
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setExplain(true)
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX))
|
||||
.get();
|
||||
assertHitCount(searchResponse, 1l);
|
||||
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet..."));
|
||||
@ -138,7 +139,7 @@ public class ChildQuerySearchBwcIT extends ChildQuerySearchIT {
|
||||
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet..."));
|
||||
|
||||
ExplainResponse explainResponse = client().prepareExplain("test", "parent", parentId)
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX))
|
||||
.get();
|
||||
assertThat(explainResponse.isExists(), equalTo(true));
|
||||
// TODO: improve test once explanations are actually implemented
|
||||
|
@ -30,7 +30,6 @@ import org.elasticsearch.common.lucene.search.function.CombineFunction;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.index.cache.IndexCacheModule;
|
||||
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
|
||||
import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
import org.elasticsearch.index.query.HasChildQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
@ -287,7 +286,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
for (int i = 1; i <= 10; i++) {
|
||||
logger.info("Round {}", i);
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery()).scoreType("max")))
|
||||
.setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery()).scoreType(ScoreType.MAX)))
|
||||
.get();
|
||||
assertNoFailures(searchResponse);
|
||||
searchResponse = client().prepareSearch("test")
|
||||
@ -549,7 +548,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get();
|
||||
refresh();
|
||||
|
||||
CountResponse countResponse = client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
|
||||
CountResponse countResponse = client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX))
|
||||
.get();
|
||||
assertHitCount(countResponse, 1l);
|
||||
|
||||
@ -580,7 +579,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setExplain(true)
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX))
|
||||
.get();
|
||||
assertHitCount(searchResponse, 1l);
|
||||
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("Score based on join value p1"));
|
||||
@ -593,7 +592,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("Score based on join value p1"));
|
||||
|
||||
ExplainResponse explainResponse = client().prepareExplain("test", "parent", parentId)
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX))
|
||||
.get();
|
||||
assertThat(explainResponse.isExists(), equalTo(true));
|
||||
assertThat(explainResponse.getExplanation().getDetails()[0].getDescription(), equalTo("Score based on join value p1"));
|
||||
@ -671,7 +670,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
"child",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0),
|
||||
scriptFunction(new Script("doc['c_field1'].value")))
|
||||
.boostMode(CombineFunction.REPLACE.getName())).scoreType("sum")).get();
|
||||
.boostMode(CombineFunction.REPLACE.getName())).scoreType(ScoreType.SUM)).get();
|
||||
|
||||
assertThat(response.getHits().totalHits(), equalTo(3l));
|
||||
assertThat(response.getHits().hits()[0].id(), equalTo("1"));
|
||||
@ -688,7 +687,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
"child",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0),
|
||||
scriptFunction(new Script("doc['c_field1'].value")))
|
||||
.boostMode(CombineFunction.REPLACE.getName())).scoreType("max")).get();
|
||||
.boostMode(CombineFunction.REPLACE.getName())).scoreType(ScoreType.MAX)).get();
|
||||
|
||||
assertThat(response.getHits().totalHits(), equalTo(3l));
|
||||
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
|
||||
@ -705,7 +704,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
"child",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0),
|
||||
scriptFunction(new Script("doc['c_field1'].value")))
|
||||
.boostMode(CombineFunction.REPLACE.getName())).scoreType("avg")).get();
|
||||
.boostMode(CombineFunction.REPLACE.getName())).scoreType(ScoreType.AVG)).get();
|
||||
|
||||
assertThat(response.getHits().totalHits(), equalTo(3l));
|
||||
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
|
||||
@ -762,7 +761,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
assertNoFailures(response);
|
||||
assertThat(response.getHits().totalHits(), equalTo(0l));
|
||||
|
||||
response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).scoreType("max"))
|
||||
response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).scoreType(ScoreType.MAX))
|
||||
.get();
|
||||
assertNoFailures(response);
|
||||
assertThat(response.getHits().totalHits(), equalTo(0l));
|
||||
@ -858,7 +857,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
SearchType[] searchTypes = new SearchType[]{SearchType.QUERY_THEN_FETCH, SearchType.DFS_QUERY_THEN_FETCH};
|
||||
for (SearchType searchType : searchTypes) {
|
||||
SearchResponse searchResponse = client().prepareSearch("test").setSearchType(searchType)
|
||||
.setQuery(hasChildQuery("child", prefixQuery("c_field", "c")).scoreType("max")).addSort("p_field", SortOrder.ASC)
|
||||
.setQuery(hasChildQuery("child", prefixQuery("c_field", "c")).scoreType(ScoreType.MAX)).addSort("p_field", SortOrder.ASC)
|
||||
.setSize(5).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(10L));
|
||||
@ -901,7 +900,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreType("sum")).get();
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreType(ScoreType.SUM)).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
|
||||
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
|
||||
@ -926,7 +925,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
client().admin().indices().prepareRefresh("test").get();
|
||||
}
|
||||
|
||||
searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreType("sum"))
|
||||
searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreType(ScoreType.SUM))
|
||||
.get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
|
||||
@ -962,7 +961,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
client().prepareIndex("test", "child", "c5").setSource("c_field", "x").setParent("p2").get();
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery()).scoreType("sum"))
|
||||
SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery()).scoreType(ScoreType.SUM))
|
||||
.setMinScore(3) // Score needs to be 3 or above!
|
||||
.get();
|
||||
assertNoFailures(searchResponse);
|
||||
@ -1231,7 +1230,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
client().prepareIndex("test", "child", "c3").setParent("p2").setSource("c_field", "red").get();
|
||||
refresh();
|
||||
|
||||
String scoreMode = ScoreType.values()[getRandom().nextInt(ScoreType.values().length)].name().toLowerCase(Locale.ROOT);
|
||||
ScoreType scoreMode = ScoreType.values()[getRandom().nextInt(ScoreType.values().length)];
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(filteredQuery(QueryBuilders.hasChildQuery("child", termQuery("c_field", "blue")).scoreType(scoreMode), notQuery(termQuery("p_field", "3"))))
|
||||
.get();
|
||||
@ -1257,7 +1256,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get();
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max").queryName("test"))
|
||||
SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX).queryName("test"))
|
||||
.get();
|
||||
assertHitCount(searchResponse, 1l);
|
||||
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
|
||||
@ -1305,7 +1304,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
|
||||
try {
|
||||
client().prepareSearch("test")
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX))
|
||||
.get();
|
||||
fail();
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
@ -1573,7 +1572,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
return indexBuilders;
}

private SearchResponse minMaxQuery(String scoreType, int minChildren, int maxChildren, int cutoff) throws SearchPhaseExecutionException {
private SearchResponse minMaxQuery(ScoreType scoreType, int minChildren, int maxChildren, int cutoff) throws SearchPhaseExecutionException {
return client()
.prepareSearch("test")
.setQuery(
@ -1584,7 +1583,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.add(QueryBuilders.matchAllQuery(), weightFactorFunction(1))
.add(QueryBuilders.termQuery("foo", "three"), weightFactorFunction(1))
.add(QueryBuilders.termQuery("foo", "four"), weightFactorFunction(1))).scoreType(scoreType)
.minChildren(minChildren).maxChildren(maxChildren).setShortCircuitCutoff(cutoff))
.minChildren(minChildren).maxChildren(maxChildren).shortCircuitCutoff(cutoff))
.addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC).get();
}

@ -1593,7 +1592,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.prepareSearch("test")
.setQuery(
QueryBuilders.constantScoreQuery(QueryBuilders.hasChildQuery("child", termQuery("foo", "two"))
.minChildren(minChildren).maxChildren(maxChildren).setShortCircuitCutoff(cutoff)))
.minChildren(minChildren).maxChildren(maxChildren).shortCircuitCutoff(cutoff)))
.addSort("id", SortOrder.ASC).setTrackScores(true).get();
}

@ -1609,7 +1608,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
int cutoff = getRandom().nextInt(4);

// Score mode = NONE
response = minMaxQuery("none", 0, 0, cutoff);
response = minMaxQuery(ScoreType.NONE, 0, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@ -1619,7 +1618,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("none", 1, 0, cutoff);
response = minMaxQuery(ScoreType.NONE, 1, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@ -1629,7 +1628,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("none", 2, 0, cutoff);
response = minMaxQuery(ScoreType.NONE, 2, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
@ -1637,17 +1636,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("4"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));

response = minMaxQuery("none", 3, 0, cutoff);
response = minMaxQuery(ScoreType.NONE, 3, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));

response = minMaxQuery("none", 4, 0, cutoff);
response = minMaxQuery(ScoreType.NONE, 4, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(0l));

response = minMaxQuery("none", 0, 4, cutoff);
response = minMaxQuery(ScoreType.NONE, 0, 4, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@ -1657,7 +1656,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("none", 0, 3, cutoff);
response = minMaxQuery(ScoreType.NONE, 0, 3, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@ -1667,7 +1666,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("none", 0, 2, cutoff);
response = minMaxQuery(ScoreType.NONE, 0, 2, cutoff);

assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@ -1675,21 +1674,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));

response = minMaxQuery("none", 2, 2, cutoff);
response = minMaxQuery(ScoreType.NONE, 2, 2, cutoff);

assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));

try {
response = minMaxQuery("none", 3, 2, cutoff);
response = minMaxQuery(ScoreType.NONE, 3, 2, cutoff);
fail();
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'"));
}

// Score mode = SUM
response = minMaxQuery("sum", 0, 0, cutoff);
response = minMaxQuery(ScoreType.SUM, 0, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1699,7 +1698,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("sum", 1, 0, cutoff);
response = minMaxQuery(ScoreType.SUM, 1, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1709,7 +1708,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("sum", 2, 0, cutoff);
response = minMaxQuery(ScoreType.SUM, 2, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1717,17 +1716,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
assertThat(response.getHits().hits()[1].score(), equalTo(3f));

response = minMaxQuery("sum", 3, 0, cutoff);
response = minMaxQuery(ScoreType.SUM, 3, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(6f));

response = minMaxQuery("sum", 4, 0, cutoff);
response = minMaxQuery(ScoreType.SUM, 4, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(0l));

response = minMaxQuery("sum", 0, 4, cutoff);
response = minMaxQuery(ScoreType.SUM, 0, 4, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1737,7 +1736,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("sum", 0, 3, cutoff);
response = minMaxQuery(ScoreType.SUM, 0, 3, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1747,7 +1746,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("sum", 0, 2, cutoff);
response = minMaxQuery(ScoreType.SUM, 0, 2, cutoff);

assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
@ -1755,21 +1754,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));

response = minMaxQuery("sum", 2, 2, cutoff);
response = minMaxQuery(ScoreType.SUM, 2, 2, cutoff);

assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));

try {
response = minMaxQuery("sum", 3, 2, cutoff);
response = minMaxQuery(ScoreType.SUM, 3, 2, cutoff);
fail();
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'"));
}

// Score mode = MAX
response = minMaxQuery("max", 0, 0, cutoff);
response = minMaxQuery(ScoreType.MAX, 0, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1779,7 +1778,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("max", 1, 0, cutoff);
response = minMaxQuery(ScoreType.MAX, 1, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1789,7 +1788,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("max", 2, 0, cutoff);
response = minMaxQuery(ScoreType.MAX, 2, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1797,17 +1796,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
assertThat(response.getHits().hits()[1].score(), equalTo(2f));

response = minMaxQuery("max", 3, 0, cutoff);
response = minMaxQuery(ScoreType.MAX, 3, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));

response = minMaxQuery("max", 4, 0, cutoff);
response = minMaxQuery(ScoreType.MAX, 4, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(0l));

response = minMaxQuery("max", 0, 4, cutoff);
response = minMaxQuery(ScoreType.MAX, 0, 4, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1817,7 +1816,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("max", 0, 3, cutoff);
response = minMaxQuery(ScoreType.MAX, 0, 3, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1827,7 +1826,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("max", 0, 2, cutoff);
response = minMaxQuery(ScoreType.MAX, 0, 2, cutoff);

assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
@ -1835,21 +1834,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));

response = minMaxQuery("max", 2, 2, cutoff);
response = minMaxQuery(ScoreType.MAX, 2, 2, cutoff);

assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));

try {
response = minMaxQuery("max", 3, 2, cutoff);
response = minMaxQuery(ScoreType.MAX, 3, 2, cutoff);
fail();
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'"));
}

// Score mode = AVG
response = minMaxQuery("avg", 0, 0, cutoff);
response = minMaxQuery(ScoreType.AVG, 0, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1859,7 +1858,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("avg", 1, 0, cutoff);
response = minMaxQuery(ScoreType.AVG, 1, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1869,7 +1868,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("avg", 2, 0, cutoff);
response = minMaxQuery(ScoreType.AVG, 2, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1877,17 +1876,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
assertThat(response.getHits().hits()[1].score(), equalTo(1.5f));

response = minMaxQuery("avg", 3, 0, cutoff);
response = minMaxQuery(ScoreType.AVG, 3, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));

response = minMaxQuery("avg", 4, 0, cutoff);
response = minMaxQuery(ScoreType.AVG, 4, 0, cutoff);

assertThat(response.getHits().totalHits(), equalTo(0l));

response = minMaxQuery("avg", 0, 4, cutoff);
response = minMaxQuery(ScoreType.AVG, 0, 4, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1897,7 +1896,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("avg", 0, 3, cutoff);
response = minMaxQuery(ScoreType.AVG, 0, 3, cutoff);

assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@ -1907,7 +1906,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));

response = minMaxQuery("avg", 0, 2, cutoff);
response = minMaxQuery(ScoreType.AVG, 0, 2, cutoff);

assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
@ -1915,14 +1914,14 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));

response = minMaxQuery("avg", 2, 2, cutoff);
response = minMaxQuery(ScoreType.AVG, 2, 2, cutoff);

assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(1.5f));

try {
response = minMaxQuery("avg", 3, 2, cutoff);
response = minMaxQuery(ScoreType.AVG, 3, 2, cutoff);
fail();
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'"));
@ -2033,7 +2032,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {

static HasChildQueryBuilder hasChildQuery(String type, QueryBuilder queryBuilder) {
HasChildQueryBuilder hasChildQueryBuilder = QueryBuilders.hasChildQuery(type, queryBuilder);
hasChildQueryBuilder.setShortCircuitCutoff(randomInt(10));
hasChildQueryBuilder.shortCircuitCutoff(randomInt(10));
return hasChildQueryBuilder;
}

@ -27,10 +27,11 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.support.QueryInnerHitBuilder;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
@ -87,9 +88,9 @@ public class InnerHitsIT extends ESIntegTestCase {

// Inner hits can be defined in two ways: 1) with the query 2) as seperate inner_hit definition
SearchRequest[] searchRequests = new SearchRequest[]{
client().prepareSearch("articles").setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder().setName("comment"))).request(),
client().prepareSearch("articles").setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits("comment", null))).request(),
client().prepareSearch("articles").setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")))
.addInnerHit("comment", new InnerHitsBuilder.InnerHit().setPath("comments").setQuery(matchQuery("comments.message", "fox"))).request()
.addNestedInnerHits("comment", "comments", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("comments.message", "fox"))).request()
};
for (SearchRequest searchRequest : searchRequests) {
SearchResponse response = client().search(searchRequest).actionGet();
@ -111,11 +112,11 @@ public class InnerHitsIT extends ESIntegTestCase {
searchRequests = new SearchRequest[] {
client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant")))
.addInnerHit("comment", new InnerHitsBuilder.InnerHit().setPath("comments").setQuery(matchQuery("comments.message", "elephant"))).request(),
.addNestedInnerHits("comment", "comments", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("comments.message", "elephant"))).request(),
client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant")).innerHit(new QueryInnerHitBuilder().setName("comment"))).request(),
.setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant")).innerHit(new QueryInnerHits("comment", null))).request(),
client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant")).innerHit(new QueryInnerHitBuilder().setName("comment").addSort("_doc", SortOrder.DESC))).request()
.setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant")).innerHit(new QueryInnerHits("comment", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC)))).request()
};
for (SearchRequest searchRequest : searchRequests) {
SearchResponse response = client().search(searchRequest).actionGet();
@ -137,24 +138,24 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(innerHits.getAt(2).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(2).getNestedIdentity().getOffset(), equalTo(2));
}

InnerHitsBuilder.InnerHit innerHit = new InnerHitsBuilder.InnerHit();
innerHit.highlightBuilder().field("comments.message");
innerHit.setExplain(true);
innerHit.addFieldDataField("comments.message");
innerHit.addScriptField("script", new Script("doc['comments.message'].value"));
innerHit.setSize(1);
searchRequests = new SearchRequest[] {
client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")))
.addInnerHit("comments", new InnerHitsBuilder.InnerHit().setPath("comments")
.addNestedInnerHits("comments", "comments", new InnerHitsBuilder.InnerHit()
.setQuery(matchQuery("comments.message", "fox"))
.addHighlightedField("comments.message")
.setExplain(true)
.addFieldDataField("comments.message")
.addScriptField("script", new Script("doc['comments.message'].value"))
.addScriptField("script", new Script("doc['comments.message'].value"))
.setSize(1)).request(),
client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder()
.addHighlightedField("comments.message")
.setExplain(true)
.addFieldDataField("comments.message")
.addScriptField("script", new Script("doc['comments.message'].value"))
.setSize(1))).request()
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, innerHit))).request()
};

for (SearchRequest searchRequest : searchRequests) {
@ -200,17 +201,17 @@ public class InnerHitsIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("idx")
.setSize(numDocs)
.addSort("_uid", SortOrder.ASC)
.addInnerHit("a", new InnerHitsBuilder.InnerHit().setPath("field1").addSort("_doc", SortOrder.DESC).setSize(size)) // Sort order is DESC, because we reverse the inner objects during indexing!
.addInnerHit("b", new InnerHitsBuilder.InnerHit().setPath("field2").addSort("_doc", SortOrder.DESC).setSize(size))
.addNestedInnerHits("a", "field1", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size)) // Sort order is DESC, because we reverse the inner objects during indexing!
.addNestedInnerHits("b", "field2", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size))
.get();
} else {
BoolQueryBuilder boolQuery = new BoolQueryBuilder();
if (randomBoolean()) {
boolQuery.should(nestedQuery("field1", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("a").addSort("_doc", SortOrder.DESC).setSize(size)));
boolQuery.should(nestedQuery("field2", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("b").addSort("_doc", SortOrder.DESC).setSize(size)));
boolQuery.should(nestedQuery("field1", matchAllQuery()).innerHit(new QueryInnerHits("a", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size))));
boolQuery.should(nestedQuery("field2", matchAllQuery()).innerHit(new QueryInnerHits("b", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size))));
} else {
boolQuery.should(constantScoreQuery(nestedQuery("field1", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("a").addSort("_doc", SortOrder.DESC).setSize(size))));
boolQuery.should(constantScoreQuery(nestedQuery("field2", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("b").addSort("_doc", SortOrder.DESC).setSize(size))));
boolQuery.should(constantScoreQuery(nestedQuery("field1", matchAllQuery()).innerHit(new QueryInnerHits("a", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size)))));
boolQuery.should(constantScoreQuery(nestedQuery("field2", matchAllQuery()).innerHit(new QueryInnerHits("b", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size)))));
}
searchResponse = client().prepareSearch("idx")
.setQuery(boolQuery)
@ -266,10 +267,10 @@ public class InnerHitsIT extends ESIntegTestCase {
SearchRequest[] searchRequests = new SearchRequest[]{
client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "fox")))
.addInnerHit("comment", new InnerHitsBuilder.InnerHit().setType("comment").setQuery(matchQuery("message", "fox")))
.addParentChildInnerHits("comment", "comment", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("message", "fox")))
.request(),
client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "fox")).innerHit(new QueryInnerHitBuilder().setName("comment")))
.setQuery(hasChildQuery("comment", matchQuery("message", "fox")).innerHit(new QueryInnerHits("comment", null)))
.request()
};
for (SearchRequest searchRequest : searchRequests) {
@ -292,10 +293,10 @@ public class InnerHitsIT extends ESIntegTestCase {
searchRequests = new SearchRequest[] {
client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "elephant")))
.addInnerHit("comment", new InnerHitsBuilder.InnerHit().setType("comment").setQuery(matchQuery("message", "elephant")))
.addParentChildInnerHits("comment", "comment", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("message", "elephant")))
.request(),
client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "elephant")).innerHit(new QueryInnerHitBuilder()))
.setQuery(hasChildQuery("comment", matchQuery("message", "elephant")).innerHit(new QueryInnerHits()))
.request()
};
for (SearchRequest searchRequest : searchRequests) {
@ -315,11 +316,16 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(innerHits.getAt(2).getId(), equalTo("6"));
assertThat(innerHits.getAt(2).type(), equalTo("comment"));
}

InnerHitsBuilder.InnerHit innerHit = new InnerHitsBuilder.InnerHit();
innerHit.highlightBuilder().field("message");
innerHit.setExplain(true);
innerHit.addFieldDataField("message");
innerHit.addScriptField("script", new Script("doc['message'].value"));
innerHit.setSize(1);
searchRequests = new SearchRequest[] {
client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "fox")))
.addInnerHit("comment", new InnerHitsBuilder.InnerHit().setType("comment")
.addParentChildInnerHits("comment", "comment", new InnerHitsBuilder.InnerHit()
.setQuery(matchQuery("message", "fox"))
.addHighlightedField("message")
.setExplain(true)
@ -327,12 +333,11 @@ public class InnerHitsIT extends ESIntegTestCase {
.addScriptField("script", new Script("doc['message'].value"))
.setSize(1)
).request(),

client().prepareSearch("articles")
.setQuery(
hasChildQuery("comment", matchQuery("message", "fox")).innerHit(
new QueryInnerHitBuilder().addHighlightedField("message").setExplain(true)
.addFieldDataField("message").addScriptField("script", new Script("doc['message'].value"))
.setSize(1))).request() };
new QueryInnerHits(null, innerHit))).request() };

for (SearchRequest searchRequest : searchRequests) {
SearchResponse response = client().search(searchRequest).actionGet();
@ -384,17 +389,17 @@ public class InnerHitsIT extends ESIntegTestCase {
.setSize(numDocs)
.setTypes("parent")
.addSort("_uid", SortOrder.ASC)
.addInnerHit("a", new InnerHitsBuilder.InnerHit().setType("child1").addSort("_uid", SortOrder.ASC).setSize(size))
.addInnerHit("b", new InnerHitsBuilder.InnerHit().setType("child2").addSort("_uid", SortOrder.ASC).setSize(size))
.addParentChildInnerHits("a", "child1", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size))
.addParentChildInnerHits("b", "child2", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size))
.get();
} else {
BoolQueryBuilder boolQuery = new BoolQueryBuilder();
if (randomBoolean()) {
boolQuery.should(hasChildQuery("child1", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("a").addSort("_uid", SortOrder.ASC).setSize(size)));
boolQuery.should(hasChildQuery("child2", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("b").addSort("_uid", SortOrder.ASC).setSize(size)));
boolQuery.should(hasChildQuery("child1", matchAllQuery()).innerHit(new QueryInnerHits("a", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size))));
boolQuery.should(hasChildQuery("child2", matchAllQuery()).innerHit(new QueryInnerHits("b", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size))));
} else {
boolQuery.should(constantScoreQuery(hasChildQuery("child1", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("a").addSort("_uid", SortOrder.ASC).setSize(size))));
boolQuery.should(constantScoreQuery(hasChildQuery("child2", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("b").addSort("_uid", SortOrder.ASC).setSize(size))));
boolQuery.should(constantScoreQuery(hasChildQuery("child1", matchAllQuery()).innerHit(new QueryInnerHits("a", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size)))));
boolQuery.should(constantScoreQuery(hasChildQuery("child2", matchAllQuery()).innerHit(new QueryInnerHits("b", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size)))));
}
searchResponse = client().prepareSearch("idx")
.setSize(numDocs)
@ -446,7 +451,7 @@ public class InnerHitsIT extends ESIntegTestCase {
ensureGreen("articles");
try {
client().prepareSearch("articles")
.addInnerHit("comment", new InnerHitsBuilder.InnerHit())
.addParentChildInnerHits("comment", null, new InnerHitsBuilder.InnerHit())
.get();
} catch (Exception e) {
assertThat(e.getMessage(), containsString("Failed to build"));
@ -473,7 +478,7 @@ public class InnerHitsIT extends ESIntegTestCase {
.setQuery(
boolQuery()
.must(matchQuery("body", "fail2ban"))
.must(hasParentQuery("question", matchAllQuery()).innerHit(new QueryInnerHitBuilder()))
.must(hasParentQuery("question", matchAllQuery()).innerHit(new QueryInnerHits()))
).get();
assertNoFailures(response);
assertHitCount(response, 2);
@ -512,10 +517,10 @@ public class InnerHitsIT extends ESIntegTestCase {

SearchResponse response = client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", hasChildQuery("remark", matchQuery("message", "good"))))
.addInnerHit("comment",
new InnerHitsBuilder.InnerHit().setType("comment")
.addParentChildInnerHits("comment", "comment",
new InnerHitsBuilder.InnerHit()
.setQuery(hasChildQuery("remark", matchQuery("message", "good")))
.addInnerHit("remark", new InnerHitsBuilder.InnerHit().setType("remark").setQuery(matchQuery("message", "good")))
.addParentChildInnerHits("remark", "remark", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("message", "good")))
)
.get();

@ -536,10 +541,10 @@ public class InnerHitsIT extends ESIntegTestCase {

response = client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", hasChildQuery("remark", matchQuery("message", "bad"))))
.addInnerHit("comment",
new InnerHitsBuilder.InnerHit().setType("comment")
.addParentChildInnerHits("comment", "comment",
new InnerHitsBuilder.InnerHit()
.setQuery(hasChildQuery("remark", matchQuery("message", "bad")))
.addInnerHit("remark", new InnerHitsBuilder.InnerHit().setType("remark").setQuery(matchQuery("message", "bad")))
.addParentChildInnerHits("remark", "remark", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("message", "bad")))
)
.get();

@ -604,10 +609,9 @@ public class InnerHitsIT extends ESIntegTestCase {

SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"))))
.addInnerHit("comment", new InnerHitsBuilder.InnerHit()
.setPath("comments")
.addNestedInnerHits("comment", "comments", new InnerHitsBuilder.InnerHit()
.setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good")))
.addInnerHit("remark", new InnerHitsBuilder.InnerHit().setPath("comments.remarks").setQuery(matchQuery("comments.remarks.message", "good")))
.addNestedInnerHits("remark", "comments.remarks", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("comments.remarks.message", "good")))
).get();
assertNoFailures(response);
assertHitCount(response, 1);
@ -630,7 +634,7 @@ public class InnerHitsIT extends ESIntegTestCase {

// Directly refer to the second level:
response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad")).innerHit(new QueryInnerHitBuilder()))
.setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad")).innerHit(new QueryInnerHits()))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
@ -647,10 +651,9 @@ public class InnerHitsIT extends ESIntegTestCase {

response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"))))
.addInnerHit("comment", new InnerHitsBuilder.InnerHit()
.setPath("comments")
.setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad")))
.addInnerHit("remark", new InnerHitsBuilder.InnerHit().setPath("comments.remarks").setQuery(matchQuery("comments.remarks.message", "bad"))))
.addNestedInnerHits("comment", "comments", new InnerHitsBuilder.InnerHit()
.setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad")))
.addNestedInnerHits("remark", "comments.remarks", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("comments.remarks.message", "bad"))))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
@ -685,7 +688,7 @@ public class InnerHitsIT extends ESIntegTestCase {
indexRandom(true, requests);

SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder()))
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits()))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
@ -723,8 +726,8 @@ public class InnerHitsIT extends ESIntegTestCase {
indexRandom(true, requests);

SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder().field("comments.message")))
.get();
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, new InnerHitsBuilder.InnerHit().field("comments.message"))))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
@ -760,9 +763,10 @@ public class InnerHitsIT extends ESIntegTestCase {
.startObject("comments").field("message", "fox eat quick").endObject()
.endObject()));
indexRandom(true, requests);

InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit();
builder.highlightBuilder().field("comments.message");
SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder().addHighlightedField("comments.message")))
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder)))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
@ -781,13 +785,13 @@ public class InnerHitsIT extends ESIntegTestCase {
.addMapping("article", jsonBuilder().startObject()
.startObject("_source").field("excludes", new String[]{"comments"}).endObject()
.startObject("properties")
.startObject("comments")
.field("type", "nested")
.startObject("properties")
.startObject("message").field("type", "string").field("store", "yes").endObject()
.endObject()
.endObject()
.endObject()
.startObject("comments")
.field("type", "nested")
.startObject("properties")
.startObject("message").field("type", "string").field("store", "yes").endObject()
.endObject()
.endObject()
.endObject()
.endObject()
)
);
@ -798,9 +802,11 @@ public class InnerHitsIT extends ESIntegTestCase {
.startObject("comments").field("message", "fox eat quick").endObject()
.endObject()));
indexRandom(true, requests);

InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit();
builder.field("comments.message");
builder.setFetchSource(true);
SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder().field("comments.message").setFetchSource(true)))
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder)))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
@ -836,10 +842,11 @@ public class InnerHitsIT extends ESIntegTestCase {
.startObject("comments").field("message", "fox eat quick").endObject()
.endObject()));
indexRandom(true, requests);

InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit();
builder.highlightBuilder().field("comments.message");
SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder().addHighlightedField("comments.message")))
.get();
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder)))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
@ -880,7 +887,7 @@ public class InnerHitsIT extends ESIntegTestCase {
indexRandom(true, requests);

SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox")).innerHit(new QueryInnerHitBuilder()))
.setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox")).innerHit(new QueryInnerHits()))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
@ -892,7 +899,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getChild(), nullValue());

response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear")).innerHit(new QueryInnerHitBuilder()))
.setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear")).innerHit(new QueryInnerHits()))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
@ -911,7 +918,7 @@ public class InnerHitsIT extends ESIntegTestCase {
.endObject()));
indexRandom(true, requests);
response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox")).innerHit(new QueryInnerHitBuilder()))
.setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox")).innerHit(new QueryInnerHits()))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
@ -927,11 +934,11 @@ public class InnerHitsIT extends ESIntegTestCase {
public void testRoyals() throws Exception {
assertAcked(
prepareCreate("royals")
.addMapping("king")
.addMapping("prince", "_parent", "type=king")
.addMapping("duke", "_parent", "type=prince")
.addMapping("earl", "_parent", "type=duke")
.addMapping("baron", "_parent", "type=earl")
.addMapping("king")
.addMapping("prince", "_parent", "type=king")
.addMapping("duke", "_parent", "type=prince")
.addMapping("earl", "_parent", "type=duke")
.addMapping("baron", "_parent", "type=earl")
);

List<IndexRequestBuilder> requests = new ArrayList<>();
@ -950,15 +957,14 @@ public class InnerHitsIT extends ESIntegTestCase {

SearchResponse response = client().prepareSearch("royals")
.setTypes("duke")
.addInnerHit("earls", new InnerHitsBuilder.InnerHit()
.setType("earl")
.addParentChildInnerHits("earls", "earl", new InnerHitsBuilder.InnerHit()
.addSort(SortBuilders.fieldSort("_uid").order(SortOrder.ASC))
.setSize(4)
.addInnerHit("barons", new InnerHitsBuilder.InnerHit().setType("baron"))
.addParentChildInnerHits("barons", "baron", new InnerHitsBuilder.InnerHit())
)
.addInnerHit("princes",
new InnerHitsBuilder.InnerHit().setType("prince")
.addInnerHit("kings", new InnerHitsBuilder.InnerHit().setType("king"))
.addParentChildInnerHits("princes", "prince",
new InnerHitsBuilder.InnerHit()
.addParentChildInnerHits("kings", "king", new InnerHitsBuilder.InnerHit())
)
.get();
assertHitCount(response, 1);
@ -1066,7 +1072,7 @@ public class InnerHitsIT extends ESIntegTestCase {
.should(termQuery("nested1.n_field1", "n_value1_1").queryName("test1"))
.should(termQuery("nested1.n_field1", "n_value1_3").queryName("test2"))
.should(termQuery("nested1.n_field2", "n_value2_2").queryName("test3"))
).innerHit(new QueryInnerHitBuilder().addSort("nested1.n_field1", SortOrder.ASC)))
).innerHit(new QueryInnerHits(null, new InnerHitsBuilder.InnerHit().addSort("nested1.n_field1", SortOrder.ASC))))
.setSize(numDocs)
.addSort("field1", SortOrder.ASC)
.get();
@ -1106,7 +1112,7 @@ public class InnerHitsIT extends ESIntegTestCase {
indexRandom(true, requests);

SearchResponse response = client().prepareSearch("index")
.setQuery(hasChildQuery("child", matchQuery("field", "value1").queryName("_name1")).innerHit(new QueryInnerHitBuilder()))
.setQuery(hasChildQuery("child", matchQuery("field", "value1").queryName("_name1")).innerHit(new QueryInnerHits()))
.addSort("_uid", SortOrder.ASC)
.get();
assertHitCount(response, 2);
@ -1121,7 +1127,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1"));

response = client().prepareSearch("index")
.setQuery(hasChildQuery("child", matchQuery("field", "value2").queryName("_name2")).innerHit(new QueryInnerHitBuilder()))
.setQuery(hasChildQuery("child", matchQuery("field", "value2").queryName("_name2")).innerHit(new QueryInnerHits()))
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(response, 1);
@ -72,3 +72,10 @@ Support for `queryName` and `boost` has been streamlined to all of the queries.
a breaking change till queries get sent over the network as serialized json rather
than in `Streamable` format. In fact whenever additional fields are added to the json
representation of the query, older nodes might throw an error when they find unknown fields.

==== InnerHitsBuilder

InnerHitsBuilder now has dedicated addParentChildInnerHits and addNestedInnerHits methods
to differentiate between inner hits for nested vs. parent / child documents. This change
makes the type / path parameter mandatory.
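As an illustration only (not part of the commit itself), a minimal sketch of the two new entry points, reusing the index, type and field names that appear in the InnerHitsIT changes above:

[source,java]
----
// Nested inner hits: the nested path ("comments") is now passed explicitly.
client().prepareSearch("articles")
        .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")))
        .addNestedInnerHits("comment", "comments",
                new InnerHitsBuilder.InnerHit().setQuery(matchQuery("comments.message", "fox")))
        .get();

// Parent/child inner hits: the child type ("comment") is now passed explicitly.
client().prepareSearch("articles")
        .setQuery(hasChildQuery("comment", matchQuery("message", "fox")))
        .addParentChildInnerHits("comment", "comment",
                new InnerHitsBuilder.InnerHit().setQuery(matchQuery("message", "fox")))
        .get();
----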