Nested Objects Facets Support, closes #1098.
parent 90209d8dff · commit a4c9f11b1c
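In short: facets can now be computed over nested objects, either by giving the facet a nested path directly or by binding it to the _scope of a nested query. A minimal sketch of the client-side usage this commit enables, mirroring the test at the bottom of the diff (index and field names are illustrative; the usual static imports from QueryBuilders and FacetBuilders are assumed):

    // Run a terms_stats facet over the nested docs under "nested1.nested2";
    // the facet collector is wrapped so it sees the nested child documents.
    SearchResponse response = client.prepareSearch("test")
            .setQuery(matchAllQuery())
            .addFacet(FacetBuilders.termsStatsFacet("facet1")
                    .keyField("nested1.nested2.field2_1")
                    .valueField("nested1.nested2.field2_2")
                    .nested("nested1.nested2"))   // new in this commit
            .execute().actionGet();
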
@@ -7,7 +7,7 @@
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an

@@ -33,6 +33,8 @@ public class NestedQueryBuilder extends BaseQueryBuilder {

    private float boost = 1.0f;

    private String scope;

    public NestedQueryBuilder(String path, QueryBuilder queryBuilder) {
        this.path = path;
        this.queryBuilder = queryBuilder;

@@ -46,6 +48,11 @@ public class NestedQueryBuilder extends BaseQueryBuilder {
        return this;
    }

    public NestedQueryBuilder scope(String scope) {
        this.scope = scope;
        return this;
    }

    /**
     * Sets the boost for this query. Documents matching this query will (in addition to the normal
     * weightings) have their score multiplied by the boost provided.

@@ -63,6 +70,9 @@ public class NestedQueryBuilder extends BaseQueryBuilder {
        if (scoreMode != null) {
            builder.field("score_mode", scoreMode);
        }
        if (scope != null) {
            builder.field("_scope", scope);
        }
        if (boost != 1.0f) {
            builder.field("boost", boost);
        }

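The builder gains a scope(String) setter; when set, toXContent writes it as "_scope" next to the existing "score_mode" and "boost" fields. A hedged sketch, assuming the nestedQuery/termQuery factories used in the test below (values are illustrative):

    // Register this nested query under the scope "my" so facets can later bind to it.
    NestedQueryBuilder query = nestedQuery("nested1.nested2",
            termQuery("nested1.nested2.field2_1", "blue"))
            .scope("my");   // serialized as "_scope" : "my"
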
@@ -32,6 +32,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.search.nested.BlockJoinQuery;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;

@@ -52,6 +53,7 @@ public class NestedQueryParser implements QueryParser {
        Query query = null;
        Filter filter = null;
        float boost = 1.0f;
        String scope = null;
        String path = null;
        BlockJoinQuery.ScoreMode scoreMode = BlockJoinQuery.ScoreMode.Avg;

@@ -77,6 +79,8 @@ public class NestedQueryParser implements QueryParser {
                    path = parser.text();
                } else if ("boost".equals(currentFieldName)) {
                    boost = parser.floatValue();
                } else if ("_scope".equals(currentFieldName)) {
                    scope = parser.text();
                } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
                    String sScoreMode = parser.text();
                    if ("avg".equals(sScoreMode)) {

@@ -137,6 +141,11 @@ public class NestedQueryParser implements QueryParser {
        parentFilterContext.set(currentParentFilterContext);

        BlockJoinQuery joinQuery = new BlockJoinQuery(query, parentFilter, scoreMode);

        if (scope != null) {
            SearchContext.current().addNestedQuery(scope, joinQuery);
        }

        return joinQuery;
    }

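On the parse side, a "_scope" element on the nested query causes the resulting BlockJoinQuery to be registered on the current SearchContext under that name; a facet declaring the same scope then receives the matching child documents. A hedged end-to-end sketch, lifted from the test below:

    SearchResponse response = client.prepareSearch("test")
            .setQuery(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2_1", "blue")).scope("my"))
            .addFacet(FacetBuilders.termsStatsFacet("facet1")
                    .keyField("nested1.nested2.field2_1")
                    .valueField("nested1.nested2.field2_2")
                    .scope("my"))   // bind the facet to the nested query's scope
            .execute().actionGet();
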
@@ -21,18 +21,11 @@ package org.elasticsearch.index.search.nested;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.*;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.OpenBitSet;
import org.elasticsearch.common.lucene.docset.OpenBitDocSet;
import org.elasticsearch.common.lucene.search.NoopCollector;

import java.io.IOException;
import java.util.Set;

@@ -77,8 +70,7 @@ import java.util.Set;
 * @lucene.experimental
 */

// LUCENE MONITOR: Track, additions include:
// --
// LUCENE MONITOR: Track CHANGE
public class BlockJoinQuery extends Query {

    public static enum ScoreMode {None, Avg, Max, Total}

@@ -88,6 +80,13 @@ public class BlockJoinQuery extends Query {
    private final Filter parentsFilter;
    private final Query childQuery;

    private Collector childCollector = NoopCollector.NOOP_COLLECTOR;

    public BlockJoinQuery setCollector(Collector collector) {
        this.childCollector = collector;
        return this;
    }

    // If we are rewritten, this is the original childQuery we
    // were passed; we use this for .equals() and
    // .hashCode(). This makes rewritten query equal the

@@ -114,7 +113,7 @@ public class BlockJoinQuery extends Query {

    @Override
    public Weight createWeight(Searcher searcher) throws IOException {
        return new BlockJoinWeight(this, childQuery.createWeight(searcher), parentsFilter, scoreMode);
        return new BlockJoinWeight(this, childQuery.createWeight(searcher), parentsFilter, scoreMode, childCollector);
    }

    private static class BlockJoinWeight extends Weight {

@@ -122,13 +121,15 @@ public class BlockJoinQuery extends Query {
        private final Weight childWeight;
        private final Filter parentsFilter;
        private final ScoreMode scoreMode;
        private final Collector childCollector;

        public BlockJoinWeight(Query joinQuery, Weight childWeight, Filter parentsFilter, ScoreMode scoreMode) {
        public BlockJoinWeight(Query joinQuery, Weight childWeight, Filter parentsFilter, ScoreMode scoreMode, Collector childCollector) {
            super();
            this.joinQuery = joinQuery;
            this.childWeight = childWeight;
            this.parentsFilter = parentsFilter;
            this.scoreMode = scoreMode;
            this.childCollector = childCollector;
        }

        @Override

@@ -183,7 +184,13 @@ public class BlockJoinQuery extends Query {
                throw new IllegalStateException("parentFilter must return OpenBitSet; got " + parents);
            }

            return new BlockJoinScorer(this, childScorer, (OpenBitSet) parents, firstChildDoc, scoreMode);
            // CHANGE:
            if (childCollector != null) {
                childCollector.setNextReader(reader, 0);
                childCollector.setScorer(childScorer);
            }

            return new BlockJoinScorer(this, childScorer, (OpenBitSet) parents, firstChildDoc, scoreMode, childCollector);
        }

        @Override

@@ -203,6 +210,7 @@ public class BlockJoinQuery extends Query {
        private final Scorer childScorer;
        private final OpenBitSet parentBits;
        private final ScoreMode scoreMode;
        private final Collector childCollector;
        private int parentDoc;
        private float parentScore;
        private int nextChildDoc;

@@ -211,12 +219,13 @@ public class BlockJoinQuery extends Query {
        private float[] pendingChildScores;
        private int childDocUpto;

        public BlockJoinScorer(Weight weight, Scorer childScorer, OpenBitSet parentBits, int firstChildDoc, ScoreMode scoreMode) {
        public BlockJoinScorer(Weight weight, Scorer childScorer, OpenBitSet parentBits, int firstChildDoc, ScoreMode scoreMode, Collector childCollector) {
            super(weight);
            //System.out.println("Q.init firstChildDoc=" + firstChildDoc);
            this.parentBits = parentBits;
            this.childScorer = childScorer;
            this.scoreMode = scoreMode;
            this.childCollector = childCollector;
            if (scoreMode != ScoreMode.None) {
                pendingChildScores = new float[5];
            }

@@ -292,6 +301,10 @@ public class BlockJoinQuery extends Query {
                    maxScore = Math.max(childScore, maxScore);
                    totalScore += childScore;
                }

                // CHANGE:
                childCollector.collect(nextChildDoc);

                childDocUpto++;
                nextChildDoc = childScorer.nextDoc();
            } while (nextChildDoc < parentDoc);

@@ -368,7 +381,7 @@ public class BlockJoinQuery extends Query {
            return new BlockJoinQuery(childQuery,
                    childRewrite,
                    parentsFilter,
                    scoreMode);
                    scoreMode).setCollector(childCollector);
        } else {
            return this;
        }

@@ -415,6 +428,6 @@ public class BlockJoinQuery extends Query {
    public Object clone() {
        return new BlockJoinQuery((Query) origChildQuery.clone(),
                parentsFilter,
                scoreMode);
                scoreMode).setCollector(childCollector);
    }
}

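The net effect of these changes: a BlockJoinQuery can carry an arbitrary Lucene Collector that is fed every matching child document while the parent block is scored (setNextReader/setScorer are called when the scorer is created, collect once per child). A hedged illustration, not part of the commit, of attaching a trivial child-counting collector to an already-built query (here called blockJoinQuery):

    // Count child hits as a side effect of scoring the join query (Lucene 3.x Collector API).
    final int[] childHits = new int[1];
    Collector childCounter = new Collector() {
        @Override public void setScorer(Scorer scorer) throws IOException {
            // the score is not needed for counting
        }
        @Override public void collect(int childDoc) throws IOException {
            childHits[0]++;
        }
        @Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
            // no per-segment state needed here
        }
        @Override public boolean acceptsDocsOutOfOrder() {
            return true;
        }
    };
    blockJoinQuery.setCollector(childCounter);
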
@@ -0,0 +1,89 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.search.nested;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.OpenBitSet;
import org.elasticsearch.common.lucene.docset.DocSet;
import org.elasticsearch.common.lucene.docset.DocSets;
import org.elasticsearch.common.lucene.docset.OpenBitDocSet;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetCollector;

import java.io.IOException;

/**
 * A collector that accepts parent docs, and calls back the collect on child docs of that parent.
 */
public class NestedChildrenCollector extends FacetCollector {

    private final FacetCollector collector;

    private final Filter parentFilter;

    private final Filter childFilter;

    private DocSet childDocs;

    private OpenBitSet parentDocs;

    private IndexReader currentReader;

    public NestedChildrenCollector(FacetCollector collector, Filter parentFilter, Filter childFilter) {
        this.collector = collector;
        this.parentFilter = parentFilter;
        this.childFilter = childFilter;
    }

    @Override public Facet facet() {
        return collector.facet();
    }

    @Override public void setFilter(Filter filter) {
        // delegate the facet_filter to the children
        collector.setFilter(filter);
    }

    @Override public void setScorer(Scorer scorer) throws IOException {
        collector.setScorer(scorer);
    }

    @Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
        collector.setNextReader(reader, docBase);
        currentReader = reader;
        childDocs = DocSets.convert(reader, childFilter.getDocIdSet(reader));
        parentDocs = ((OpenBitDocSet) parentFilter.getDocIdSet(reader)).set();
    }

    @Override public boolean acceptsDocsOutOfOrder() {
        return collector.acceptsDocsOutOfOrder();
    }

    @Override public void collect(int parentDoc) throws IOException {
        int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
        for (int i = (parentDoc - 1); i > prevParentDoc; i--) {
            if (!currentReader.isDeleted(i) && childDocs.get(i)) {
                collector.collect(i);
            }
        }
    }
}

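Nested (child) documents are indexed in the same block as, and immediately before, their root document, so for a collected parent doc the children are exactly the doc ids between the previous parent bit and the parent itself. A small illustration of the backward walk in collect(int parentDoc) (illustration only; in the real collector the bits come from the cached parent and child filters, and deleted docs are skipped):

    OpenBitSet parentDocs = new OpenBitSet(8);
    parentDocs.set(3);                                        // root doc of the first block
    parentDocs.set(7);                                        // root doc of the second block
    int parentDoc = 7;
    int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1); // -> 3
    for (int i = parentDoc - 1; i > prevParentDoc; i--) {
        // visits 6, 5, 4 -- the nested docs of parent 7 -- and forwards each live,
        // child-filter-matching doc to the wrapped facet collector
    }
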
@@ -37,6 +37,8 @@ public abstract class AbstractFacetBuilder implements ToXContent {

    protected FilterBuilder facetFilter;

    protected String nested;

    protected AbstractFacetBuilder(String name) {
        this.name = name;
    }

@@ -46,6 +48,15 @@ public abstract class AbstractFacetBuilder implements ToXContent {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public AbstractFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    /**
     * Marks the facet to run in a global scope, not bounded by any query.
     */

@@ -68,6 +79,10 @@ public abstract class AbstractFacetBuilder implements ToXContent {
            facetFilter.toXContent(builder, params);
        }

        if (nested != null) {
            builder.field("nested", nested);
        }

        if (scope != null) {
            builder.field("scope", scope);
        }

@@ -23,6 +23,10 @@ import org.apache.lucene.search.Filter;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.search.nested.NestedChildrenCollector;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.ContextIndexSearcher;

@@ -72,6 +76,7 @@ public class FacetParseElement implements SearchParseElement {
                String facetFieldName = null;
                Filter filter = null;
                boolean cacheFilter = true;
                String nestedPath = null;
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        facetFieldName = parser.currentName();

@@ -90,10 +95,12 @@ public class FacetParseElement implements SearchParseElement {
                            if (parser.booleanValue()) {
                                scope = ContextIndexSearcher.Scopes.GLOBAL;
                            }
                        } else if ("scope".equals(facetFieldName)) {
                        } else if ("scope".equals(facetFieldName) || "_scope".equals(facetFieldName)) {
                            scope = parser.text();
                        } else if ("cache_filter".equals(facetFieldName) || "cacheFilter".equals(facetFieldName)) {
                            cacheFilter = parser.booleanValue();
                        } else if ("nested".equals(facetFieldName)) {
                            nestedPath = parser.text();
                        }
                    }
                }

@@ -104,6 +111,22 @@ public class FacetParseElement implements SearchParseElement {
                    facet.setFilter(filter);
                }

                if (nestedPath != null) {
                    // its a nested facet, wrap the collector with a facet one...
                    MapperService.SmartNameObjectMapper mapper = context.mapperService().smartNameObjectMapper(nestedPath);
                    if (mapper == null) {
                        throw new SearchParseException(context, "facet nested path [" + nestedPath + "] not found");
                    }
                    ObjectMapper objectMapper = mapper.mapper();
                    if (objectMapper == null) {
                        throw new SearchParseException(context, "facet nested path [" + nestedPath + "] not found");
                    }
                    if (!objectMapper.nested().isNested()) {
                        throw new SearchParseException(context, "facet nested path [" + nestedPath + "] is not nested");
                    }
                    facet = new NestedChildrenCollector(facet, context.filterCache().cache(NonNestedDocsFilter.INSTANCE), context.filterCache().cache(objectMapper.nestedTypeFilter()));
                }

                if (facetCollectors == null) {
                    facetCollectors = Lists.newArrayList();
                }

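For reference, a hedged sketch of the request source this parser now accepts: a facet may carry a "nested" element naming the nested path, and its collector is then wrapped in a NestedChildrenCollector for that path. The surrounding "facets"/"terms_stats"/"key_field"/"value_field" element names follow the usual facet DSL and field names are illustrative; the same static jsonBuilder import as in the test below is assumed:

    XContentBuilder source = jsonBuilder().startObject()
            .startObject("facets")
                .startObject("facet1")
                    .startObject("terms_stats")
                        .field("key_field", "nested1.nested2.field2_1")
                        .field("value_field", "nested1.nested2.field2_2")
                    .endObject()
                    .field("nested", "nested1.nested2")   // the new element handled above
                .endObject()
            .endObject()
            .endObject();
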
@@ -31,6 +31,7 @@ import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.search.nested.BlockJoinQuery;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.internal.ContextIndexSearcher;

@@ -61,6 +62,19 @@ public class FacetPhase implements SearchPhase {
    }

    @Override public void preProcess(SearchContext context) {
        // add specific facets to nested queries...
        if (context.nestedQueries() != null) {
            for (Map.Entry<String, BlockJoinQuery> entry : context.nestedQueries().entrySet()) {
                List<Collector> collectors = context.searcher().removeCollectors(entry.getKey());
                if (collectors != null && !collectors.isEmpty()) {
                    if (collectors.size() == 1) {
                        entry.getValue().setCollector(collectors.get(0));
                    } else {
                        entry.getValue().setCollector(MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()])));
                    }
                }
            }
        }
    }

    @Override public void execute(SearchContext context) throws ElasticSearchException {

@@ -150,6 +150,16 @@ public class DateHistogramFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public DateHistogramFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (keyFieldName == null) {
            throw new SearchSourceBuilderException("field must be set on date histogram facet for facet [" + name + "]");

@@ -58,6 +58,15 @@ public class FilterFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public FilterFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    public FilterFacetBuilder filter(FilterBuilder filter) {
        this.filter = filter;
        return this;

@@ -217,6 +217,15 @@ public class GeoDistanceFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public GeoDistanceFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (fieldName == null) {
            throw new SearchSourceBuilderException("field must be set on geo_distance facet for facet [" + name + "]");

@@ -132,6 +132,15 @@ public class HistogramFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public HistogramFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (keyFieldName == null) {
            throw new SearchSourceBuilderException("field must be set on histogram facet for facet [" + name + "]");

@@ -118,6 +118,15 @@ public class HistogramScriptFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public HistogramScriptFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (keyScript == null && keyFieldName == null) {
            throw new SearchSourceBuilderException("key_script or key_field must be set on histogram script facet for facet [" + name + "]");

@@ -59,6 +59,15 @@ public class QueryFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public QueryFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    public QueryFacetBuilder query(QueryBuilder query) {
        this.query = query;
        return this;

@@ -149,6 +149,15 @@ public class RangeFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public RangeFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (keyFieldName == null) {
            throw new SearchSourceBuilderException("field must be set on range facet for facet [" + name + "]");

@@ -125,6 +125,15 @@ public class RangeScriptFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public RangeScriptFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (keyScript == null) {
            throw new SearchSourceBuilderException("key_script must be set on range script facet for facet [" + name + "]");

@@ -71,6 +71,15 @@ public class StatisticalFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public StatisticalFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (fieldName == null && fieldsNames == null) {
            throw new SearchSourceBuilderException("field must be set on statistical facet for facet [" + name + "]");

@@ -61,6 +61,15 @@ public class StatisticalScriptFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public StatisticalScriptFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    /**
     * The language of the script.
     */

@@ -83,6 +83,15 @@ public class TermsFacetBuilder extends AbstractFacetBuilder {
        return this;
    }

    /**
     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
     * nested objects matching the path to be computed into the facet.
     */
    public TermsFacetBuilder nested(String nested) {
        this.nested = nested;
        return this;
    }

    /**
     * The field the terms will be collected from.
     */

@@ -36,6 +36,7 @@ import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.search.nested.BlockJoinQuery;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;

@@ -50,7 +51,9 @@ import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.query.QuerySearchResult;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author kimchy (shay.banon)

@@ -148,6 +151,8 @@ public class SearchContext implements Releasable {

    private List<ScopePhase> scopePhases = null;

    private Map<String, BlockJoinQuery> nestedQueries;

    public SearchContext(long id, SearchShardTarget shardTarget, SearchType searchType, int numberOfShards, TimeValue timeout,
                         String[] types, Engine.Searcher engineSearcher, IndexService indexService, ScriptService scriptService) {
        this.id = id;

@@ -486,4 +491,15 @@ public class SearchContext implements Releasable {
        }
        this.scopePhases.add(scopePhase);
    }

    public Map<String, BlockJoinQuery> nestedQueries() {
        return this.nestedQueries;
    }

    public void addNestedQuery(String scope, BlockJoinQuery query) {
        if (nestedQueries == null) {
            nestedQueries = new HashMap<String, BlockJoinQuery>();
        }
        nestedQueries.put(scope, query);
    }
}

@@ -24,6 +24,9 @@ import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.search.facet.FacetBuilders;
import org.elasticsearch.search.facet.termsstats.TermsStatsFacet;
import org.elasticsearch.test.integration.AbstractNodesTests;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;

@@ -226,4 +229,85 @@ public class SimpleNestedTests extends AbstractNodesTests {
        assertThat(Arrays.toString(searchResponse.shardFailures()), searchResponse.failedShards(), equalTo(0));
        assertThat(searchResponse.hits().totalHits(), equalTo(0l));
    }

    @Test public void testFacetsSingleShard() throws Exception {
        testFacets(1);
    }

    @Test public void testFacetsMultiShards() throws Exception {
        testFacets(3);
    }

    private void testFacets(int numberOfShards) throws Exception {
        client.admin().indices().prepareDelete().execute().actionGet();

        client.admin().indices().prepareCreate("test")
                .setSettings(ImmutableSettings.settingsBuilder().put("number_of_shards", numberOfShards))
                .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
                        .startObject("nested1")
                        .field("type", "nested").startObject("properties")
                        .startObject("nested2").field("type", "nested").endObject()
                        .endObject().endObject()
                        .endObject().endObject().endObject())
                .execute().actionGet();

        client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();

        client.prepareIndex("test", "type1", "1").setSource(jsonBuilder()
                .startObject()
                .field("field", "value")
                .startArray("nested1")
                .startObject().field("field1_1", "1").startArray("nested2").startObject().field("field2_1", "blue").field("field2_2", 5).endObject().startObject().field("field2_1", "yellow").field("field2_2", 3).endObject().endArray().endObject()
                .startObject().field("field1_1", "4").startArray("nested2").startObject().field("field2_1", "green").field("field2_2", 6).endObject().startObject().field("field2_1", "blue").field("field2_2", 1).endObject().endArray().endObject()
                .endArray()
                .endObject()).execute().actionGet();

        client.prepareIndex("test", "type1", "2").setSource(jsonBuilder()
                .startObject()
                .field("field", "value")
                .startArray("nested1")
                .startObject().field("field1_1", "2").startArray("nested2").startObject().field("field2_1", "yellow").field("field2_2", 10).endObject().startObject().field("field2_1", "green").field("field2_2", 8).endObject().endArray().endObject()
                .startObject().field("field1_1", "1").startArray("nested2").startObject().field("field2_1", "blue").field("field2_2", 2).endObject().startObject().field("field2_1", "red").field("field2_2", 12).endObject().endArray().endObject()
                .endArray()
                .endObject()).execute().actionGet();

        client.admin().indices().prepareRefresh().execute().actionGet();

        SearchResponse searchResponse = client.prepareSearch("test").setQuery(matchAllQuery())
                .addFacet(FacetBuilders.termsStatsFacet("facet1").keyField("nested1.nested2.field2_1").valueField("nested1.nested2.field2_2").nested("nested1.nested2"))
                .execute().actionGet();

        assertThat(Arrays.toString(searchResponse.shardFailures()), searchResponse.failedShards(), equalTo(0));
        assertThat(searchResponse.hits().totalHits(), equalTo(2l));

        TermsStatsFacet termsStatsFacet = searchResponse.facets().facet("facet1");
        assertThat(termsStatsFacet.entries().size(), equalTo(4));
        assertThat(termsStatsFacet.entries().get(0).term(), equalTo("blue"));
        assertThat(termsStatsFacet.entries().get(0).count(), equalTo(3l));
        assertThat(termsStatsFacet.entries().get(0).total(), equalTo(8d));
        assertThat(termsStatsFacet.entries().get(1).term(), equalTo("yellow"));
        assertThat(termsStatsFacet.entries().get(1).count(), equalTo(2l));
        assertThat(termsStatsFacet.entries().get(1).total(), equalTo(13d));
        assertThat(termsStatsFacet.entries().get(2).term(), equalTo("green"));
        assertThat(termsStatsFacet.entries().get(2).count(), equalTo(2l));
        assertThat(termsStatsFacet.entries().get(2).total(), equalTo(14d));
        assertThat(termsStatsFacet.entries().get(3).term(), equalTo("red"));
        assertThat(termsStatsFacet.entries().get(3).count(), equalTo(1l));
        assertThat(termsStatsFacet.entries().get(3).total(), equalTo(12d));

        // test scope ones
        searchResponse = client.prepareSearch("test")
                .setQuery(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2_1", "blue")).scope("my"))
                .addFacet(FacetBuilders.termsStatsFacet("facet1").keyField("nested1.nested2.field2_1").valueField("nested1.nested2.field2_2").scope("my"))
                .execute().actionGet();

        assertThat(Arrays.toString(searchResponse.shardFailures()), searchResponse.failedShards(), equalTo(0));
        assertThat(searchResponse.hits().totalHits(), equalTo(2l));

        termsStatsFacet = searchResponse.facets().facet("facet1");
        assertThat(termsStatsFacet.entries().size(), equalTo(1));
        assertThat(termsStatsFacet.entries().get(0).term(), equalTo("blue"));
        assertThat(termsStatsFacet.entries().get(0).count(), equalTo(3l));
        assertThat(termsStatsFacet.entries().get(0).total(), equalTo(8d));
    }
}

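As a sanity check, the expected entries of the first (unscoped) facet follow directly from the two indexed documents:

    // blue:   5 + 1 (doc 1) + 2 (doc 2)  -> count 3, total 8
    // yellow: 3 (doc 1) + 10 (doc 2)     -> count 2, total 13
    // green:  6 (doc 1) + 8 (doc 2)      -> count 2, total 14
    // red:    12 (doc 2)                 -> count 1, total 12

The scoped variant only sees the child documents matched by the nested query (the "blue" entries) and collected under scope "my", hence the single "blue" entry with the same count 3 and total 8.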