Remove scope support in the query and facet DSL.

Remove support for the `scope` field in facets and the `_scope` field in the nested and parent/child queries. Scope support for nested queries is replaced by the `nested` facet option combined with a facet filter that wraps a nested filter. Nested filters now support a `join` option, which controls whether the block join is performed. It is enabled by default; when disabled, the filter matches the nested documents themselves instead of the joined root documents.

Search request with the current scope support:
```
curl -s -XPOST 'localhost:9200/products/_search' -d '{
    "query" : {
        "nested" : {
            "path" : "offers",
            "query" : {
                "match" : {
                    "offers.color" : "blue"
                }
            },
            "_scope" : "my_scope"
        }
    },
    "facets" : {
        "size" : {
            "terms" : {
                "field" : "offers.size"
            },
            "scope" : "my_scope"
        }
    }
}'
```

The following is the functional equivalent of using the scope support:
```
curl -s -XPOST 'localhost:9200/products/_search?search_type=count' -d '{
    "query" : {
        "nested" : {
            "path" : "offers",
            "query" : {
                "match" : {
                    "offers.color" : "blue"
                }
            }
        }
    },
    "facets" : {
        "size" : {
            "terms" : {
                "field" : "offers.size"
            },
            "facet_filter" : {
                "nested" : {
                    "path" : "offers",
                    "query" : {
                        "match" : {
                            "offers.color" : "blue"
                        }
                    },
                    "join" : false
                }
            },
            "nested" : "offers"
        }
    }
}'
```
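
For Java API users the same request can be expressed with the query, filter and facet builders. This is only a sketch that mirrors the curl example above (index `products`, nested path `offers`, illustrative class and method names); it uses the new `join(false)` option on `NestedFilterBuilder` and assumes the terms facet builder exposes the same `nested` and `facetFilter` options exercised by the updated tests in this commit:
```
import static org.elasticsearch.index.query.FilterBuilders.nestedFilter;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.nestedQuery;
import static org.elasticsearch.search.facet.FacetBuilders.termsFacet;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;

public class NestedSizeFacetExample {

    // Count request: the facet filter repeats the nested query with join(false),
    // so the "size" terms facet is computed over the matching nested offer documents.
    public static SearchResponse sizeFacet(Client client) {
        return client.prepareSearch("products")
                .setSearchType(SearchType.COUNT)
                .setQuery(nestedQuery("offers", matchQuery("offers.color", "blue")))
                .addFacet(
                        termsFacet("size")
                                .field("offers.size")
                                .nested("offers")
                                .facetFilter(nestedFilter("offers", matchQuery("offers.color", "blue")).join(false))
                )
                .execute().actionGet();
    }
}
```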

Scope support for parent/child queries is replaced by running the child query as a filter in a global facet.

Search request with the current scope support:
```
curl -s -XPOST 'localhost:9200/products/_search' -d '{
    "query" : {
        "has_child" : {
            "type" : "offer",
            "query" : {
                "match" : {
                    "color" : "blue"
                }
            },
            "_scope" : "my_scope"
        }
    },
    "facets" : {
        "size" : {
            "terms" : {
                "field" : "size"
            },
            "scope" : "my_scope"
        }
    }
}'
```

The following is the functional equivalent of using the scope support with parent/child queries:
```
curl -s -XPOST 'localhost:9200/products/_search' -d '{
    "query" : {
        "has_child" : {
            "type" : "offer",
            "query" : {
                "match" : {
                    "color" : "blue"
                }
            }
        }
    },
    "facets" : {
        "size" : {
            "terms" : {
                "field" : "size"
            },
            "global" : true,
            "facet_filter" : {
                "term" : {
                    "color" : "blue"
                }
            }
        }
    }
}'
```
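
The Java API equivalent follows the same pattern: keep the `has_child` query and repeat the child condition as a facet filter on a global facet. Again only a sketch reusing the names from the example above (class and method names are illustrative); the `global(true)` and `facetFilter(...)` facet options are the ones used by the updated tests in this commit:
```
import static org.elasticsearch.index.query.FilterBuilders.termFilter;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.search.facet.FacetBuilders.termsFacet;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;

public class ChildSizeFacetExample {

    // The global facet sees all documents; the facet filter limits it to the child
    // documents the has_child query matched (offers with color "blue").
    public static SearchResponse sizeFacet(Client client) {
        return client.prepareSearch("products")
                .setQuery(hasChildQuery("offer", matchQuery("color", "blue")))
                .addFacet(
                        termsFacet("size")
                                .field("size")
                                .global(true)
                                .facetFilter(termFilter("color", "blue"))
                )
                .execute().actionGet();
    }
}
```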

Closes #2606
Martijn van Groningen 2013-01-31 15:09:57 +01:00
parent 355381962b
commit 46dd42920c
21 changed files with 81 additions and 640 deletions


@@ -97,6 +97,12 @@
             <version>${lucene.version}</version>
             <scope>compile</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-join</artifactId>
+            <version>${lucene.version}</version>
+            <scope>compile</scope>
+        </dependency>
         <!-- START: dependencies that are shaded -->


@@ -31,7 +31,6 @@ public class HasChildFilterBuilder extends BaseFilterBuilder {
     private final FilterBuilder filterBuilder;
     private final QueryBuilder queryBuilder;
     private String childType;
-    private String scope;
     private String filterName;
     private String executionType;
@@ -47,11 +46,6 @@ public class HasChildFilterBuilder extends BaseFilterBuilder {
         this.filterBuilder = filterBuilder;
     }

-    public HasChildFilterBuilder scope(String scope) {
-        this.scope = scope;
-        return this;
-    }

     /**
      * Sets the filter name for the filter that can be used when searching for matched_filters per hit.
      */
@@ -81,9 +75,6 @@ public class HasChildFilterBuilder extends BaseFilterBuilder {
             filterBuilder.toXContent(builder, params);
         }
         builder.field("child_type", childType);
-        if (scope != null) {
-            builder.field("_scope", scope);
-        }
         if (filterName != null) {
             builder.field("_name", filterName);
         }


@@ -55,7 +55,6 @@ public class HasChildFilterParser implements FilterParser {
         Query query = null;
         boolean queryFound = false;
         String childType = null;
-        String scope = null;
         String executionType = "uid";
         String filterName = null;
@@ -92,7 +91,7 @@ public class HasChildFilterParser implements FilterParser {
                 if ("type".equals(currentFieldName) || "child_type".equals(currentFieldName) || "childType".equals(currentFieldName)) {
                     childType = parser.text();
                 } else if ("_scope".equals(currentFieldName)) {
-                    scope = parser.text();
+                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_child] filter has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("_name".equals(currentFieldName)) {
                     filterName = parser.text();
                 } else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) {// This option is experimental and will most likely be removed.
@@ -126,7 +125,7 @@ public class HasChildFilterParser implements FilterParser {
         SearchContext searchContext = SearchContext.current();
-        HasChildFilter childFilter = HasChildFilter.create(query, scope, parentType, childType, searchContext, executionType);
+        HasChildFilter childFilter = HasChildFilter.create(query, null, parentType, childType, searchContext, executionType);
         searchContext.addScopePhase(childFilter);
         if (filterName != null) {


@@ -32,8 +32,6 @@ public class HasChildQueryBuilder extends BaseQueryBuilder implements BoostableQ
     private String childType;
-    private String scope;
     private float boost = 1.0f;
     private String scoreType;
@@ -45,15 +43,6 @@ public class HasChildQueryBuilder extends BaseQueryBuilder implements BoostableQ
         this.queryBuilder = queryBuilder;
     }

-    /**
-     * The scope of the query, which can later be used, for example, to run facets against the child docs that
-     * matches the query.
-     */
-    public HasChildQueryBuilder scope(String scope) {
-        this.scope = scope;
-        return this;
-    }

     /**
      * Sets the boost for this query. Documents matching this query will (in addition to the normal
      * weightings) have their score multiplied by the boost provided.
@@ -88,15 +77,15 @@ public class HasChildQueryBuilder extends BaseQueryBuilder implements BoostableQ
         builder.field("query");
         queryBuilder.toXContent(builder, params);
         builder.field("child_type", childType);
-        if (scope != null) {
-            builder.field("_scope", scope);
-        }
         if (boost != 1.0f) {
             builder.field("boost", boost);
         }
         if (scoreType != null) {
             builder.field("score_type", scoreType);
         }
+        if (executionType != null) {
+            builder.field("execution_type", executionType);
+        }
         builder.endObject();
     }
 }


@@ -57,7 +57,6 @@ public class HasChildQueryParser implements QueryParser {
         boolean queryFound = false;
         float boost = 1.0f;
         String childType = null;
-        String scope = null;
         ScoreType scoreType = null;
         String executionType = "uid";
@@ -83,10 +82,10 @@
             } else if (token.isValue()) {
                 if ("type".equals(currentFieldName) || "child_type".equals(currentFieldName) || "childType".equals(currentFieldName)) {
                     childType = parser.text();
+                } else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) {// This option is experimental and will most likely be removed.
+                    executionType = parser.text();
                 } else if ("_scope".equals(currentFieldName)) {
-                    scope = parser.text();
+                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_child] query has been removed, use a filter as a facet_filter in the relevant global facet");
-                } else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) {
-                    scoreType = ScoreType.fromString(parser.text());
                 } else if ("score_type".equals(currentFieldName) || "scoreType".equals(currentFieldName)) {
                     String scoreTypeValue = parser.text();
                     if (!"none".equals(scoreTypeValue)) {
@@ -125,11 +124,11 @@
         Query query;
         if (scoreType != null) {
             Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
-            ChildrenQuery childrenQuery = new ChildrenQuery(searchContext, parentType, childType, parentFilter, scope, innerQuery, scoreType);
+            ChildrenQuery childrenQuery = new ChildrenQuery(searchContext, parentType, childType, parentFilter, null, innerQuery, scoreType);
             searchContext.addScopePhase(childrenQuery);
             query = childrenQuery;
         } else {
-            HasChildFilter hasChildFilter = HasChildFilter.create(innerQuery, scope, parentType, childType, searchContext, executionType);
+            HasChildFilter hasChildFilter = HasChildFilter.create(innerQuery, null, parentType, childType, searchContext, executionType);
             searchContext.addScopePhase(hasChildFilter);
             query = new ConstantScoreQuery(hasChildFilter);
         }


@@ -31,7 +31,6 @@ public class HasParentFilterBuilder extends BaseFilterBuilder {
     private final QueryBuilder queryBuilder;
     private final FilterBuilder filterBuilder;
     private final String parentType;
-    private String scope;
     private String filterName;
     private String executionType;
@@ -55,11 +54,6 @@ public class HasParentFilterBuilder extends BaseFilterBuilder {
         this.filterBuilder = parentFilter;
     }

-    public HasParentFilterBuilder scope(String scope) {
-        this.scope = scope;
-        return this;
-    }

     public HasParentFilterBuilder filterName(String filterName) {
         this.filterName = filterName;
         return this;
@@ -86,9 +80,6 @@ public class HasParentFilterBuilder extends BaseFilterBuilder {
             filterBuilder.toXContent(builder, params);
         }
         builder.field("parent_type", parentType);
-        if (scope != null) {
-            builder.field("_scope", scope);
-        }
         if (filterName != null) {
             builder.field("_name", filterName);
         }


@@ -56,7 +56,6 @@ public class HasParentFilterParser implements FilterParser {
         boolean queryFound = false;
         String parentType = null;
         String executionType = "uid";
-        String scope = null;
         String filterName = null;
         String currentFieldName = null;
@@ -91,7 +90,7 @@ public class HasParentFilterParser implements FilterParser {
                 if ("type".equals(currentFieldName) || "parent_type".equals(currentFieldName) || "parentType".equals(currentFieldName)) {
                     parentType = parser.text();
                 } else if ("_scope".equals(currentFieldName)) {
-                    scope = parser.text();
+                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_parent] filter has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("_name".equals(currentFieldName)) {
                     filterName = parser.text();
                 } else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) { // This option is experimental and will most likely be removed.
@@ -122,7 +121,7 @@ public class HasParentFilterParser implements FilterParser {
         SearchContext searchContext = SearchContext.current();
-        HasParentFilter parentFilter = HasParentFilter.create(executionType, query, scope, parentType, searchContext);
+        HasParentFilter parentFilter = HasParentFilter.create(executionType, query, null, parentType, searchContext);
         searchContext.addScopePhase(parentFilter);
         if (filterName != null) {


@@ -32,7 +32,6 @@ public class HasParentQueryBuilder extends BaseQueryBuilder implements Boostable
     private final String parentType;
     private String scoreType;
     private String executionType;
-    private String scope;
     private float boost = 1.0f;

     /**
@@ -44,11 +43,6 @@ public class HasParentQueryBuilder extends BaseQueryBuilder implements Boostable
         this.queryBuilder = parentQuery;
     }

-    public HasParentQueryBuilder scope(String scope) {
-        this.scope = scope;
-        return this;
-    }

     public HasParentQueryBuilder boost(float boost) {
         this.boost = boost;
         return this;
@@ -78,9 +72,6 @@ public class HasParentQueryBuilder extends BaseQueryBuilder implements Boostable
         builder.field("query");
         queryBuilder.toXContent(builder, params);
         builder.field("parent_type", parentType);
-        if (scope != null) {
-            builder.field("_scope", scope);
-        }
         if (scoreType != null) {
             builder.field("score_type", scoreType);
         }


@@ -59,7 +59,6 @@ public class HasParentQueryParser implements QueryParser {
         boolean queryFound = false;
         float boost = 1.0f;
         String parentType = null;
-        String scope = null;
         boolean score = false;
         String executionType = "uid";
@@ -85,7 +84,7 @@
                 if ("type".equals(currentFieldName) || "parent_type".equals(currentFieldName) || "parentType".equals(currentFieldName)) {
                     parentType = parser.text();
                 } else if ("_scope".equals(currentFieldName)) {
-                    scope = parser.text();
+                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_parent] query has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) {
                     executionType = parser.text();
                 } else if ("score_type".equals(currentFieldName) || "scoreType".equals(currentFieldName)) {
@@ -150,11 +149,11 @@
         SearchContext searchContext = SearchContext.current();
         Query query;
         if (score) {
-            ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childTypes, childFilter, scope);
+            ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childTypes, childFilter, null);
             searchContext.addScopePhase(parentQuery);
             query = parentQuery;
         } else {
-            HasParentFilter hasParentFilter = HasParentFilter.create(executionType, innerQuery, scope, parentType, searchContext);
+            HasParentFilter hasParentFilter = HasParentFilter.create(executionType, innerQuery, null, parentType, searchContext);
             searchContext.addScopePhase(hasParentFilter);
             query = new ConstantScoreQuery(hasParentFilter);
         }


@@ -29,8 +29,7 @@ public class NestedFilterBuilder extends BaseFilterBuilder {
     private final FilterBuilder filterBuilder;
     private final String path;
-    private String scope;
+    private Boolean join;
     private Boolean cache;
     private String cacheKey;
@@ -48,9 +47,8 @@ public class NestedFilterBuilder extends BaseFilterBuilder {
         this.filterBuilder = filterBuilder;
     }

-    public NestedFilterBuilder scope(String scope) {
-        this.scope = scope;
+    public NestedFilterBuilder join(boolean join) {
+        this.join = join;
         return this;
     }
@@ -85,10 +83,10 @@
             builder.field("filter");
             filterBuilder.toXContent(builder, params);
         }
-        builder.field("path", path);
-        if (scope != null) {
-            builder.field("_scope", scope);
+        if (join != null) {
+            builder.field("join", join);
         }
+        builder.field("path", path);
         if (filterName != null) {
             builder.field("_name", filterName);
         }


@@ -22,6 +22,8 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryWrapperFilter;
+import org.apache.lucene.search.join.ScoreMode;
+import org.apache.lucene.search.join.ToParentBlockJoinQuery;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
@@ -30,9 +32,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
-import org.elasticsearch.index.search.nested.BlockJoinQuery;
 import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
-import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
@@ -58,7 +58,7 @@ public class NestedFilterParser implements FilterParser {
         Filter filter = null;
         boolean filterFound = false;
         float boost = 1.0f;
-        String scope = null;
+        boolean join = true;
         String path = null;
         boolean cache = false;
         CacheKeyFilter.Key cacheKey = null;
@@ -87,12 +87,14 @@ public class NestedFilterParser implements FilterParser {
                     throw new QueryParsingException(parseContext.index(), "[nested] filter does not support [" + currentFieldName + "]");
                 }
             } else if (token.isValue()) {
-                if ("path".equals(currentFieldName)) {
+                if ("join".equals(currentFieldName)) {
+                    join = parser.booleanValue();
+                } else if ("path".equals(currentFieldName)) {
                     path = parser.text();
                 } else if ("boost".equals(currentFieldName)) {
                     boost = parser.floatValue();
                 } else if ("_scope".equals(currentFieldName)) {
-                    scope = parser.text();
+                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [nested] filter has been removed, use nested filter as a facet_filter in the relevant facet");
                 } else if ("_name".equals(currentFieldName)) {
                     filterName = parser.text();
                 } else if ("_cache".equals(currentFieldName)) {
@@ -149,21 +151,21 @@ public class NestedFilterParser implements FilterParser {
                 parentFilter = parseContext.cacheFilter(parentFilter, null);
             }

-            BlockJoinQuery joinQuery = new BlockJoinQuery(query, parentFilter, BlockJoinQuery.ScoreMode.None);
-            if (scope != null) {
-                SearchContext.current().addNestedQuery(scope, joinQuery);
+            Filter nestedFilter;
+            if (join) {
+                ToParentBlockJoinQuery joinQuery = new ToParentBlockJoinQuery(query, parentFilter, ScoreMode.None);
+                nestedFilter = new QueryWrapperFilter(joinQuery);
+            } else {
+                nestedFilter = new QueryWrapperFilter(query);
             }
-            Filter joinFilter = new QueryWrapperFilter(joinQuery);
             if (cache) {
-                joinFilter = parseContext.cacheFilter(joinFilter, cacheKey);
+                nestedFilter = parseContext.cacheFilter(nestedFilter, cacheKey);
             }
             if (filterName != null) {
-                parseContext.addNamedFilter(filterName, joinFilter);
+                parseContext.addNamedFilter(filterName, nestedFilter);
             }
-            return joinFilter;
+            return nestedFilter;
         } finally {
             // restore the thread local one...
             NestedQueryParser.parentFilterContext.set(currentParentFilterContext);


@@ -34,8 +34,6 @@ public class NestedQueryBuilder extends BaseQueryBuilder implements BoostableQue
     private float boost = 1.0f;

-    private String scope;

     public NestedQueryBuilder(String path, QueryBuilder queryBuilder) {
         this.path = path;
         this.queryBuilder = queryBuilder;
@@ -56,11 +54,6 @@ public class NestedQueryBuilder extends BaseQueryBuilder implements BoostableQue
         return this;
     }

-    public NestedQueryBuilder scope(String scope) {
-        this.scope = scope;
-        return this;
-    }

     /**
      * Sets the boost for this query. Documents matching this query will (in addition to the normal
      * weightings) have their score multiplied by the boost provided.
@@ -84,9 +77,6 @@ public class NestedQueryBuilder extends BaseQueryBuilder implements BoostableQue
         if (scoreMode != null) {
             builder.field("score_mode", scoreMode);
         }
-        if (scope != null) {
-            builder.field("_scope", scope);
-        }
         if (boost != 1.0f) {
             builder.field("boost", boost);
         }


@@ -23,6 +23,8 @@ import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.join.ScoreMode;
+import org.apache.lucene.search.join.ToParentBlockJoinQuery;
 import org.apache.lucene.util.Bits;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
@@ -31,9 +33,7 @@ import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
-import org.elasticsearch.index.search.nested.BlockJoinQuery;
 import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
-import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
@@ -59,9 +59,8 @@ public class NestedQueryParser implements QueryParser {
         Filter filter = null;
         boolean filterFound = false;
         float boost = 1.0f;
-        String scope = null;
         String path = null;
-        BlockJoinQuery.ScoreMode scoreMode = BlockJoinQuery.ScoreMode.Avg;
+        ScoreMode scoreMode = ScoreMode.Avg;

         // we need a late binding filter so we can inject a parent nested filter inner nested queries
         LateBindingParentFilter currentParentFilterContext = parentFilterContext.get();
@@ -91,17 +90,17 @@ public class NestedQueryParser implements QueryParser {
                 } else if ("boost".equals(currentFieldName)) {
                     boost = parser.floatValue();
                 } else if ("_scope".equals(currentFieldName)) {
-                    scope = parser.text();
+                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [nested] query has been removed, use nested filter as a facet_filter in the relevant facet");
                 } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
                     String sScoreMode = parser.text();
                     if ("avg".equals(sScoreMode)) {
-                        scoreMode = BlockJoinQuery.ScoreMode.Avg;
+                        scoreMode = ScoreMode.Avg;
                     } else if ("max".equals(sScoreMode)) {
-                        scoreMode = BlockJoinQuery.ScoreMode.Max;
+                        scoreMode = ScoreMode.Max;
                     } else if ("total".equals(sScoreMode)) {
-                        scoreMode = BlockJoinQuery.ScoreMode.Total;
+                        scoreMode = ScoreMode.Total;
                     } else if ("none".equals(sScoreMode)) {
-                        scoreMode = BlockJoinQuery.ScoreMode.None;
+                        scoreMode = ScoreMode.None;
                     } else {
                         throw new QueryParsingException(parseContext.index(), "illegal score_mode for nested query [" + sScoreMode + "]");
                     }
@@ -153,13 +152,8 @@ public class NestedQueryParser implements QueryParser {
                 parentFilter = parseContext.cacheFilter(parentFilter, null);
             }

-            BlockJoinQuery joinQuery = new BlockJoinQuery(query, parentFilter, scoreMode);
+            ToParentBlockJoinQuery joinQuery = new ToParentBlockJoinQuery(query, parentFilter, scoreMode);
             joinQuery.setBoost(boost);
-            if (scope != null) {
-                SearchContext.current().addNestedQuery(scope, joinQuery);
-            }
             return joinQuery;
         } finally {
             // restore the thread local one...


@@ -32,8 +32,6 @@ public class TopChildrenQueryBuilder extends BaseQueryBuilder implements Boostab
     private String childType;
-    private String scope;
     private String score;
     private float boost = 1.0f;
@@ -47,15 +45,6 @@ public class TopChildrenQueryBuilder extends BaseQueryBuilder implements Boostab
         this.queryBuilder = queryBuilder;
     }

-    /**
-     * The scope of the query, which can later be used, for example, to run facets against the child docs that
-     * matches the query.
-     */
-    public TopChildrenQueryBuilder scope(String scope) {
-        this.scope = scope;
-        return this;
-    }

     /**
      * How to compute the score. Possible values are: <tt>max</tt>, <tt>sum</tt>, or <tt>avg</tt>. Defaults
      * to <tt>max</tt>.
@@ -97,9 +86,6 @@ public class TopChildrenQueryBuilder extends BaseQueryBuilder implements Boostab
         builder.field("query");
         queryBuilder.toXContent(builder, params);
         builder.field("type", childType);
-        if (scope != null) {
-            builder.field("_scope", scope);
-        }
         if (score != null) {
             builder.field("score", score);
         }


@@ -55,7 +55,6 @@ public class TopChildrenQueryParser implements QueryParser {
         boolean queryFound = false;
         float boost = 1.0f;
         String childType = null;
-        String scope = null;
         ScoreType scoreType = ScoreType.MAX;
         int factor = 5;
         int incrementalFactor = 2;
@@ -83,7 +82,7 @@
                 if ("type".equals(currentFieldName)) {
                     childType = parser.text();
                 } else if ("_scope".equals(currentFieldName)) {
-                    scope = parser.text();
+                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [top_children] query has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("score".equals(currentFieldName)) {
                     scoreType = ScoreType.fromString(parser.text());
                 } else if ("boost".equals(currentFieldName)) {
@@ -122,7 +121,7 @@
         query = new XFilteredQuery(query, parseContext.cacheFilter(childDocMapper.typeFilter(), null));
         SearchContext searchContext = SearchContext.current();
-        TopChildrenQuery childQuery = new TopChildrenQuery(query, scope, childType, parentType, scoreType, factor, incrementalFactor);
+        TopChildrenQuery childQuery = new TopChildrenQuery(query, null, childType, parentType, scoreType, factor, incrementalFactor);
         searchContext.addScopePhase(childQuery);
         return childQuery;
     }


@@ -1,476 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.nested;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.lucene.search.NoopCollector;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Locale;
import java.util.Set;
/**
* This query requires that you index
* children and parent docs as a single block, using the
* {@link org.apache.lucene.index.IndexWriter#addDocuments} or {@link
* org.apache.lucene.index.IndexWriter#updateDocuments} API. In each block, the
* child documents must appear first, ending with the parent
* document. At search time you provide a Filter
* identifying the parents, however this Filter must provide
* an {@link org.apache.lucene.util.FixedBitSet} per sub-reader.
* <p/>
* <p>Once the block index is built, use this query to wrap
* any sub-query matching only child docs and join matches in that
* child document space up to the parent document space.
* You can then use this Query as a clause with
* other queries in the parent document space.</p>
* <p/>
* <p>The child documents must be orthogonal to the parent
* documents: the wrapped child query must never
* return a parent document.</p>
* <p/>
* If you'd like to retrieve {@link TopGroups} for the
* resulting query, use the {@link BlockJoinCollector}.
* Note that this is not necessary, ie, if you simply want
* to collect the parent documents and don't need to see
* which child documents matched under that parent, then
* you can use any collector.
* <p/>
* <p><b>NOTE</b>: If the overall query contains parent-only
* matches, for example you OR a parent-only query with a
* joined child-only query, then the resulting collected documents
* will be correct, however the {@link TopGroups} you get
* from {@link BlockJoinCollector} will not contain every
* child for parents that had matched.
* <p/>
* <p>See {@link org.apache.lucene.search.join} for an
* overview. </p>
*
* @lucene.experimental
*/
// LUCENE MONITOR: Track CHANGE
public class BlockJoinQuery extends Query {
public static enum ScoreMode {None, Avg, Max, Total}
private final Filter parentsFilter;
private final Query childQuery;
private Collector childCollector = NoopCollector.NOOP_COLLECTOR;
public BlockJoinQuery setCollector(Collector collector) {
this.childCollector = collector;
return this;
}
// If we are rewritten, this is the original childQuery we
// were passed; we use this for .equals() and
// .hashCode(). This makes rewritten query equal the
// original, so that user does not have to .rewrite() their
// query before searching:
private final Query origChildQuery;
private final ScoreMode scoreMode;
public BlockJoinQuery(Query childQuery, Filter parentsFilter, ScoreMode scoreMode) {
super();
this.origChildQuery = childQuery;
this.childQuery = childQuery;
this.parentsFilter = parentsFilter;
this.scoreMode = scoreMode;
}
private BlockJoinQuery(Query origChildQuery, Query childQuery, Filter parentsFilter, ScoreMode scoreMode) {
super();
this.origChildQuery = origChildQuery;
this.childQuery = childQuery;
this.parentsFilter = parentsFilter;
this.scoreMode = scoreMode;
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new BlockJoinWeight(this, childQuery.createWeight(searcher), parentsFilter, scoreMode, childCollector);
}
private static class BlockJoinWeight extends Weight {
private final Query joinQuery;
private final Weight childWeight;
private final Filter parentsFilter;
private final ScoreMode scoreMode;
private final Collector childCollector;
public BlockJoinWeight(Query joinQuery, Weight childWeight, Filter parentsFilter, ScoreMode scoreMode, Collector childCollector) {
super();
this.joinQuery = joinQuery;
this.childWeight = childWeight;
this.parentsFilter = parentsFilter;
this.scoreMode = scoreMode;
this.childCollector = childCollector;
}
@Override
public Query getQuery() {
return joinQuery;
}
@Override
public float getValueForNormalization() throws IOException {
return childWeight.getValueForNormalization() * joinQuery.getBoost() * joinQuery.getBoost();
}
@Override
public void normalize(float norm, float topLevelBoost) {
childWeight.normalize(norm, topLevelBoost * joinQuery.getBoost());
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
// Pass scoreDocsInOrder true, topScorer false to our sub:
final Scorer childScorer = childWeight.scorer(context, true, false, null);
if (childScorer == null) {
// No matches
return null;
}
final int firstChildDoc = childScorer.nextDoc();
if (firstChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
// No matches
return null;
}
DocIdSet parents = parentsFilter.getDocIdSet(context, null);
// TODO NESTED: We have random access in ES, not sure I understand what can be gain?
// TODO: once we do random-access filters we can
// generalize this:
if (parents == null) {
// No matches
return null;
}
if (!(parents instanceof FixedBitSet)) {
throw new IllegalStateException("parentFilter must return OpenBitSet; got " + parents);
}
// CHANGE:
if (childCollector != null) {
childCollector.setNextReader(context);
childCollector.setScorer(childScorer);
}
return new BlockJoinScorer(this, childScorer, (FixedBitSet) parents, firstChildDoc, scoreMode, childCollector, acceptDocs);
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
BlockJoinScorer scorer = (BlockJoinScorer) scorer(context, true, false, context.reader().getLiveDocs());
if (scorer != null) {
if (scorer.advance(doc) == doc) {
return scorer.explain(context.docBase);
}
}
return new ComplexExplanation(false, 0.0f, "Not a match");
}
@Override
public boolean scoresDocsOutOfOrder() {
return false;
}
}
static class BlockJoinScorer extends Scorer {
private final Scorer childScorer;
private final FixedBitSet parentBits;
private final ScoreMode scoreMode;
private final Bits acceptDocs; // LUCENE 4 UPGRADE: Why not make the parentBits already be filtered by acceptDocs?
private final Collector childCollector;
private int parentDoc = -1;
private int prevParentDoc;
private float parentScore;
private int parentFreq;
private int nextChildDoc;
private int[] pendingChildDocs = new int[5];
private float[] pendingChildScores;
private int childDocUpto;
public BlockJoinScorer(Weight weight, Scorer childScorer, FixedBitSet parentBits, int firstChildDoc, ScoreMode scoreMode, Collector childCollector, Bits acceptDocs) {
super(weight);
//System.out.println("Q.init firstChildDoc=" + firstChildDoc);
this.parentBits = parentBits;
this.childScorer = childScorer;
this.scoreMode = scoreMode;
this.acceptDocs = acceptDocs;
this.childCollector = childCollector;
if (scoreMode != ScoreMode.None) {
pendingChildScores = new float[5];
}
nextChildDoc = firstChildDoc;
}
@Override
public Collection<ChildScorer> getChildren() {
return Collections.singleton(new ChildScorer(childScorer, "BLOCK_JOIN"));
}
int getChildCount() {
return childDocUpto;
}
int[] swapChildDocs(int[] other) {
final int[] ret = pendingChildDocs;
if (other == null) {
pendingChildDocs = new int[5];
} else {
pendingChildDocs = other;
}
return ret;
}
float[] swapChildScores(float[] other) {
if (scoreMode == ScoreMode.None) {
throw new IllegalStateException("ScoreMode is None");
}
final float[] ret = pendingChildScores;
if (other == null) {
pendingChildScores = new float[5];
} else {
pendingChildScores = other;
}
return ret;
}
@Override
public int nextDoc() throws IOException {
//System.out.println("Q.nextDoc() nextChildDoc=" + nextChildDoc);
// Loop until we hit a parentDoc that's accepted
while (true) {
if (nextChildDoc == NO_MORE_DOCS) {
//System.out.println(" end");
return parentDoc = NO_MORE_DOCS;
}
// Gather all children sharing the same parent as
// nextChildDoc
parentDoc = parentBits.nextSetBit(nextChildDoc);
//System.out.println(" parentDoc=" + parentDoc);
assert parentDoc != -1;
//System.out.println(" nextChildDoc=" + nextChildDoc);
if (acceptDocs != null && !acceptDocs.get(parentDoc)) {
// Parent doc not accepted; skip child docs until
// we hit a new parent doc:
do {
nextChildDoc = childScorer.nextDoc();
} while (nextChildDoc < parentDoc);
continue;
}
int totalScore = 0;
float maxScore = Float.NEGATIVE_INFINITY;
int maxFreq = 0;
childDocUpto = 0;
parentFreq = 0;
do {
//System.out.println(" c=" + nextChildDoc);
if (pendingChildDocs.length == childDocUpto) {
pendingChildDocs = ArrayUtil.grow(pendingChildDocs);
}
if (scoreMode != ScoreMode.None && pendingChildScores.length == childDocUpto) {
pendingChildScores = ArrayUtil.grow(pendingChildScores);
}
pendingChildDocs[childDocUpto] = nextChildDoc;
if (scoreMode != ScoreMode.None) {
// TODO: specialize this into dedicated classes per-scoreMode
final float childScore = childScorer.score();
final int childFreq = childScorer.freq();
pendingChildScores[childDocUpto] = childScore;
maxScore = Math.max(childScore, maxScore);
maxFreq = Math.max(childFreq, maxFreq);
totalScore += childScore;
parentFreq += childFreq;
}
// CHANGE:
childCollector.collect(nextChildDoc);
childDocUpto++;
nextChildDoc = childScorer.nextDoc();
} while (nextChildDoc < parentDoc);
// Parent & child docs are supposed to be orthogonal:
assert nextChildDoc != parentDoc;
switch (scoreMode) {
case Avg:
parentScore = totalScore / childDocUpto;
break;
case Max:
parentScore = maxScore;
break;
case Total:
parentScore = totalScore;
break;
case None:
break;
}
//System.out.println(" return parentDoc=" + parentDoc);
return parentDoc;
}
}
@Override
public int docID() {
return parentDoc;
}
@Override
public float score() throws IOException {
return parentScore;
}
@Override
public int freq() throws IOException {
return parentFreq;
}
@Override
public int advance(int parentTarget) throws IOException {
//System.out.println("Q.advance parentTarget=" + parentTarget);
if (parentTarget == NO_MORE_DOCS) {
return parentDoc = NO_MORE_DOCS;
}
if (parentTarget == 0) {
// Callers should only be passing in a docID from
// the parent space, so this means this parent
// has no children (it got docID 0), so it cannot
// possibly match. We must handle this case
// separately otherwise we pass invalid -1 to
// prevSetBit below:
return nextDoc();
}
prevParentDoc = parentBits.prevSetBit(parentTarget - 1);
//System.out.println(" rolled back to prevParentDoc=" + prevParentDoc + " vs parentDoc=" + parentDoc);
assert prevParentDoc >= parentDoc;
if (prevParentDoc > nextChildDoc) {
nextChildDoc = childScorer.advance(prevParentDoc);
// System.out.println(" childScorer advanced to child docID=" + nextChildDoc);
//} else {
//System.out.println(" skip childScorer advance");
}
// Parent & child docs are supposed to be orthogonal:
assert nextChildDoc != prevParentDoc;
final int nd = nextDoc();
//System.out.println(" return nextParentDoc=" + nd);
return nd;
}
public Explanation explain(int docBase) throws IOException {
int start = docBase + prevParentDoc + 1; // +1 b/c prevParentDoc is previous parent doc
int end = docBase + parentDoc - 1; // -1 b/c parentDoc is parent doc
ComplexExplanation explanation = new ComplexExplanation(
true, score(), String.format(Locale.ROOT, "Score based on score mode %s and child doc range from %d to %d", scoreMode, start, end)
);
for (int i = 0; i < childDocUpto; i++) {
int childDoc = pendingChildDocs[i];
float childScore = pendingChildScores[i];
explanation.addDetail(new Explanation(childScore, String.format(Locale.ROOT, "Child[%d]", childDoc)));
}
return explanation;
}
}
@Override
public void extractTerms(Set<Term> terms) {
childQuery.extractTerms(terms);
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
final Query childRewrite = childQuery.rewrite(reader);
if (childRewrite != childQuery) {
Query rewritten = new BlockJoinQuery(childQuery,
childRewrite,
parentsFilter,
scoreMode).setCollector(childCollector);
rewritten.setBoost(getBoost());
return rewritten;
} else {
return this;
}
}
@Override
public String toString(String field) {
return "BlockJoinQuery (" + childQuery.toString() + ")";
}
@Override
public boolean equals(Object _other) {
if (_other instanceof BlockJoinQuery) {
final BlockJoinQuery other = (BlockJoinQuery) _other;
return origChildQuery.equals(other.origChildQuery) &&
parentsFilter.equals(other.parentsFilter) &&
scoreMode == other.scoreMode;
} else {
return false;
}
}
@Override
public int hashCode() {
final int prime = 31;
int hash = 1;
hash = prime * hash + origChildQuery.hashCode();
hash = prime * hash + scoreMode.hashCode();
hash = prime * hash + parentsFilter.hashCode();
return hash;
}
@Override
public Query clone() {
return new BlockJoinQuery(origChildQuery.clone(),
parentsFilter,
scoreMode).setCollector(childCollector);
}
}


@@ -50,8 +50,6 @@ import java.util.List;
  * }
  * }
  * </pre>
- *
- *
  */
 public class FacetParseElement implements SearchParseElement {
@@ -98,7 +96,7 @@
                         scope = ContextIndexSearcher.Scopes.GLOBAL;
                     }
                 } else if ("scope".equals(facetFieldName) || "_scope".equals(facetFieldName)) {
-                    scope = parser.text();
+                    throw new SearchParseException(context, "the [scope] support in facets have been removed");
                 } else if ("cache_filter".equals(facetFieldName) || "cacheFilter".equals(facetFieldName)) {
                     cacheFilter = parser.booleanValue();
                 } else if ("nested".equals(facetFieldName)) {


@@ -31,7 +31,6 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
 import org.elasticsearch.common.lucene.search.XFilteredQuery;
-import org.elasticsearch.index.search.nested.BlockJoinQuery;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.SearchPhase;
 import org.elasticsearch.search.internal.ContextIndexSearcher;
@@ -65,19 +64,6 @@
     @Override
     public void preProcess(SearchContext context) {
-        // add specific facets to nested queries...
-        if (context.nestedQueries() != null) {
-            for (Map.Entry<String, BlockJoinQuery> entry : context.nestedQueries().entrySet()) {
-                List<Collector> collectors = context.searcher().removeCollectors(entry.getKey());
-                if (collectors != null && !collectors.isEmpty()) {
-                    if (collectors.size() == 1) {
-                        entry.getValue().setCollector(collectors.get(0));
-                    } else {
-                        entry.getValue().setCollector(MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()])));
-                    }
-                }
-            }
-        }
     }

     @Override


@@ -44,7 +44,6 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.index.search.nested.BlockJoinQuery;
 import org.elasticsearch.index.service.IndexService;
 import org.elasticsearch.index.shard.service.IndexShard;
 import org.elasticsearch.index.similarity.SimilarityService;
@@ -63,9 +62,7 @@ import org.elasticsearch.search.scan.ScanContext;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;

 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;

 /**
  *
@@ -173,8 +170,6 @@
     private List<ScopePhase> scopePhases = null;

-    private Map<String, BlockJoinQuery> nestedQueries;
-
     public SearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,
                          Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard, ScriptService scriptService) {
         this.id = id;
@@ -580,17 +575,6 @@
         this.scopePhases.add(scopePhase);
     }

-    public Map<String, BlockJoinQuery> nestedQueries() {
-        return this.nestedQueries;
-    }
-
-    public void addNestedQuery(String scope, BlockJoinQuery query) {
-        if (nestedQueries == null) {
-            nestedQueries = new HashMap<String, BlockJoinQuery>();
-        }
-        nestedQueries.put(scope, query);
-    }
-
     public ScanContext scanContext() {
         if (scanContext == null) {
             scanContext = new ScanContext();


@@ -439,8 +439,16 @@ public class SimpleNestedTests extends AbstractNodesTests {
         // test scope ones
         searchResponse = client.prepareSearch("test")
-                .setQuery(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2_1", "blue")).scope("my"))
-                .addFacet(FacetBuilders.termsStatsFacet("facet1").keyField("nested1.nested2.field2_1").valueField("nested1.nested2.field2_2").scope("my"))
+                .setQuery(
+                        nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2_1", "blue"))
+                )
+                .addFacet(
+                        FacetBuilders.termsStatsFacet("facet1")
+                                .keyField("nested1.nested2.field2_1")
+                                .valueField("nested1.nested2.field2_2")
+                                .nested("nested1.nested2")
+                                .facetFilter(nestedFilter("nested1.nested2", termQuery("nested1.nested2.field2_1", "blue")).join(false))
+                )
                 .execute().actionGet();

         assertThat(Arrays.toString(searchResponse.shardFailures()), searchResponse.failedShards(), equalTo(0));
@@ -553,12 +561,13 @@
         assertThat(searchResponse.hits().totalHits(), equalTo(1l));
         Explanation explanation = searchResponse.hits().hits()[0].explanation();
         assertThat(explanation.getValue(), equalTo(2f));
-        assertThat(explanation.getDescription(), equalTo("Score based on score mode Total and child doc range from 0 to 1"));
-        assertThat(explanation.getDetails().length, equalTo(2));
-        assertThat(explanation.getDetails()[0].getValue(), equalTo(1f));
-        assertThat(explanation.getDetails()[0].getDescription(), equalTo("Child[0]"));
-        assertThat(explanation.getDetails()[1].getValue(), equalTo(1f));
-        assertThat(explanation.getDetails()[1].getDescription(), equalTo("Child[1]"));
+        assertThat(explanation.getDescription(), equalTo("Score based on child doc range from 0 to 1"));
+        // TODO: Enable when changes from BlockJoinQuery#explain are added to Lucene (Most likely version 4.2)
+        // assertThat(explanation.getDetails().length, equalTo(2));
+        // assertThat(explanation.getDetails()[0].getValue(), equalTo(1f));
+        // assertThat(explanation.getDetails()[0].getDescription(), equalTo("Child[0]"));
+        // assertThat(explanation.getDetails()[1].getValue(), equalTo(1f));
+        // assertThat(explanation.getDetails()[1].getDescription(), equalTo("Child[1]"));
     }
 }


@@ -584,8 +584,15 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
         client.admin().indices().prepareRefresh().execute().actionGet();

         SearchResponse searchResponse = client.prepareSearch("test")
-                .setQuery(topChildrenQuery("child", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow"))).scope("child1"))
-                .addFacet(termsFacet("facet1").field("c_field").scope("child1"))
+                .setQuery(
+                        topChildrenQuery("child", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")))
+                )
+                .addFacet(
+                        termsFacet("facet1")
+                                .facetFilter(boolFilter().should(termFilter("c_field", "red")).should(termFilter("c_field", "yellow")))
+                                .field("c_field")
+                                .global(true)
+                )
                 .execute().actionGet();
         assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
         assertThat(searchResponse.failedShards(), equalTo(0));