Aggregations: Allow aggregation_binary to build and parse
Previously, AggregationBuilder would wrap binary sub-aggregations (aggregationsBinary) in an `aggregations` object, which would break parsing. This has been fixed so that:

- normally specified sub-aggregations are wrapped in an `aggregations` object;
- a binary sub-aggregation whose XContentType matches the builder's is written under an `aggregations` field name with the aggregationsBinary bytes as the value (this renders the same as normal aggregations);
- a binary sub-aggregation whose content type differs from the builder's is written under an `aggregations_binary` field name, with the aggregationsBinary bytes added as a binary value.

Additionally, the logic in AggregatorParsers had to change, since it previously did not parse `aggregations_binary` fields in sub-aggregations. A check has been added for the `aggregations_binary` field name; the binary value of that field is used to create a new parser, which is then used to build the correct AggregatorFactories.

Closes #11457
Parent: f85a17ff1a
Commit: 39a20c3b5b
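For orientation before the diff, the client-side pattern this change targets looks roughly like the sketch below. It mirrors the integration tests added at the end of the commit; the index name, field names, class name, and the `Client` handle are illustrative assumptions, not anything prescribed by the change itself.

```java
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;

public class BinarySubAggregationRequest {

    // Attach a sub-aggregation that is already serialized as XContent instead of an
    // AggregationBuilder. This is the aggregationsBinary path fixed by this commit on
    // both the build side (AggregationBuilder) and the parse side (AggregatorParsers).
    public static SearchResponse searchWithBinarySubAggregation(Client client) throws Exception {
        TermsBuilder terms = AggregationBuilders.terms("terms").field("s_value");

        // Serialize the sub-aggregation definition to an XContentBuilder (JSON here).
        XContentBuilder subTerm = JsonXContent.contentBuilder().startObject();
        AggregationBuilders.terms("subterms").field("i_value").toXContent(subTerm, ToXContent.EMPTY_PARAMS);
        subTerm.endObject();

        // Previously this payload ended up wrapped in an extra "aggregations" object on
        // the wire, which the aggregation parser could not handle.
        terms.subAggregation(subTerm);

        return client.prepareSearch("idx").addAggregation(terms).execute().actionGet();
    }
}
```

The two tests at the bottom of the diff exercise this pattern twice: once with a JSON sub-aggregation and once with one built from Requests.CONTENT_TYPE, covering both the mismatched and the matching content-type branches of the builder.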
AggregationBuilder.java:

```diff
@@ -122,12 +122,13 @@ public abstract class AggregationBuilder<B extends AggregationBuilder<B>> extend
         internalXContent(builder, params);
 
-        if (aggregations != null || aggregationsBinary != null) {
-            builder.startObject("aggregations");
-
         if (aggregations != null) {
+            builder.startObject("aggregations");
             for (AbstractAggregationBuilder subAgg : aggregations) {
                 subAgg.toXContent(builder, params);
             }
+            builder.endObject();
         }
 
         if (aggregationsBinary != null) {
@@ -138,7 +139,6 @@ public abstract class AggregationBuilder<B extends AggregationBuilder<B>> extend
             }
         }
-
-            builder.endObject();
-        }
 
         return builder.endObject();
```
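The block that actually emits the binary payload sits between the two hunks above and is unchanged, so it does not appear in the diff. As a reading aid, the rule described in the commit message amounts to roughly the following; the helper method and class wrapper are mine, and the real builder applies this logic inline in toXContent:

```java
import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

// Illustrative paraphrase only, not the committed code.
class BinarySubAggregationRendering {

    static void writeBinarySubAggregations(XContentBuilder builder, byte[] aggregationsBinary) throws IOException {
        if (aggregationsBinary == null) {
            return;
        }
        if (XContentFactory.xContentType(aggregationsBinary) == builder.contentType()) {
            // Same content type as the enclosing builder: splice the bytes in under an
            // "aggregations" field, so the output renders like normal sub-aggregations.
            builder.rawField("aggregations", aggregationsBinary);
        } else {
            // Different content type: keep the payload opaque under "aggregations_binary"
            // as a binary value and let AggregatorParsers re-parse it on the other side.
            builder.field("aggregations_binary", aggregationsBinary);
        }
    }
}
```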
AggregatorParsers.java:

```diff
@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.SearchParseException;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -140,45 +141,66 @@ public class AggregatorParsers {
             final String fieldName = parser.currentName();
 
             token = parser.nextToken();
-            if (token != XContentParser.Token.START_OBJECT) {
-                throw new SearchParseException(context, "Expected [" + XContentParser.Token.START_OBJECT + "] under [" + fieldName
-                        + "], but got a [" + token + "] in [" + aggregationName + "]", parser.getTokenLocation());
-            }
 
-            switch (fieldName) {
+            if ("aggregations_binary".equals(fieldName)) {
+                if (subFactories != null) {
+                    throw new SearchParseException(context, "Found two sub aggregation definitions under [" + aggregationName + "]",
+                            parser.getTokenLocation());
+                }
+                XContentParser binaryParser = null;
+                if (token == XContentParser.Token.VALUE_STRING || token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
+                    byte[] source = parser.binaryValue();
+                    binaryParser = XContentFactory.xContent(source).createParser(source);
+                } else {
+                    throw new SearchParseException(context, "Expected [" + XContentParser.Token.VALUE_STRING + " or "
+                            + XContentParser.Token.VALUE_EMBEDDED_OBJECT + "] for [" + fieldName + "], but got a [" + token + "] in ["
+                            + aggregationName + "]", parser.getTokenLocation());
+                }
+                XContentParser.Token binaryToken = binaryParser.nextToken();
+                if (binaryToken != XContentParser.Token.START_OBJECT) {
+                    throw new SearchParseException(context, "Expected [" + XContentParser.Token.START_OBJECT
+                            + "] as first token when parsing [" + fieldName + "], but got a [" + binaryToken + "] in ["
+                            + aggregationName + "]", parser.getTokenLocation());
+                }
+                subFactories = parseAggregators(binaryParser, context, level + 1);
+            } else if (token == XContentParser.Token.START_OBJECT) {
+                switch (fieldName) {
                 case "meta":
                     metaData = parser.map();
                     break;
                 case "aggregations":
                 case "aggs":
                     if (subFactories != null) {
-                        throw new SearchParseException(context, "Found two sub aggregation definitions under [" + aggregationName + "]",
-                                parser.getTokenLocation());
+                        throw new SearchParseException(context,
+                                "Found two sub aggregation definitions under [" + aggregationName + "]", parser.getTokenLocation());
                     }
-                    subFactories = parseAggregators(parser, context, level+1);
+                    subFactories = parseAggregators(parser, context, level + 1);
                     break;
                 default:
                     if (aggFactory != null) {
-                        throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: ["
-                                + aggFactory.type + "] and [" + fieldName + "]", parser.getTokenLocation());
+                        throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName
+                                + "]: [" + aggFactory.type + "] and [" + fieldName + "]", parser.getTokenLocation());
                     }
-                    if (pipelineAggregatorFactory != null) {
-                        throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: ["
-                                + pipelineAggregatorFactory + "] and [" + fieldName + "]", parser.getTokenLocation());
+                    if (pipelineAggregatorFactory != null) {
+                        throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName
+                                + "]: [" + pipelineAggregatorFactory + "] and [" + fieldName + "]", parser.getTokenLocation());
                     }
 
                     Aggregator.Parser aggregatorParser = parser(fieldName);
                     if (aggregatorParser == null) {
-                        PipelineAggregator.Parser pipelineAggregatorParser = pipelineAggregator(fieldName);
-                        if (pipelineAggregatorParser == null) {
+                        PipelineAggregator.Parser pipelineAggregatorParser = pipelineAggregator(fieldName);
+                        if (pipelineAggregatorParser == null) {
                             throw new SearchParseException(context, "Could not find aggregator type [" + fieldName + "] in ["
-                                    + aggregationName + "]", parser.getTokenLocation());
+                                    + aggregationName + "]", parser.getTokenLocation());
                         } else {
-                            pipelineAggregatorFactory = pipelineAggregatorParser.parse(aggregationName, parser, context);
+                            pipelineAggregatorFactory = pipelineAggregatorParser.parse(aggregationName, parser, context);
                         }
                     } else {
                         aggFactory = aggregatorParser.parse(aggregationName, parser, context);
                     }
                 }
+            } else {
+                throw new SearchParseException(context, "Expected [" + XContentParser.Token.START_OBJECT + "] under [" + fieldName
+                        + "], but got a [" + token + "] in [" + aggregationName + "]", parser.getTokenLocation());
+            }
         }
```
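The essential move in the new `aggregations_binary` branch above is to sniff the content type of the raw bytes, open a fresh parser over them, and recurse with parseAggregators. A minimal standalone sketch of that round trip follows; the field names are illustrative, and it assumes the XContentBuilder#bytes()/BytesReference#toBytes() accessors of this era of Elasticsearch:

```java
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.aggregations.AggregationBuilders;

public class BinarySubAggregationRoundTrip {

    public static void main(String[] args) throws Exception {
        // Serialize a sub-aggregation definition to bytes, as a client would before
        // attaching it via subAggregation(XContentBuilder).
        XContentBuilder sub = JsonXContent.contentBuilder().startObject();
        AggregationBuilders.terms("subterms").field("i_value").toXContent(sub, ToXContent.EMPTY_PARAMS);
        sub.endObject();
        byte[] source = sub.bytes().toBytes();

        // Re-parse the bytes the way the new parser branch does: detect the content
        // type from the payload itself, then walk it as if it were inline XContent.
        XContentParser binaryParser = XContentFactory.xContent(source).createParser(source);
        XContentParser.Token first = binaryParser.nextToken();
        System.out.println("first token: " + first); // expected: START_OBJECT
    }
}
```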
AggregationsBinaryTests.java (new file, @@ -0,0 +1,142 @@):

```java
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.IsNull.notNullValue;

@ElasticsearchIntegrationTest.SuiteScopeTest
public class AggregationsBinaryTests extends ElasticsearchIntegrationTest {

    private static final String STRING_FIELD_NAME = "s_value";
    private static final String INT_FIELD_NAME = "i_value";

    @Override
    public void setupSuiteScopeCluster() throws Exception {
        createIndex("idx");
        List<IndexRequestBuilder> builders = new ArrayList<>();
        for (int i = 0; i < 5; i++) {
            builders.add(client().prepareIndex("idx", "type").setSource(
                    jsonBuilder().startObject().field(STRING_FIELD_NAME, "val" + i).field(INT_FIELD_NAME, i).endObject()));
        }
        indexRandom(true, builders);
        ensureSearchable();
    }

    @Test
    public void testAggregationsBinary() throws Exception {
        TermsBuilder termsBuilder = AggregationBuilders.terms("terms").field(STRING_FIELD_NAME);
        TermsBuilder subTerm = AggregationBuilders.terms("subterms").field(INT_FIELD_NAME);

        // Create an XContentBuilder from sub aggregation
        XContentBuilder subTermContentBuilder = JsonXContent.contentBuilder().startObject();
        subTerm.toXContent(subTermContentBuilder, ToXContent.EMPTY_PARAMS);
        subTermContentBuilder.endObject();

        // Add sub aggregation as a XContentBuilder (binary_aggregation)
        termsBuilder.subAggregation(subTermContentBuilder);

        SearchResponse response = client().prepareSearch("idx").setTypes("type").addAggregation(termsBuilder).execute().actionGet();

        assertSearchResponse(response);

        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(5));

        for (int i = 0; i < 5; i++) {
            Terms.Bucket bucket = terms.getBucketByKey("val" + i);
            assertThat(bucket, notNullValue());
            assertThat(bucket.getKeyAsString(), equalTo("val" + i));
            assertThat(bucket.getDocCount(), equalTo(1l));
            Aggregations subAggs = bucket.getAggregations();
            assertThat(subAggs, notNullValue());
            assertThat(subAggs.asList().size(), equalTo(1));
            Terms subTerms = subAggs.get("subterms");
            assertThat(subTerms, notNullValue());
            List<Bucket> subTermsBuckets = subTerms.getBuckets();
            assertThat(subTermsBuckets, notNullValue());
            assertThat(subTermsBuckets.size(), equalTo(1));
            assertThat(((Number) subTermsBuckets.get(0).getKey()).intValue(), equalTo(i));
            assertThat(subTermsBuckets.get(0).getDocCount(), equalTo(1l));
        }
    }

    @Test
    public void testAggregationsBinarySameContentType() throws Exception {
        TermsBuilder termsBuilder = AggregationBuilders.terms("terms").field(STRING_FIELD_NAME);
        TermsBuilder subTerm = AggregationBuilders.terms("subterms").field(INT_FIELD_NAME);

        // Create an XContentBuilder from sub aggregation
        XContentBuilder subTermContentBuilder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE);
        subTermContentBuilder.startObject();
        subTerm.toXContent(subTermContentBuilder, ToXContent.EMPTY_PARAMS);
        subTermContentBuilder.endObject();

        // Add sub aggregation as a XContentBuilder (binary_aggregation)
        termsBuilder.subAggregation(subTermContentBuilder);

        SearchResponse response = client().prepareSearch("idx").setTypes("type").addAggregation(termsBuilder).execute().actionGet();

        assertSearchResponse(response);

        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(5));

        for (int i = 0; i < 5; i++) {
            Terms.Bucket bucket = terms.getBucketByKey("val" + i);
            assertThat(bucket, notNullValue());
            assertThat(bucket.getKeyAsString(), equalTo("val" + i));
            assertThat(bucket.getDocCount(), equalTo(1l));
            Aggregations subAggs = bucket.getAggregations();
            assertThat(subAggs, notNullValue());
            assertThat(subAggs.asList().size(), equalTo(1));
            Terms subTerms = subAggs.get("subterms");
            assertThat(subTerms, notNullValue());
            List<Bucket> subTermsBuckets = subTerms.getBuckets();
            assertThat(subTermsBuckets, notNullValue());
            assertThat(subTermsBuckets.size(), equalTo(1));
            assertThat(((Number) subTermsBuckets.get(0).getKey()).intValue(), equalTo(i));
            assertThat(subTermsBuckets.get(0).getDocCount(), equalTo(1l));
        }
    }
}
```