Merge pull request #24824 from elastic/feature/client_aggs_parsing

Add aggs parsers for high level REST Client
This commit is contained in:
Luca Cavanna 2017-05-22 14:43:40 +02:00 committed by GitHub
commit c584c2fd71
138 changed files with 7134 additions and 496 deletions

View File

@ -1,7 +1,7 @@
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Puppy Crawl//DTD Suppressions 1.1//EN"
"http://www.puppycrawl.com/dtds/suppressions_1_1.dtd">
"-//Puppy Crawl//DTD Suppressions 1.1//EN"
"http://www.puppycrawl.com/dtds/suppressions_1_1.dtd">
<suppressions>
<!-- On Windows, Checkstyle matches files using \ path separator -->
@ -795,4 +795,4 @@
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]hamcrest[/\\]ElasticsearchAssertions.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]store[/\\]MockFSDirectoryService.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]store[/\\]MockFSIndexStore.java" checks="LineLength" />
</suppressions>
</suppressions>

View File

@ -21,32 +21,46 @@ package org.elasticsearch.action.search;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.RestActions;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.profile.ProfileShardResult;
import org.elasticsearch.search.profile.SearchProfileShardResults;
import org.elasticsearch.search.suggest.Suggest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure;
import static org.elasticsearch.search.internal.InternalSearchResponse.readInternalSearchResponse;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
/**
* A response of a search request.
*/
public class SearchResponse extends ActionResponse implements StatusToXContentObject {
private InternalSearchResponse internalResponse;
private static final ParseField SCROLL_ID = new ParseField("_scroll_id");
private static final ParseField TOOK = new ParseField("took");
private static final ParseField TIMED_OUT = new ParseField("timed_out");
private static final ParseField TERMINATED_EARLY = new ParseField("terminated_early");
private static final ParseField NUM_REDUCE_PHASES = new ParseField("num_reduce_phases");
private SearchResponseSections internalResponse;
private String scrollId;
@ -61,7 +75,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb
public SearchResponse() {
}
public SearchResponse(InternalSearchResponse internalResponse, String scrollId, int totalShards, int successfulShards,
public SearchResponse(SearchResponseSections internalResponse, String scrollId, int totalShards, int successfulShards,
long tookInMillis, ShardSearchFailure[] shardFailures) {
this.internalResponse = internalResponse;
this.scrollId = scrollId;
@ -176,7 +190,8 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb
*
* @return The profile results or an empty map
*/
@Nullable public Map<String, ProfileShardResult> getProfileResults() {
@Nullable
public Map<String, ProfileShardResult> getProfileResults() {
return internalResponse.profile();
}
@ -190,15 +205,15 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb
public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
if (scrollId != null) {
builder.field("_scroll_id", scrollId);
builder.field(SCROLL_ID.getPreferredName(), scrollId);
}
builder.field("took", tookInMillis);
builder.field("timed_out", isTimedOut());
builder.field(TOOK.getPreferredName(), tookInMillis);
builder.field(TIMED_OUT.getPreferredName(), isTimedOut());
if (isTerminatedEarly() != null) {
builder.field("terminated_early", isTerminatedEarly());
builder.field(TERMINATED_EARLY.getPreferredName(), isTerminatedEarly());
}
if (getNumReducePhases() != 1) {
builder.field("num_reduce_phases", getNumReducePhases());
builder.field(NUM_REDUCE_PHASES.getPreferredName(), getNumReducePhases());
}
RestActions.buildBroadcastShardsHeader(builder, params, getTotalShards(), getSuccessfulShards(), getFailedShards(),
getShardFailures());
@ -206,10 +221,89 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb
return builder;
}
/**
 * Parses a search response from its REST (XContent/JSON) representation, as produced by
 * {@link #innerToXContent}. Used by the high level REST client.
 *
 * @param parser parser positioned before the response's root object (the first
 *               {@code nextToken()} call must yield {@code START_OBJECT})
 * @return the parsed response
 * @throws IOException if the underlying parser fails or an unknown field / token type is found
 */
public static SearchResponse fromXContent(XContentParser parser) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
    XContentParser.Token token;
    String currentFieldName = null;
    // Response sections; each may be absent from the body and then stays null.
    SearchHits hits = null;
    Aggregations aggs = null;
    Suggest suggest = null;
    SearchProfileShardResults profile = null;
    boolean timedOut = false;
    Boolean terminatedEarly = null; // nullable: the writer only prints it when set
    int numReducePhases = 1; // the writer omits "num_reduce_phases" when it equals 1
    long tookInMillis = -1;
    int successfulShards = -1;
    int totalShards = -1;
    String scrollId = null;
    List<ShardSearchFailure> failures = new ArrayList<>();
    // Walk the top-level object field by field.
    while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token.isValue()) {
            // Top-level scalar fields.
            if (SCROLL_ID.match(currentFieldName)) {
                scrollId = parser.text();
            } else if (TOOK.match(currentFieldName)) {
                tookInMillis = parser.longValue();
            } else if (TIMED_OUT.match(currentFieldName)) {
                timedOut = parser.booleanValue();
            } else if (TERMINATED_EARLY.match(currentFieldName)) {
                terminatedEarly = parser.booleanValue();
            } else if (NUM_REDUCE_PHASES.match(currentFieldName)) {
                numReducePhases = parser.intValue();
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.START_OBJECT) {
            // Top-level object sections, each delegated to its own fromXContent.
            if (SearchHits.Fields.HITS.equals(currentFieldName)) {
                hits = SearchHits.fromXContent(parser);
            } else if (Aggregations.AGGREGATIONS_FIELD.equals(currentFieldName)) {
                aggs = Aggregations.fromXContent(parser);
            } else if (Suggest.NAME.equals(currentFieldName)) {
                suggest = Suggest.fromXContent(parser);
            } else if (SearchProfileShardResults.PROFILE_FIELD.equals(currentFieldName)) {
                profile = SearchProfileShardResults.fromXContent(parser);
            } else if (RestActions._SHARDS_FIELD.match(currentFieldName)) {
                // The "_shards" header is parsed inline. This inner loop reuses
                // currentFieldName; that is safe because the outer loop reassigns it on
                // its next FIELD_NAME token.
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token.isValue()) {
                        if (RestActions.FAILED_FIELD.match(currentFieldName)) {
                            parser.intValue(); // we don't need it but need to consume it
                        } else if (RestActions.SUCCESSFUL_FIELD.match(currentFieldName)) {
                            successfulShards = parser.intValue();
                        } else if (RestActions.TOTAL_FIELD.match(currentFieldName)) {
                            totalShards = parser.intValue();
                        } else {
                            throwUnknownField(currentFieldName, parser.getTokenLocation());
                        }
                    } else if (token == XContentParser.Token.START_ARRAY) {
                        if (RestActions.FAILURES_FIELD.match(currentFieldName)) {
                            // Each array element is one shard failure object.
                            while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                                failures.add(ShardSearchFailure.fromXContent(parser));
                            }
                        } else {
                            throwUnknownField(currentFieldName, parser.getTokenLocation());
                        }
                    } else {
                        throwUnknownToken(token, parser.getTokenLocation());
                    }
                }
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        }
    }
    // Reassemble the sections and the broadcast header into a SearchResponse.
    SearchResponseSections searchResponseSections = new SearchResponseSections(hits, aggs, suggest, timedOut, terminatedEarly,
            profile, numReducePhases);
    return new SearchResponse(searchResponseSections, scrollId, totalShards, successfulShards, tookInMillis,
            failures.toArray(new ShardSearchFailure[failures.size()]));
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
internalResponse = readInternalSearchResponse(in);
internalResponse = new InternalSearchResponse(in);
totalShards = in.readVInt();
successfulShards = in.readVInt();
int size = in.readVInt();

View File

@ -0,0 +1,122 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.profile.ProfileShardResult;
import org.elasticsearch.search.profile.SearchProfileShardResults;
import org.elasticsearch.search.suggest.Suggest;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
/**
 * Base class that groups the sections a search response is composed of (hits,
 * aggregations, suggestions, profile results) and exposes accessors for them.
 *
 * The reason why this class exists is that the high level REST client uses its own classes
 * to parse aggregations into, which are not serializable. This is the common part that can be
 * shared between core and client.
 */
public class SearchResponseSections implements ToXContent {

    protected final SearchHits hits;
    protected final Aggregations aggregations;
    protected final Suggest suggest;
    protected final SearchProfileShardResults profileResults;
    protected final boolean timedOut;
    protected final Boolean terminatedEarly;
    protected final int numReducePhases;

    public SearchResponseSections(SearchHits hits, Aggregations aggregations, Suggest suggest, boolean timedOut, Boolean terminatedEarly,
                                  SearchProfileShardResults profileResults, int numReducePhases) {
        this.numReducePhases = numReducePhases;
        this.profileResults = profileResults;
        this.terminatedEarly = terminatedEarly;
        this.timedOut = timedOut;
        this.suggest = suggest;
        this.aggregations = aggregations;
        this.hits = hits;
    }

    /** Returns the hits section. */
    public final SearchHits hits() {
        return hits;
    }

    /** Returns the aggregations section, or {@code null} when the response had none. */
    public final Aggregations aggregations() {
        return aggregations;
    }

    /** Returns the suggest section, or {@code null} when the response had none. */
    public final Suggest suggest() {
        return suggest;
    }

    /** Returns whether the search timed out. */
    public final boolean timedOut() {
        return timedOut;
    }

    /** Returns whether the search terminated early, or {@code null} when not applicable. */
    public final Boolean terminatedEarly() {
        return terminatedEarly;
    }

    /**
     * Returns the number of reduce phases applied to obtain this search response
     */
    public final int getNumReducePhases() {
        return numReducePhases;
    }

    /**
     * Returns the profile results for this search response (including all shards).
     * An empty map is returned if profiling was not enabled
     *
     * @return Profile results
     */
    public final Map<String, ProfileShardResult> profile() {
        return profileResults == null ? Collections.emptyMap() : profileResults.getShardResults();
    }

    @Override
    public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // Emission order matters for the response format: hits, then the optional sections.
        hits.toXContent(builder, params);
        if (aggregations != null) {
            aggregations.toXContent(builder, params);
        }
        if (suggest != null) {
            suggest.toXContent(builder, params);
        }
        if (profileResults != null) {
            profileResults.toXContent(builder, params);
        }
        return builder;
    }

    /**
     * Not supported at this level: only serializable subclasses override this to write
     * themselves to the transport layer.
     */
    protected void writeTo(StreamOutput out) throws IOException {
        throw new UnsupportedOperationException();
    }
}

View File

@ -395,6 +395,7 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
FLOAT(VALUE_NUMBER, VALUE_STRING),
FLOAT_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
DOUBLE(VALUE_NUMBER, VALUE_STRING),
DOUBLE_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
LONG(VALUE_NUMBER, VALUE_STRING),
LONG_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
INT(VALUE_NUMBER, VALUE_STRING),
@ -411,7 +412,8 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
OBJECT_OR_STRING(START_OBJECT, VALUE_STRING),
OBJECT_ARRAY_BOOLEAN_OR_STRING(START_OBJECT, START_ARRAY, VALUE_BOOLEAN, VALUE_STRING),
OBJECT_ARRAY_OR_STRING(START_OBJECT, START_ARRAY, VALUE_STRING),
VALUE(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING);
VALUE(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING),
VALUE_OBJECT_ARRAY(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING, START_OBJECT, START_ARRAY);
private final EnumSet<XContentParser.Token> tokens;

View File

@ -111,10 +111,9 @@ public final class XContentParserUtils {
}
/**
* This method expects that the current token is a {@code XContentParser.Token.FIELD_NAME} and
* that the current field name is the concatenation of a type, delimiter and name (ex: terms#foo
* where "terms" refers to the type of a registered {@link NamedXContentRegistry.Entry}, "#" is
* the delimiter and "foo" the name of the object to parse).
* This method expects that the current field name is the concatenation of a type, a delimiter and a name
* (ex: terms#foo where "terms" refers to the type of a registered {@link NamedXContentRegistry.Entry},
* "#" is the delimiter and "foo" the name of the object to parse).
*
* The method splits the field's name to extract the type and name and then parses the object
* using the {@link XContentParser#namedObject(Class, String, Object)} method.
@ -128,7 +127,6 @@ public final class XContentParserUtils {
* from the field's name
*/
public static <T> T parseTypedKeysObject(XContentParser parser, String delimiter, Class<T> objectClass) throws IOException {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
String currentFieldName = parser.currentName();
if (Strings.hasLength(currentFieldName)) {
int position = currentFieldName.indexOf(delimiter);

View File

@ -25,6 +25,7 @@ import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContent.Params;
@ -46,6 +47,12 @@ import java.util.List;
public class RestActions {
public static final ParseField _SHARDS_FIELD = new ParseField("_shards");
public static final ParseField TOTAL_FIELD = new ParseField("total");
public static final ParseField SUCCESSFUL_FIELD = new ParseField("successful");
public static final ParseField FAILED_FIELD = new ParseField("failed");
public static final ParseField FAILURES_FIELD = new ParseField("failures");
public static long parseVersion(RestRequest request) {
if (request.hasParam("version")) {
return request.paramAsLong("version", Versions.MATCH_ANY);
@ -71,12 +78,12 @@ public class RestActions {
public static void buildBroadcastShardsHeader(XContentBuilder builder, Params params,
int total, int successful, int failed,
ShardOperationFailedException[] shardFailures) throws IOException {
builder.startObject("_shards");
builder.field("total", total);
builder.field("successful", successful);
builder.field("failed", failed);
builder.startObject(_SHARDS_FIELD.getPreferredName());
builder.field(TOTAL_FIELD.getPreferredName(), total);
builder.field(SUCCESSFUL_FIELD.getPreferredName(), successful);
builder.field(FAILED_FIELD.getPreferredName(), failed);
if (shardFailures != null && shardFailures.length > 0) {
builder.startArray("failures");
builder.startArray(FAILURES_FIELD.getPreferredName());
final boolean group = params.paramAsBoolean("group_shard_failures", true); // we group by default
for (ShardOperationFailedException shardFailure : group ? ExceptionsHelper.groupBy(shardFailures) : shardFailures) {
builder.startObject();

View File

@ -105,10 +105,10 @@ public final class SearchHits implements Streamable, ToXContent, Iterable<Search
return this.hits;
}
static final class Fields {
static final String HITS = "hits";
static final String TOTAL = "total";
static final String MAX_SCORE = "max_score";
public static final class Fields {
public static final String HITS = "hits";
public static final String TOTAL = "total";
public static final String MAX_SCORE = "max_score";
}
@Override

View File

@ -19,13 +19,14 @@
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContent;
import java.util.Map;
/**
* An aggregation
* An aggregation. Extends {@link ToXContent} as it makes it easier to print out its content.
*/
public interface Aggregation {
public interface Aggregation extends ToXContent {
/**
* Delimiter used when prefixing aggregation names with their type
@ -38,6 +39,13 @@ public interface Aggregation {
*/
String getName();
/**
* @return a string representing the type of the aggregation. This type is added to
* the aggregation name in the response, so that it can later be used by clients
* to determine type of the aggregation and parse it into the proper object.
*/
String getType();
/**
* Get the optional byte array metadata that was set on the aggregation
*/

View File

@ -18,6 +18,13 @@
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@ -30,7 +37,9 @@ import static java.util.Collections.unmodifiableMap;
/**
* Represents a set of {@link Aggregation}s
*/
public abstract class Aggregations implements Iterable<Aggregation> {
public class Aggregations implements Iterable<Aggregation>, ToXContent {
public static final String AGGREGATIONS_FIELD = "aggregations";
protected List<? extends Aggregation> aggregations = Collections.emptyList();
protected Map<String, Aggregation> aggregationsAsMap;
@ -38,7 +47,7 @@ public abstract class Aggregations implements Iterable<Aggregation> {
protected Aggregations() {
}
protected Aggregations(List<? extends Aggregation> aggregations) {
public Aggregations(List<? extends Aggregation> aggregations) {
this.aggregations = aggregations;
}
@ -98,4 +107,35 @@ public abstract class Aggregations implements Iterable<Aggregation> {
public final int hashCode() {
return Objects.hash(getClass(), aggregations);
}
/**
 * Renders all aggregations wrapped in a single "aggregations" object, or writes
 * nothing at all when there are none.
 */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    if (aggregations.isEmpty()) {
        return builder;
    }
    builder.startObject(AGGREGATIONS_FIELD);
    toXContentInternal(builder, params);
    return builder.endObject();
}
/**
 * Directly write all the aggregations without their bounding object. Used by sub-aggregations (non top level aggs),
 * which are printed inside their parent bucket's object.
 */
public XContentBuilder toXContentInternal(XContentBuilder builder, Params params) throws IOException {
    // Each aggregation renders itself under its own key, in list order.
    for (Aggregation aggregation : aggregations) {
        aggregation.toXContent(builder, params);
    }
    return builder;
}
/**
 * Parses an aggregations object. Expects the parser to be positioned on the object's
 * {@code START_OBJECT}; each nested object is resolved through
 * {@link XContentParserUtils#parseTypedKeysObject} using the typed "type#name" key format.
 */
public static Aggregations fromXContent(XContentParser parser) throws IOException {
    final List<Aggregation> aggregations = new ArrayList<>();
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        // Non-object tokens (the field names) are skipped; parseTypedKeysObject reads
        // the current field name itself to pick the right parser.
        if (token == XContentParser.Token.START_OBJECT) {
            aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
        }
    }
    return new Aggregations(aggregations);
}
}

View File

@ -169,12 +169,8 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, Na
return pipelineAggregators;
}
/**
* Returns a string representing the type of the aggregation. This type is added to
* the aggregation name in the response, so that it can later be used by REST clients
* to determine the internal type of the aggregation.
*/
protected String getType() {
@Override
public String getType() {
return getWriteableName();
}

View File

@ -22,7 +22,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import java.io.IOException;
@ -32,6 +31,7 @@ import java.util.List;
import java.util.Map;
import static java.util.Collections.emptyMap;
/**
* An internal implementation of {@link Aggregations}.
*/
@ -80,27 +80,6 @@ public final class InternalAggregations extends Aggregations implements ToXConte
return new InternalAggregations(reducedAggregations);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (aggregations.isEmpty()) {
return builder;
}
builder.startObject("aggregations");
toXContentInternal(builder, params);
return builder.endObject();
}
/**
* Directly write all the aggregations without their bounding object. Used by sub-aggregations (non top level aggs)
*/
public XContentBuilder toXContentInternal(XContentBuilder builder, Params params) throws IOException {
for (Aggregation aggregation : aggregations) {
((InternalAggregation)aggregation).toXContent(builder, params);
}
return builder;
}
public static InternalAggregations readAggregations(StreamInput in) throws IOException {
InternalAggregations result = new InternalAggregations();
result.readFrom(in);

View File

@ -0,0 +1,87 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
/**
 * An implementation of {@link Aggregation} that is parsed back from a REST response on
 * the client side. All aggregation implementations parsed from REST extend this class,
 * which carries the aggregation name and optional metadata.
 */
public abstract class ParsedAggregation implements Aggregation, ToXContent {

    /**
     * Registers the fields shared by every parsed aggregation (currently only the
     * "meta" object) on the given object parser.
     */
    protected static void declareAggregationFields(ObjectParser<? extends ParsedAggregation, Void> objectParser) {
        objectParser.declareObject(
                (parsedAgg, metadata) -> parsedAgg.metadata = Collections.unmodifiableMap(metadata),
                (parser, context) -> parser.map(),
                InternalAggregation.CommonFields.META);
    }

    private String name;
    protected Map<String, Object> metadata;

    @Override
    public final String getName() {
        return name;
    }

    protected void setName(String name) {
        this.name = name;
    }

    @Override
    public final Map<String, Object> getMetaData() {
        return metadata;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        // The wrapping object's key concatenates type and name (ex: top_hits#foo) so the
        // typed-keys response format round-trips.
        builder.startObject(String.join(InternalAggregation.TYPED_KEYS_DELIMITER, getType(), name));
        if (this.metadata != null) {
            builder.field(InternalAggregation.CommonFields.META.getPreferredName());
            builder.map(this.metadata);
        }
        doXContentBody(builder, params);
        builder.endObject();
        return builder;
    }

    /** Renders the aggregation-specific part of the body, inside the typed-keys object. */
    protected abstract XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException;

    /**
     * Reads the current token as a double when it is a {@code VALUE_NUMBER} or a
     * {@code VALUE_STRING}; for any other token the given default is returned instead.
     */
    protected static double parseDouble(XContentParser parser, double defaultNullValue) throws IOException {
        final Token token = parser.currentToken();
        final boolean numeric = token == XContentParser.Token.VALUE_NUMBER || token == XContentParser.Token.VALUE_STRING;
        return numeric ? parser.doubleValue() : defaultNullValue;
    }
}

View File

@ -0,0 +1,181 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
 * Base class for client-side (parsed) representations of multi-bucket aggregations.
 * Holds the parsed buckets and renders them either as an array or, when the response
 * was keyed, as an object with one field per bucket.
 *
 * Note: the type parameter is bounded by {@link ParsedBucket} (this class declares no
 * nested type named {@code Bucket}); all bucket parsers supplied to
 * {@link #declareMultiBucketAggregationFields} produce {@code ParsedBucket} instances.
 */
public abstract class ParsedMultiBucketAggregation<B extends ParsedMultiBucketAggregation.ParsedBucket>
        extends ParsedAggregation implements MultiBucketsAggregation {

    protected final List<B> buckets = new ArrayList<>();
    // true when the response printed buckets as an object (keyed by bucket name)
    // rather than as an array.
    protected boolean keyed = false;

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        // Mirror the server-side format: keyed responses use an object, others an array.
        if (keyed) {
            builder.startObject(CommonFields.BUCKETS.getPreferredName());
        } else {
            builder.startArray(CommonFields.BUCKETS.getPreferredName());
        }
        for (B bucket : buckets) {
            bucket.toXContent(builder, params);
        }
        if (keyed) {
            builder.endObject();
        } else {
            builder.endArray();
        }
        return builder;
    }

    /**
     * Registers the common multi-bucket fields (the shared aggregation fields plus
     * "buckets") on the given parser. The "buckets" field accepts either an object
     * (keyed buckets, delegated to {@code keyedBucketParser}) or an array (delegated
     * to {@code bucketParser}).
     */
    protected static void declareMultiBucketAggregationFields(final ObjectParser<? extends ParsedMultiBucketAggregation, Void> objectParser,
                                                              final CheckedFunction<XContentParser, ParsedBucket, IOException> bucketParser,
                                                              final CheckedFunction<XContentParser, ParsedBucket, IOException> keyedBucketParser) {
        declareAggregationFields(objectParser);
        objectParser.declareField((parser, aggregation, context) -> {
            XContentParser.Token token = parser.currentToken();
            if (token == XContentParser.Token.START_OBJECT) {
                aggregation.keyed = true;
                while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                    aggregation.buckets.add(keyedBucketParser.apply(parser));
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                aggregation.keyed = false;
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    aggregation.buckets.add(bucketParser.apply(parser));
                }
            }
        }, CommonFields.BUCKETS, ObjectParser.ValueType.OBJECT_ARRAY);
    }

    /** A single parsed bucket: key, optional key_as_string, doc count and sub-aggregations. */
    public abstract static class ParsedBucket implements MultiBucketsAggregation.Bucket {

        private Aggregations aggregations;
        private String keyAsString;
        private long docCount;
        private boolean keyed;

        protected void setKeyAsString(String keyAsString) {
            this.keyAsString = keyAsString;
        }

        @Override
        public String getKeyAsString() {
            return keyAsString;
        }

        protected void setDocCount(long docCount) {
            this.docCount = docCount;
        }

        @Override
        public long getDocCount() {
            return docCount;
        }

        public void setKeyed(boolean keyed) {
            this.keyed = keyed;
        }

        protected boolean isKeyed() {
            return keyed;
        }

        protected void setAggregations(Aggregations aggregations) {
            this.aggregations = aggregations;
        }

        @Override
        public Aggregations getAggregations() {
            return aggregations;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (keyed) {
                // Subclasses can override the getKeyAsString method to handle specific cases like
                // keyed bucket with RAW doc value format where the key_as_string field is not printed
                // out but we still need to have a string version of the key to use as the bucket's name.
                builder.startObject(getKeyAsString());
            } else {
                builder.startObject();
            }
            if (keyAsString != null) {
                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
            }
            keyToXContent(builder);
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
            aggregations.toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }

        /** Writes the "key" field; subclasses may override for type-specific key rendering. */
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            return builder.field(CommonFields.KEY.getPreferredName(), getKey());
        }

        /**
         * Shared parsing loop for a single bucket. Reads key_as_string, key (delegated to
         * {@code keyConsumer}), doc_count and any typed-keys sub-aggregations into a fresh
         * bucket obtained from {@code bucketSupplier}.
         *
         * For keyed buckets the parser must be positioned on the bucket's field name,
         * which is consumed before the bucket object itself.
         */
        protected static <B extends ParsedBucket> B parseXContent(final XContentParser parser,
                                                                  final boolean keyed,
                                                                  final Supplier<B> bucketSupplier,
                                                                  final CheckedBiConsumer<XContentParser, B, IOException> keyConsumer)
                throws IOException {
            final B bucket = bucketSupplier.get();
            bucket.setKeyed(keyed);
            XContentParser.Token token = parser.currentToken();
            String currentFieldName = parser.currentName();
            if (keyed) {
                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            }
            List<Aggregation> aggregations = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.setKeyAsString(parser.text());
                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                        keyConsumer.accept(parser, bucket);
                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                        bucket.setDocCount(parser.longValue());
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    // Sub-aggregations appear as typed-keys objects inside the bucket.
                    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
                }
            }
            bucket.setAggregations(new Aggregations(aggregations));
            return bucket;
        }
    }
}

View File

@ -0,0 +1,93 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
* A base class for all the single bucket aggregations.
*/
public abstract class ParsedSingleBucketAggregation extends ParsedAggregation implements SingleBucketAggregation {

    // number of documents that fell into this bucket
    private long docCount;
    // sub-aggregations computed within this bucket; never null, defaults to empty
    protected Aggregations aggregations = new Aggregations(Collections.emptyList());

    @Override
    public long getDocCount() {
        return docCount;
    }

    protected void setDocCount(long docCount) {
        this.docCount = docCount;
    }

    @Override
    public Aggregations getAggregations() {
        return aggregations;
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
        aggregations.toXContentInternal(builder, params);
        return builder;
    }

    /**
     * Parses the common fields of a single-bucket aggregation ({@code doc_count},
     * {@code meta} and any typed-keys sub-aggregation objects) into {@code aggregation}.
     *
     * @param parser      positioned on the aggregation object, or on the field name
     *                    preceding it (which is then skipped)
     * @param aggregation the concrete instance to populate
     * @param name        the aggregation name to set on the result
     * @return the populated {@code aggregation} instance, for chaining
     */
    protected static <T extends ParsedSingleBucketAggregation> T parseXContent(final XContentParser parser, T aggregation, String name)
            throws IOException {
        aggregation.setName(name);
        XContentParser.Token token = parser.currentToken();
        String currentFieldName = parser.currentName();
        // tolerate being handed the parser while still on the aggregation's field name
        if (token == XContentParser.Token.FIELD_NAME) {
            token = parser.nextToken();
        }
        ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
        List<Aggregation> aggregations = new ArrayList<>();
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                    aggregation.setDocCount(parser.longValue());
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                if (CommonFields.META.getPreferredName().equals(currentFieldName)) {
                    aggregation.metadata = parser.map();
                } else {
                    // nested objects other than "meta" are sub-aggregations encoded with typed keys
                    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
                }
            }
        }
        aggregation.aggregations = new Aggregations(aggregations);
        return aggregation;
    }
}

View File

@ -0,0 +1,88 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.adjacency;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Client-side representation of an {@code adjacency_matrix} aggregation result,
 * reconstructed from its XContent form.
 */
public class ParsedAdjacencyMatrix extends ParsedMultiBucketAggregation<ParsedAdjacencyMatrix.ParsedBucket> implements AdjacencyMatrix {

    // lazily built index of buckets by key; populated on first getBucketByKey call
    private Map<String, ParsedBucket> bucketMap;

    @Override
    public String getType() {
        return AdjacencyMatrixAggregationBuilder.NAME;
    }

    @Override
    public List<? extends AdjacencyMatrix.Bucket> getBuckets() {
        return buckets;
    }

    @Override
    public ParsedBucket getBucketByKey(String key) {
        // NOTE: the lazy initialization below is not thread-safe; callers are
        // expected to use a parsed response from a single thread
        if (bucketMap == null) {
            bucketMap = new HashMap<>(buckets.size());
            for (ParsedBucket bucket : buckets) {
                bucketMap.put(bucket.getKey(), bucket);
            }
        }
        return bucketMap.get(key);
    }

    // shared constant, hence final; lenient parsing so unknown response fields are skipped
    private static final ObjectParser<ParsedAdjacencyMatrix, Void> PARSER =
            new ObjectParser<>(ParsedAdjacencyMatrix.class.getSimpleName(), true, ParsedAdjacencyMatrix::new);
    static {
        // the same bucket parser is registered for the keyed and unkeyed cases
        declareMultiBucketAggregationFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser),
                parser -> ParsedBucket.fromXContent(parser));
    }

    /** Parses a full {@code adjacency_matrix} aggregation and assigns it the given name. */
    public static ParsedAdjacencyMatrix fromXContent(XContentParser parser, String name) throws IOException {
        ParsedAdjacencyMatrix aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    /** A single matrix cell bucket; its key is the filter (or filter-intersection) name. */
    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements AdjacencyMatrix.Bucket {

        private String key;

        @Override
        public String getKey() {
            return key;
        }

        @Override
        public String getKeyAsString() {
            return key;
        }

        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
            return parseXContent(parser, false, ParsedBucket::new, (p, bucket) -> bucket.key = p.text());
        }
    }
}

View File

@ -0,0 +1,36 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.filter;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import java.io.IOException;
/**
 * Client-side representation of a {@code filter} aggregation result, parsed
 * back from its XContent form.
 */
public class ParsedFilter extends ParsedSingleBucketAggregation implements Filter {

    @Override
    public String getType() {
        // the registered name of the filter aggregation
        return FilterAggregationBuilder.NAME;
    }

    /** Parses a {@code filter} aggregation, delegating the shared single-bucket parsing to the base class. */
    public static ParsedFilter fromXContent(XContentParser parser, final String name) throws IOException {
        return parseXContent(parser, new ParsedFilter(), name);
    }
}

View File

@ -0,0 +1,141 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.filters;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
 * Client-side representation of a {@code filters} aggregation result,
 * reconstructed from its XContent form. Handles both the keyed (object) and
 * unkeyed (array) bucket encodings.
 */
public class ParsedFilters extends ParsedMultiBucketAggregation<ParsedFilters.ParsedBucket> implements Filters {

    // lazily built index of buckets by key; populated on first getBucketByKey call
    private Map<String, ParsedBucket> bucketMap;

    @Override
    public String getType() {
        return FiltersAggregationBuilder.NAME;
    }

    @Override
    public List<? extends Filters.Bucket> getBuckets() {
        return buckets;
    }

    @Override
    public ParsedBucket getBucketByKey(String key) {
        // NOTE: the lazy initialization below is not thread-safe
        if (bucketMap == null) {
            bucketMap = new HashMap<>(buckets.size());
            for (ParsedBucket bucket : buckets) {
                bucketMap.put(bucket.getKey(), bucket);
            }
        }
        return bucketMap.get(key);
    }

    // shared constant, hence final; lenient parsing so unknown response fields are skipped
    private static final ObjectParser<ParsedFilters, Void> PARSER =
            new ObjectParser<>(ParsedFilters.class.getSimpleName(), true, ParsedFilters::new);
    static {
        declareMultiBucketAggregationFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser, false),
                parser -> ParsedBucket.fromXContent(parser, true));
    }

    public static ParsedFilters fromXContent(XContentParser parser, String name) throws IOException {
        ParsedFilters aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        // in case this is not a keyed aggregation, we need to add numeric keys to the buckets
        if (aggregation.keyed == false) {
            int i = 0;
            for (ParsedBucket bucket : aggregation.buckets) {
                assert bucket.key == null;
                bucket.key = String.valueOf(i);
                i++;
            }
        }
        return aggregation;
    }

    /** A single filters bucket; its key is either the configured filter name or a positional index. */
    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Filters.Bucket {

        private String key;

        @Override
        public String getKey() {
            return key;
        }

        @Override
        public String getKeyAsString() {
            return key;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            // keyed buckets are rendered as "<key>": { ... }, unkeyed ones as anonymous objects
            if (isKeyed()) {
                builder.startObject(key);
            } else {
                builder.startObject();
            }
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
            getAggregations().toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }

        static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException {
            final ParsedBucket bucket = new ParsedBucket();
            bucket.setKeyed(keyed);
            XContentParser.Token token = parser.currentToken();
            String currentFieldName = parser.currentName();
            if (keyed) {
                // in the keyed encoding the bucket's key is the enclosing field name
                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
                bucket.key = currentFieldName;
                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            }
            List<Aggregation> aggregations = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                        bucket.setDocCount(parser.longValue());
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    // nested objects are sub-aggregations encoded with typed keys
                    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
                }
            }
            bucket.setAggregations(new Aggregations(aggregations));
            return bucket;
        }
    }
}

View File

@ -0,0 +1,78 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import java.io.IOException;
import java.util.List;
/**
 * Client-side representation of a {@code geohash_grid} aggregation result,
 * reconstructed from its XContent form.
 */
public class ParsedGeoHashGrid extends ParsedMultiBucketAggregation<ParsedGeoHashGrid.ParsedBucket> implements GeoHashGrid {

    @Override
    public String getType() {
        return GeoGridAggregationBuilder.NAME;
    }

    @Override
    public List<? extends GeoHashGrid.Bucket> getBuckets() {
        return buckets;
    }

    // shared constant, hence final; lenient parsing so unknown response fields are skipped
    private static final ObjectParser<ParsedGeoHashGrid, Void> PARSER =
            new ObjectParser<>(ParsedGeoHashGrid.class.getSimpleName(), true, ParsedGeoHashGrid::new);
    static {
        declareMultiBucketAggregationFields(PARSER, ParsedBucket::fromXContent, ParsedBucket::fromXContent);
    }

    public static ParsedGeoHashGrid fromXContent(XContentParser parser, String name) throws IOException {
        ParsedGeoHashGrid aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    /** A single grid cell bucket; the raw geohash string is kept and decoded lazily. */
    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements GeoHashGrid.Bucket {

        private String geohashAsString;

        @Override
        public GeoPoint getKey() {
            // decode the geohash into a point only on access
            return GeoPoint.fromGeohash(geohashAsString);
        }

        @Override
        public String getKeyAsString() {
            return geohashAsString;
        }

        @Override
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            // render the raw geohash string rather than the decoded GeoPoint
            return builder.field(CommonFields.KEY.getPreferredName(), geohashAsString);
        }

        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
            return parseXContent(parser, false, ParsedBucket::new, (p, bucket) -> bucket.geohashAsString = p.textOrNull());
        }
    }
}

View File

@ -0,0 +1,36 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.global;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import java.io.IOException;
/**
 * Client-side representation of a {@code global} aggregation result, parsed
 * back from its XContent form.
 */
public class ParsedGlobal extends ParsedSingleBucketAggregation implements Global {

    @Override
    public String getType() {
        // the registered name of the global aggregation
        return GlobalAggregationBuilder.NAME;
    }

    /** Parses a {@code global} aggregation, delegating the shared single-bucket parsing to the base class. */
    public static ParsedGlobal fromXContent(XContentParser parser, final String name) throws IOException {
        return parseXContent(parser, new ParsedGlobal(), name);
    }
}

View File

@ -0,0 +1,91 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.List;
/**
 * Client-side representation of a {@code date_histogram} aggregation result,
 * reconstructed from its XContent form.
 */
public class ParsedDateHistogram extends ParsedMultiBucketAggregation<ParsedDateHistogram.ParsedBucket> implements Histogram {

    @Override
    public String getType() {
        return DateHistogramAggregationBuilder.NAME;
    }

    @Override
    public List<? extends Histogram.Bucket> getBuckets() {
        return buckets;
    }

    // shared constant, hence final; lenient parsing so unknown response fields are skipped
    private static final ObjectParser<ParsedDateHistogram, Void> PARSER =
            new ObjectParser<>(ParsedDateHistogram.class.getSimpleName(), true, ParsedDateHistogram::new);
    static {
        declareMultiBucketAggregationFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser, false),
                parser -> ParsedBucket.fromXContent(parser, true));
    }

    public static ParsedDateHistogram fromXContent(XContentParser parser, String name) throws IOException {
        ParsedDateHistogram aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket {

        // bucket key as epoch milliseconds; null if the response carried no key
        private Long key;

        @Override
        public Object getKey() {
            // expose the epoch-millis key as a UTC date
            if (key != null) {
                return new DateTime(key, DateTimeZone.UTC);
            }
            return null;
        }

        @Override
        public String getKeyAsString() {
            // prefer the key_as_string sent in the response, fall back to the raw millis value
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            if (key != null) {
                return Long.toString(key);
            }
            return null;
        }

        @Override
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            // write back the numeric (epoch millis) key, not the DateTime wrapper
            return builder.field(CommonFields.KEY.getPreferredName(), key);
        }

        static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException {
            return parseXContent(parser, keyed, ParsedBucket::new, (p, bucket) -> bucket.key = p.longValue());
        }
    }
}

View File

@ -0,0 +1,80 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import java.io.IOException;
import java.util.List;
/**
 * Client-side representation of a numeric {@code histogram} aggregation result,
 * reconstructed from its XContent form.
 */
public class ParsedHistogram extends ParsedMultiBucketAggregation<ParsedHistogram.ParsedBucket> implements Histogram {

    @Override
    public String getType() {
        return HistogramAggregationBuilder.NAME;
    }

    @Override
    public List<? extends Histogram.Bucket> getBuckets() {
        return buckets;
    }

    // shared constant, hence final; lenient parsing so unknown response fields are skipped
    private static final ObjectParser<ParsedHistogram, Void> PARSER =
            new ObjectParser<>(ParsedHistogram.class.getSimpleName(), true, ParsedHistogram::new);
    static {
        declareMultiBucketAggregationFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser, false),
                parser -> ParsedBucket.fromXContent(parser, true));
    }

    public static ParsedHistogram fromXContent(XContentParser parser, String name) throws IOException {
        ParsedHistogram aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket {

        // numeric bucket key; null if the response carried no key
        private Double key;

        @Override
        public Object getKey() {
            return key;
        }

        @Override
        public String getKeyAsString() {
            // prefer the key_as_string sent in the response, fall back to the numeric key
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            if (key != null) {
                return Double.toString(key);
            }
            return null;
        }

        static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException {
            return parseXContent(parser, keyed, ParsedBucket::new, (p, bucket) -> bucket.key = p.doubleValue());
        }
    }
}

View File

@ -0,0 +1,36 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.missing;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import java.io.IOException;
/**
 * Client-side representation of a {@code missing} aggregation result, parsed
 * back from its XContent form.
 */
public class ParsedMissing extends ParsedSingleBucketAggregation implements Missing {

    @Override
    public String getType() {
        // the registered name of the missing aggregation
        return MissingAggregationBuilder.NAME;
    }

    /** Parses a {@code missing} aggregation, delegating the shared single-bucket parsing to the base class. */
    public static ParsedMissing fromXContent(XContentParser parser, final String name) throws IOException {
        return parseXContent(parser, new ParsedMissing(), name);
    }
}

View File

@ -0,0 +1,36 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.nested;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import java.io.IOException;
/**
 * Client-side representation of a {@code nested} aggregation result, parsed
 * back from its XContent form.
 */
public class ParsedNested extends ParsedSingleBucketAggregation implements Nested {

    @Override
    public String getType() {
        // the registered name of the nested aggregation
        return NestedAggregationBuilder.NAME;
    }

    /** Parses a {@code nested} aggregation, delegating the shared single-bucket parsing to the base class. */
    public static ParsedNested fromXContent(XContentParser parser, final String name) throws IOException {
        return parseXContent(parser, new ParsedNested(), name);
    }
}

View File

@ -0,0 +1,36 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.nested;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import java.io.IOException;
/**
 * Client-side representation of a {@code reverse_nested} aggregation result,
 * parsed back from its XContent form.
 * NOTE(review): this implements {@code Nested} rather than a dedicated
 * reverse-nested interface — confirm that is intentional.
 */
public class ParsedReverseNested extends ParsedSingleBucketAggregation implements Nested {

    @Override
    public String getType() {
        // the registered name of the reverse_nested aggregation
        return ReverseNestedAggregationBuilder.NAME;
    }

    /** Parses a {@code reverse_nested} aggregation, delegating the shared single-bucket parsing to the base class. */
    public static ParsedReverseNested fromXContent(XContentParser parser, final String name) throws IOException {
        return parseXContent(parser, new ParsedReverseNested(), name);
    }
}

View File

@ -44,6 +44,7 @@ import static java.util.Collections.unmodifiableList;
public final class InternalBinaryRange
extends InternalMultiBucketAggregation<InternalBinaryRange, InternalBinaryRange.Bucket>
implements Range {
public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Range.Bucket {
private final transient DocValueFormat format;

View File

@ -0,0 +1,168 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
 * Client-side representation of a binary range aggregation result (e.g.
 * {@code ip_range}), reconstructed from its XContent form. Range bounds are
 * kept as their string representations.
 */
public class ParsedBinaryRange extends ParsedMultiBucketAggregation<ParsedBinaryRange.ParsedBucket> implements Range {

    @Override
    public String getType() {
        return IpRangeAggregationBuilder.NAME;
    }

    @Override
    public List<? extends Range.Bucket> getBuckets() {
        return buckets;
    }

    // shared constant, hence final; lenient parsing so unknown response fields are skipped
    private static final ObjectParser<ParsedBinaryRange, Void> PARSER =
            new ObjectParser<>(ParsedBinaryRange.class.getSimpleName(), true, ParsedBinaryRange::new);
    static {
        declareMultiBucketAggregationFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser, false),
                parser -> ParsedBucket.fromXContent(parser, true));
    }

    public static ParsedBinaryRange fromXContent(XContentParser parser, String name) throws IOException {
        ParsedBinaryRange aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Range.Bucket {

        // explicit bucket key; null when the keyed field name equals the default range key
        private String key;
        // string form of the range bounds; null means unbounded on that side
        private String from;
        private String to;

        @Override
        public Object getKey() {
            return key;
        }

        @Override
        public String getKeyAsString() {
            return key;
        }

        @Override
        public Object getFrom() {
            return from;
        }

        @Override
        public String getFromAsString() {
            return from;
        }

        @Override
        public Object getTo() {
            return to;
        }

        @Override
        public String getToAsString() {
            return to;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (isKeyed()) {
                // keyed rendering: use the explicit key, or rebuild the default one from the bounds
                builder.startObject(key != null ? key : rangeKey(from, to));
            } else {
                builder.startObject();
                if (key != null) {
                    builder.field(CommonFields.KEY.getPreferredName(), key);
                }
            }
            if (from != null) {
                builder.field(CommonFields.FROM.getPreferredName(), getFrom());
            }
            if (to != null) {
                builder.field(CommonFields.TO.getPreferredName(), getTo());
            }
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
            getAggregations().toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }

        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
            final ParsedBucket bucket = new ParsedBucket();
            bucket.setKeyed(keyed);
            XContentParser.Token token = parser.currentToken();
            String currentFieldName = parser.currentName();
            String rangeKey = null;
            if (keyed) {
                // in the keyed encoding the enclosing field name may be either the default
                // range key or an explicit, user-provided key — decided after parsing below
                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
                rangeKey = currentFieldName;
                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            }
            List<Aggregation> aggregations = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                        bucket.key = parser.text();
                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                        bucket.setDocCount(parser.longValue());
                    } else if (CommonFields.FROM.getPreferredName().equals(currentFieldName)) {
                        bucket.from = parser.text();
                    } else if (CommonFields.TO.getPreferredName().equals(currentFieldName)) {
                        bucket.to = parser.text();
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    // nested objects are sub-aggregations encoded with typed keys
                    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
                }
            }
            bucket.setAggregations(new Aggregations(aggregations));
            if (keyed) {
                // if the field name matches the default key computed from the bounds, the
                // bucket had no explicit key; otherwise keep the user-provided key
                if (rangeKey(bucket.from, bucket.to).equals(rangeKey)) {
                    bucket.key = null;
                } else {
                    bucket.key = rangeKey;
                }
            }
            return bucket;
        }

        /** Builds the default range key, using {@code *} for an unbounded side. */
        private static String rangeKey(String from, String to) {
            return (from == null ? "*" : from) + '-' + (to == null ? "*" : to);
        }
    }
}

View File

@ -0,0 +1,193 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
 * Client-side representation of a {@code range} aggregation, built by parsing
 * the XContent of a search response. Subclasses reuse the bucket parsing for
 * date-range and geo-distance variants.
 */
public class ParsedRange extends ParsedMultiBucketAggregation<ParsedRange.ParsedBucket> implements Range {

    @Override
    public String getType() {
        return RangeAggregationBuilder.NAME;
    }

    @Override
    public List<? extends Range.Bucket> getBuckets() {
        return buckets;
    }

    /**
     * Declares the fields common to all range-like aggregations on the given parser.
     *
     * @param objectParser      the parser being configured
     * @param bucketParser      parses one bucket from the array ("unkeyed") response format
     * @param keyedBucketParser parses one bucket from the keyed (object) response format
     */
    protected static void declareParsedRangeFields(final ObjectParser<? extends ParsedRange, Void> objectParser,
                                                   final CheckedFunction<XContentParser, ParsedBucket, IOException> bucketParser,
                                                   final CheckedFunction<XContentParser, ParsedBucket, IOException> keyedBucketParser) {
        declareMultiBucketAggregationFields(objectParser, bucketParser::apply, keyedBucketParser::apply);
    }

    // The parser is stateless and fully configured in the static block below,
    // so it is declared final and shared across threads.
    private static final ObjectParser<ParsedRange, Void> PARSER =
            new ObjectParser<>(ParsedRange.class.getSimpleName(), true, ParsedRange::new);
    static {
        declareParsedRangeFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser, false),
                parser -> ParsedBucket.fromXContent(parser, true));
    }

    /** Parses a {@code range} aggregation from {@code parser} and assigns it the given name. */
    public static ParsedRange fromXContent(XContentParser parser, String name) throws IOException {
        ParsedRange aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Range.Bucket {

        // Raw bucket key; may be null when the response only carried key_as_string.
        protected String key;
        // Bounds default to +/- infinity, meaning "unbounded" when absent from the response.
        protected double from = Double.NEGATIVE_INFINITY;
        protected String fromAsString;
        protected double to = Double.POSITIVE_INFINITY;
        protected String toAsString;

        @Override
        public String getKey() {
            return getKeyAsString();
        }

        @Override
        public String getKeyAsString() {
            // Prefer the server-provided key_as_string, fall back to the raw key.
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            return key;
        }

        @Override
        public Object getFrom() {
            return from;
        }

        @Override
        public String getFromAsString() {
            if (fromAsString != null) {
                return fromAsString;
            }
            return doubleAsString(from);
        }

        @Override
        public Object getTo() {
            return to;
        }

        @Override
        public String getToAsString() {
            if (toAsString != null) {
                return toAsString;
            }
            return doubleAsString(to);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (isKeyed()) {
                builder.startObject(key);
            } else {
                builder.startObject();
                builder.field(CommonFields.KEY.getPreferredName(), key);
            }
            // Infinite bounds are implicit and are not rendered.
            if (Double.isInfinite(from) == false) {
                builder.field(CommonFields.FROM.getPreferredName(), from);
                if (fromAsString != null) {
                    builder.field(CommonFields.FROM_AS_STRING.getPreferredName(), fromAsString);
                }
            }
            if (Double.isInfinite(to) == false) {
                builder.field(CommonFields.TO.getPreferredName(), to);
                if (toAsString != null) {
                    builder.field(CommonFields.TO_AS_STRING.getPreferredName(), toAsString);
                }
            }
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
            getAggregations().toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }

        // Renders a bound as a string, or null when the bound is unbounded (infinite).
        private static String doubleAsString(double d) {
            return Double.isInfinite(d) ? null : Double.toString(d);
        }

        /**
         * Parses a single range bucket.
         *
         * @param parser         positioned on the bucket's field name (keyed format) or inside the bucket object
         * @param bucketSupplier creates the concrete bucket instance to fill in
         * @param keyed          whether the response used the keyed (object) format
         */
        protected static <B extends ParsedBucket> B parseRangeBucketXContent(final XContentParser parser,
                                                                             final Supplier<B> bucketSupplier,
                                                                             final boolean keyed) throws IOException {
            final B bucket = bucketSupplier.get();
            bucket.setKeyed(keyed);
            XContentParser.Token token = parser.currentToken();
            String currentFieldName = parser.currentName();
            if (keyed) {
                // In the keyed format the bucket key is the enclosing field name.
                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
                bucket.key = currentFieldName;
                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            }

            List<Aggregation> aggregations = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.setKeyAsString(parser.text());
                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                        bucket.key = parser.text();
                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                        bucket.setDocCount(parser.longValue());
                    } else if (CommonFields.FROM.getPreferredName().equals(currentFieldName)) {
                        bucket.from = parser.doubleValue();
                    } else if (CommonFields.FROM_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.fromAsString = parser.text();
                    } else if (CommonFields.TO.getPreferredName().equals(currentFieldName)) {
                        bucket.to = parser.doubleValue();
                    } else if (CommonFields.TO_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.toAsString = parser.text();
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    // Nested objects are sub-aggregations, named with the "type#name" convention.
                    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
                }
            }
            bucket.setAggregations(new Aggregations(aggregations));
            return bucket;
        }

        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
            return parseRangeBucketXContent(parser, ParsedBucket::new, keyed);
        }
    }
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range.date;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.range.ParsedRange;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
/**
 * Client-side representation of a {@code date_range} aggregation. Bucket bounds
 * are exposed as Joda {@link DateTime} values instead of raw doubles.
 */
public class ParsedDateRange extends ParsedRange {

    @Override
    public String getType() {
        return DateRangeAggregationBuilder.NAME;
    }

    // Stateless parser shared across threads, hence static final.
    private static final ObjectParser<ParsedDateRange, Void> PARSER =
            new ObjectParser<>(ParsedDateRange.class.getSimpleName(), true, ParsedDateRange::new);
    static {
        declareParsedRangeFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser, false),
                parser -> ParsedBucket.fromXContent(parser, true));
    }

    /** Parses a {@code date_range} aggregation from {@code parser} and assigns it the given name. */
    public static ParsedDateRange fromXContent(XContentParser parser, String name) throws IOException {
        ParsedDateRange aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedRange.ParsedBucket {

        @Override
        public Object getFrom() {
            return doubleAsDateTime(from);
        }

        @Override
        public Object getTo() {
            return doubleAsDateTime(to);
        }

        // Converts a numeric bound to a UTC DateTime, or null when the bound is
        // absent (infinite). The numeric value is interpreted as epoch millis —
        // NOTE(review): presumed from DateTime(long) usage; confirm against server format.
        private static DateTime doubleAsDateTime(Double d) {
            if (d == null || Double.isInfinite(d)) {
                return null;
            }
            return new DateTime(d.longValue(), DateTimeZone.UTC);
        }

        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
            return parseRangeBucketXContent(parser, ParsedBucket::new, keyed);
        }
    }
}

View File

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range.geodistance;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.range.ParsedRange;
import java.io.IOException;
/**
 * Client-side representation of a {@code geo_distance} aggregation. Parsing is
 * entirely inherited from {@link ParsedRange}; only the type name differs.
 */
public class ParsedGeoDistance extends ParsedRange {

    @Override
    public String getType() {
        return GeoDistanceAggregationBuilder.NAME;
    }

    // Stateless parser shared across threads, hence static final.
    private static final ObjectParser<ParsedGeoDistance, Void> PARSER =
            new ObjectParser<>(ParsedGeoDistance.class.getSimpleName(), true, ParsedGeoDistance::new);
    static {
        declareParsedRangeFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser, false),
                parser -> ParsedBucket.fromXContent(parser, true));
    }

    /** Parses a {@code geo_distance} aggregation from {@code parser} and assigns it the given name. */
    public static ParsedGeoDistance fromXContent(XContentParser parser, String name) throws IOException {
        ParsedGeoDistance aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedRange.ParsedBucket {
        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
            return parseRangeBucketXContent(parser, ParsedBucket::new, keyed);
        }
    }
}

View File

@ -29,6 +29,8 @@ import java.util.Map;
public class InternalSampler extends InternalSingleBucketAggregation implements Sampler {
public static final String NAME = "mapped_sampler";
// InternalSampler and UnmappedSampler share the same parser name, so we use this when identifying the aggregation type
public static final String PARSER_NAME = "sampler";
InternalSampler(String name, long docCount, InternalAggregations subAggregations, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
@ -48,8 +50,8 @@ public class InternalSampler extends InternalSingleBucketAggregation implements
}
@Override
protected String getType() {
return "sampler";
public String getType() {
return PARSER_NAME;
}
@Override

View File

@ -0,0 +1,36 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.sampler;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import java.io.IOException;
/**
 * Client-side representation of a {@code sampler} aggregation. The bucket body is
 * parsed by {@link ParsedSingleBucketAggregation}; only the type name is specific.
 */
public class ParsedSampler extends ParsedSingleBucketAggregation implements Sampler {

    /** Parses a {@code sampler} aggregation from {@code parser} and assigns it the given name. */
    public static ParsedSampler fromXContent(XContentParser parser, final String name) throws IOException {
        ParsedSampler sampler = new ParsedSampler();
        return parseXContent(parser, sampler, name);
    }

    @Override
    public String getType() {
        // InternalSampler and UnmappedSampler share the same parser name.
        return InternalSampler.PARSER_NAME;
    }
}

View File

@ -44,8 +44,8 @@ import java.util.Objects;
public abstract class InternalSignificantTerms<A extends InternalSignificantTerms<A, B>, B extends InternalSignificantTerms.Bucket<B>>
extends InternalMultiBucketAggregation<A, B> implements SignificantTerms, ToXContent {
private static final String SCORE = "score";
private static final String BG_COUNT = "bg_count";
public static final String SCORE = "score";
public static final String BG_COUNT = "bg_count";
@SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
public abstract static class Bucket<B extends Bucket<B>> extends InternalMultiBucketAggregation.InternalBucket

View File

@ -0,0 +1,82 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.significant;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
 * Client-side representation of a {@code significant_terms} aggregation over long keys.
 */
public class ParsedSignificantLongTerms extends ParsedSignificantTerms {

    @Override
    public String getType() {
        return SignificantLongTerms.NAME;
    }

    // Stateless parser shared across threads, hence static final.
    private static final ObjectParser<ParsedSignificantLongTerms, Void> PARSER =
            new ObjectParser<>(ParsedSignificantLongTerms.class.getSimpleName(), true, ParsedSignificantLongTerms::new);
    static {
        declareParsedSignificantTermsFields(PARSER, ParsedBucket::fromXContent);
    }

    /** Parses a {@code significant_terms} aggregation from {@code parser} and assigns it the given name. */
    public static ParsedSignificantLongTerms fromXContent(XContentParser parser, String name) throws IOException {
        ParsedSignificantLongTerms aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedSignificantTerms.ParsedBucket {

        private Long key;

        @Override
        public Object getKey() {
            return key;
        }

        @Override
        public String getKeyAsString() {
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            // Guard against a missing key instead of throwing NPE, consistent with
            // ParsedLongTerms.ParsedBucket#getKeyAsString.
            if (key != null) {
                return Long.toString(key);
            }
            return null;
        }

        public Number getKeyAsNumber() {
            return key;
        }

        @Override
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            builder.field(CommonFields.KEY.getPreferredName(), key);
            if (super.getKeyAsString() != null) {
                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
            }
            return builder;
        }

        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
            return parseSignificantTermsBucketXContent(parser, new ParsedBucket(), (p, bucket) -> bucket.key = p.longValue());
        }
    }
}

View File

@ -0,0 +1,79 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.significant;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
 * Client-side representation of a {@code significant_terms} aggregation over string keys.
 */
public class ParsedSignificantStringTerms extends ParsedSignificantTerms {

    @Override
    public String getType() {
        return SignificantStringTerms.NAME;
    }

    // Stateless parser shared across threads, hence static final.
    private static final ObjectParser<ParsedSignificantStringTerms, Void> PARSER =
            new ObjectParser<>(ParsedSignificantStringTerms.class.getSimpleName(), true, ParsedSignificantStringTerms::new);
    static {
        declareParsedSignificantTermsFields(PARSER, ParsedBucket::fromXContent);
    }

    /** Parses a {@code significant_terms} aggregation from {@code parser} and assigns it the given name. */
    public static ParsedSignificantStringTerms fromXContent(XContentParser parser, String name) throws IOException {
        ParsedSignificantStringTerms aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedSignificantTerms.ParsedBucket {

        private BytesRef key;

        @Override
        public Object getKey() {
            return getKeyAsString();
        }

        @Override
        public String getKeyAsString() {
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            // Guard against a missing key instead of throwing NPE, consistent with
            // ParsedStringTerms.ParsedBucket#getKeyAsString.
            if (key != null) {
                return key.utf8ToString();
            }
            return null;
        }

        public Number getKeyAsNumber() {
            // Same null guard as above; a missing key yields null rather than NPE.
            if (key != null) {
                return Double.parseDouble(key.utf8ToString());
            }
            return null;
        }

        @Override
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            return builder.field(CommonFields.KEY.getPreferredName(), getKey());
        }

        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
            return parseSignificantTermsBucketXContent(parser, new ParsedBucket(), (p, bucket) -> bucket.key = p.utf8BytesOrNull());
        }
    }
}

View File

@ -0,0 +1,166 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.significant;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Base class for client-side {@code significant_terms} aggregations, built by
 * parsing the XContent of a search response. Concrete subclasses supply the
 * typed bucket keys.
 */
public abstract class ParsedSignificantTerms extends ParsedMultiBucketAggregation<ParsedSignificantTerms.ParsedBucket>
        implements SignificantTerms {

    // Lazily-built index from bucket key to bucket; see getBucketByKey(String).
    private Map<String, ParsedBucket> bucketMap;
    // Size of the subset (foreground set), parsed from the aggregation's doc_count.
    protected long subsetSize;

    protected long getSubsetSize() {
        return subsetSize;
    }

    @Override
    public List<? extends SignificantTerms.Bucket> getBuckets() {
        return buckets;
    }

    @Override
    public SignificantTerms.Bucket getBucketByKey(String term) {
        if (bucketMap == null) {
            bucketMap = buckets.stream().collect(Collectors.toMap(SignificantTerms.Bucket::getKeyAsString, Function.identity()));
        }
        return bucketMap.get(term);
    }

    @Override
    public Iterator<SignificantTerms.Bucket> iterator() {
        return buckets.stream().map(bucket -> (SignificantTerms.Bucket) bucket).collect(Collectors.toList()).iterator();
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(CommonFields.DOC_COUNT.getPreferredName(), subsetSize);
        builder.startArray(CommonFields.BUCKETS.getPreferredName());
        for (SignificantTerms.Bucket bucket : buckets) {
            bucket.toXContent(builder, params);
        }
        builder.endArray();
        return builder;
    }

    /**
     * Declares the fields common to all significant-terms aggregations on the given parser.
     *
     * @param bucketParser parses a single bucket (used for both array and keyed formats)
     */
    static void declareParsedSignificantTermsFields(final ObjectParser<? extends ParsedSignificantTerms, Void> objectParser,
            final CheckedFunction<XContentParser, ParsedSignificantTerms.ParsedBucket, IOException> bucketParser) {
        declareMultiBucketAggregationFields(objectParser, bucketParser::apply, bucketParser::apply);
        objectParser.declareLong((parsedTerms, value) -> parsedTerms.subsetSize = value, CommonFields.DOC_COUNT);
    }

    public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements SignificantTerms.Bucket {

        protected long subsetDf;
        protected long supersetDf;
        protected double score;

        @Override
        public long getDocCount() {
            return getSubsetDf();
        }

        @Override
        public long getSubsetDf() {
            return subsetDf;
        }

        @Override
        public long getSupersetDf() {
            return supersetDf;
        }

        @Override
        public double getSignificanceScore() {
            return score;
        }

        // Subset/superset sizes are not part of the XContent response, so they
        // cannot be reconstructed on the client side.
        @Override
        public long getSupersetSize() {
            throw new UnsupportedOperationException();
        }

        @Override
        public long getSubsetSize() {
            throw new UnsupportedOperationException();
        }

        @Override
        public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            keyToXContent(builder);
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
            builder.field(InternalSignificantTerms.SCORE, getSignificanceScore());
            builder.field(InternalSignificantTerms.BG_COUNT, getSupersetDf());
            getAggregations().toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }

        /** Writes this bucket's typed key field(s); implemented by concrete subclasses. */
        protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException;

        /**
         * Parses the fields of one significant-terms bucket into {@code bucket}.
         *
         * @param keyConsumer reads the typed "key" value into the bucket
         */
        static <B extends ParsedBucket> B parseSignificantTermsBucketXContent(final XContentParser parser, final B bucket,
                final CheckedBiConsumer<XContentParser, B, IOException> keyConsumer) throws IOException {

            final List<Aggregation> aggregations = new ArrayList<>();
            XContentParser.Token token;
            String currentFieldName = parser.currentName();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.setKeyAsString(parser.text());
                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                        keyConsumer.accept(parser, bucket);
                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                        long value = parser.longValue();
                        bucket.subsetDf = value;
                        bucket.setDocCount(value);
                    } else if (InternalSignificantTerms.SCORE.equals(currentFieldName)) {
                        // The significance score is a fractional double (see the score
                        // field and getSignificanceScore()); parsing it with longValue()
                        // would silently truncate the fractional part.
                        bucket.score = parser.doubleValue();
                    } else if (InternalSignificantTerms.BG_COUNT.equals(currentFieldName)) {
                        bucket.supersetDf = parser.longValue();
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    // Nested objects are sub-aggregations, named with the "type#name" convention.
                    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
                }
            }
            bucket.setAggregations(new Aggregations(aggregations));
            return bucket;
        }
    }
}

View File

@ -79,7 +79,7 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms<UnmappedS
}
@Override
protected String getType() {
public String getType() {
return SignificantStringTerms.NAME;
}

View File

@ -0,0 +1,85 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
 * Client-side representation of a {@code terms} aggregation over double keys.
 */
public class ParsedDoubleTerms extends ParsedTerms {

    @Override
    public String getType() {
        return DoubleTerms.NAME;
    }

    // Stateless parser shared across threads, hence static final.
    private static final ObjectParser<ParsedDoubleTerms, Void> PARSER =
            new ObjectParser<>(ParsedDoubleTerms.class.getSimpleName(), true, ParsedDoubleTerms::new);
    static {
        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    }

    /** Parses a {@code terms} aggregation from {@code parser} and assigns it the given name. */
    public static ParsedDoubleTerms fromXContent(XContentParser parser, String name) throws IOException {
        ParsedDoubleTerms aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedTerms.ParsedBucket {

        private Double key;

        @Override
        public Object getKey() {
            return key;
        }

        @Override
        public String getKeyAsString() {
            // Prefer key_as_string from the response, fall back to rendering the key.
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            if (key != null) {
                return Double.toString(key);
            }
            return null;
        }

        public Number getKeyAsNumber() {
            return key;
        }

        @Override
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            builder.field(CommonFields.KEY.getPreferredName(), key);
            if (super.getKeyAsString() != null) {
                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
            }
            return builder;
        }

        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
            return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> bucket.key = p.doubleValue());
        }
    }
}

View File

@ -0,0 +1,85 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
 * Client-side representation of a {@code terms} aggregation over long keys.
 */
public class ParsedLongTerms extends ParsedTerms {

    @Override
    public String getType() {
        return LongTerms.NAME;
    }

    // Stateless parser shared across threads, hence static final.
    private static final ObjectParser<ParsedLongTerms, Void> PARSER =
            new ObjectParser<>(ParsedLongTerms.class.getSimpleName(), true, ParsedLongTerms::new);
    static {
        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    }

    /** Parses a {@code terms} aggregation from {@code parser} and assigns it the given name. */
    public static ParsedLongTerms fromXContent(XContentParser parser, String name) throws IOException {
        ParsedLongTerms aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedTerms.ParsedBucket {

        private Long key;

        @Override
        public Object getKey() {
            return key;
        }

        @Override
        public String getKeyAsString() {
            // Prefer key_as_string from the response, fall back to rendering the key.
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            if (key != null) {
                return Long.toString(key);
            }
            return null;
        }

        public Number getKeyAsNumber() {
            return key;
        }

        @Override
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            builder.field(CommonFields.KEY.getPreferredName(), key);
            if (super.getKeyAsString() != null) {
                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
            }
            return builder;
        }

        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
            return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> bucket.key = p.longValue());
        }
    }
}

View File

@ -0,0 +1,85 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
 * Client-side representation of a {@code terms} aggregation over string keys.
 */
public class ParsedStringTerms extends ParsedTerms {

    @Override
    public String getType() {
        return StringTerms.NAME;
    }

    // Stateless parser shared across threads, hence static final.
    private static final ObjectParser<ParsedStringTerms, Void> PARSER =
            new ObjectParser<>(ParsedStringTerms.class.getSimpleName(), true, ParsedStringTerms::new);
    static {
        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    }

    /** Parses a {@code terms} aggregation from {@code parser} and assigns it the given name. */
    public static ParsedStringTerms fromXContent(XContentParser parser, String name) throws IOException {
        ParsedStringTerms aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedTerms.ParsedBucket {

        private BytesRef key;

        @Override
        public Object getKey() {
            return getKeyAsString();
        }

        @Override
        public String getKeyAsString() {
            // Prefer key_as_string from the response, fall back to decoding the key.
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            if (key != null) {
                return key.utf8ToString();
            }
            return null;
        }

        public Number getKeyAsNumber() {
            if (key != null) {
                return Double.parseDouble(key.utf8ToString());
            }
            return null;
        }

        @Override
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            return builder.field(CommonFields.KEY.getPreferredName(), getKey());
        }

        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
            return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> bucket.key = p.utf8BytesOrNull());
        }
    }
}

View File

@ -0,0 +1,146 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
import static org.elasticsearch.search.aggregations.bucket.terms.InternalTerms.DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME;
import static org.elasticsearch.search.aggregations.bucket.terms.InternalTerms.SUM_OF_OTHER_DOC_COUNTS;
/**
 * Client-side (parsed) representation of a {@code terms} aggregation, read back from
 * a search response XContent. Concrete subclasses provide the bucket-key handling
 * for their native key type (string / long / double terms).
 */
public abstract class ParsedTerms extends ParsedMultiBucketAggregation<ParsedTerms.ParsedBucket> implements Terms {

    // upper bound on the error of the per-bucket document counts
    protected long docCountErrorUpperBound;
    // number of documents that did not make it into any of the returned buckets
    protected long sumOtherDocCount;

    @Override
    public long getDocCountError() {
        return docCountErrorUpperBound;
    }

    @Override
    public long getSumOfOtherDocCounts() {
        return sumOtherDocCount;
    }

    @Override
    public List<? extends Terms.Bucket> getBuckets() {
        return buckets;
    }

    @Override
    public Terms.Bucket getBucketByKey(String term) {
        // linear scan by string key; returns null when no bucket matches
        for (Terms.Bucket bucket : getBuckets()) {
            if (bucket.getKeyAsString().equals(term)) {
                return bucket;
            }
        }
        return null;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError());
        builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), getSumOfOtherDocCounts());
        builder.startArray(CommonFields.BUCKETS.getPreferredName());
        for (Terms.Bucket bucket : getBuckets()) {
            bucket.toXContent(builder, params);
        }
        builder.endArray();
        return builder;
    }

    /**
     * Declares the fields shared by all parsed terms aggregations on the given
     * object parser; {@code bucketParser} parses one bucket of the subclass's type.
     */
    static void declareParsedTermsFields(final ObjectParser<? extends ParsedTerms, Void> objectParser,
                                         final CheckedFunction<XContentParser, ParsedBucket, IOException> bucketParser) {
        declareMultiBucketAggregationFields(objectParser, bucketParser::apply, bucketParser::apply);
        objectParser.declareLong((parsedTerms, value) -> parsedTerms.docCountErrorUpperBound = value ,
                DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME);
        objectParser.declareLong((parsedTerms, value) -> parsedTerms.sumOtherDocCount = value,
                SUM_OF_OTHER_DOC_COUNTS);
    }

    /** One parsed terms bucket: key, doc count, optional doc-count error, sub-aggregations. */
    public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Terms.Bucket {

        // true only when the response carried a per-bucket doc-count error,
        // so re-rendering produces the same fields that were parsed
        boolean showDocCountError = false;
        protected long docCountError;

        @Override
        public long getDocCountError() {
            return docCountError;
        }

        @Override
        public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            keyToXContent(builder);
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
            if (showDocCountError) {
                builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError());
            }
            getAggregations().toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }

        // Hand-rolled token-level parsing (instead of ObjectParser) because the "key"
        // field's type differs per subclass and sub-aggregations use typed keys.
        static <B extends ParsedBucket> B parseTermsBucketXContent(final XContentParser parser, final Supplier<B> bucketSupplier,
                                                                   final CheckedBiConsumer<XContentParser, B, IOException> keyConsumer)
                throws IOException {
            final B bucket = bucketSupplier.get();
            final List<Aggregation> aggregations = new ArrayList<>();

            XContentParser.Token token;
            String currentFieldName = parser.currentName();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.setKeyAsString(parser.text());
                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                        // subclass-specific: reads the key in its native type
                        keyConsumer.accept(parser, bucket);
                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                        bucket.setDocCount(parser.longValue());
                    } else if (DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName().equals(currentFieldName)) {
                        bucket.docCountError = parser.longValue();
                        bucket.showDocCountError = true;
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    // sub-aggregations are serialized with typed keys ("type#name")
                    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
                }
            }
            bucket.setAggregations(new Aggregations(aggregations));
            return bucket;
        }
    }
}

View File

@ -74,7 +74,7 @@ public class UnmappedTerms extends InternalTerms<UnmappedTerms, UnmappedTerms.Bu
}
@Override
protected String getType() {
public String getType() {
return StringTerms.NAME;
}

View File

@ -0,0 +1,60 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.search.aggregations.ParsedAggregation;
public abstract class ParsedSingleValueNumericMetricsAggregation extends ParsedAggregation
implements NumericMetricsAggregation.SingleValue {
protected double value;
protected String valueAsString;
@Override
public String getValueAsString() {
if (valueAsString != null) {
return valueAsString;
} else {
return Double.toString(value);
}
}
@Override
public double value() {
return value;
}
protected void setValue(double value) {
this.value = value;
}
protected void setValueAsString(String valueAsString) {
this.valueAsString = valueAsString;
}
protected static void declareSingleValueFields(ObjectParser<? extends ParsedSingleValueNumericMetricsAggregation, Void> objectParser,
double defaultNullValue) {
declareAggregationFields(objectParser);
objectParser.declareField(ParsedSingleValueNumericMetricsAggregation::setValue,
(parser, context) -> parseDouble(parser, defaultNullValue), CommonFields.VALUE, ValueType.DOUBLE_OR_NULL);
objectParser.declareString(ParsedSingleValueNumericMetricsAggregation::setValueAsString, CommonFields.VALUE_AS_STRING);
}
}

View File

@ -0,0 +1,64 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.avg;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation;
import java.io.IOException;
/**
 * Client-side (parsed) representation of an {@code avg} aggregation.
 */
public class ParsedAvg extends ParsedSingleValueNumericMetricsAggregation implements Avg {

    @Override
    public double getValue() {
        return value();
    }

    @Override
    public String getType() {
        return AvgAggregationBuilder.NAME;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        // InternalAvg only renders a value when its count normalizer is non-zero; a
        // null value is parsed back as Double.POSITIVE_INFINITY, so that sentinel is
        // detected here to reproduce the exact same xContent output.
        if (value == Double.POSITIVE_INFINITY) {
            builder.nullField(CommonFields.VALUE.getPreferredName());
        } else {
            builder.field(CommonFields.VALUE.getPreferredName(), value);
            if (valueAsString != null) {
                builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
            }
        }
        return builder;
    }

    private static final ObjectParser<ParsedAvg, Void> PARSER = new ObjectParser<>(ParsedAvg.class.getSimpleName(), true, ParsedAvg::new);

    static {
        // null values round-trip through +Infinity (see doXContentBody)
        declareSingleValueFields(PARSER, Double.POSITIVE_INFINITY);
    }

    public static ParsedAvg fromXContent(XContentParser parser, final String name) {
        ParsedAvg parsed = PARSER.apply(parser, null);
        parsed.setName(name);
        return parsed;
    }
}

View File

@ -128,3 +128,4 @@ public final class InternalCardinality extends InternalNumericMetricsAggregation
return counts;
}
}

View File

@ -0,0 +1,73 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.cardinality;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import java.io.IOException;
/**
 * Client-side (parsed) representation of a {@code cardinality} aggregation.
 */
public class ParsedCardinality extends ParsedAggregation implements Cardinality {

    private long cardinalityValue;

    @Override
    public long getValue() {
        return cardinalityValue;
    }

    @Override
    public double value() {
        // SingleValue contract: expose the approximate count as a double
        return getValue();
    }

    @Override
    public String getValueAsString() {
        return Double.toString((double) cardinalityValue);
    }

    @Override
    public String getType() {
        return CardinalityAggregationBuilder.NAME;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params)
            throws IOException {
        builder.field(CommonFields.VALUE.getPreferredName(), cardinalityValue);
        return builder;
    }

    private static final ObjectParser<ParsedCardinality, Void> PARSER = new ObjectParser<>(
            ParsedCardinality.class.getSimpleName(), true, ParsedCardinality::new);

    static {
        declareAggregationFields(PARSER);
        PARSER.declareLong((aggregation, count) -> aggregation.cardinalityValue = count, CommonFields.VALUE);
    }

    public static ParsedCardinality fromXContent(XContentParser parser, final String name) {
        ParsedCardinality parsed = PARSER.apply(parser, null);
        parsed.setName(name);
        return parsed;
    }
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.metrics.geobounds;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -32,6 +33,14 @@ import java.util.Map;
import java.util.Objects;
public class InternalGeoBounds extends InternalAggregation implements GeoBounds {
static final ParseField BOUNDS_FIELD = new ParseField("bounds");
static final ParseField TOP_LEFT_FIELD = new ParseField("top_left");
static final ParseField BOTTOM_RIGHT_FIELD = new ParseField("bottom_right");
static final ParseField LAT_FIELD = new ParseField("lat");
static final ParseField LON_FIELD = new ParseField("lon");
final double top;
final double bottom;
final double posLeft;
@ -170,14 +179,14 @@ public class InternalGeoBounds extends InternalAggregation implements GeoBounds
GeoPoint topLeft = topLeft();
GeoPoint bottomRight = bottomRight();
if (topLeft != null) {
builder.startObject("bounds");
builder.startObject("top_left");
builder.field("lat", topLeft.lat());
builder.field("lon", topLeft.lon());
builder.startObject(BOUNDS_FIELD.getPreferredName());
builder.startObject(TOP_LEFT_FIELD.getPreferredName());
builder.field(LAT_FIELD.getPreferredName(), topLeft.lat());
builder.field(LON_FIELD.getPreferredName(), topLeft.lon());
builder.endObject();
builder.startObject("bottom_right");
builder.field("lat", bottomRight.lat());
builder.field("lon", bottomRight.lon());
builder.startObject(BOTTOM_RIGHT_FIELD.getPreferredName());
builder.field(LAT_FIELD.getPreferredName(), bottomRight.lat());
builder.field(LON_FIELD.getPreferredName(), bottomRight.lon());
builder.endObject();
builder.endObject();
}

View File

@ -0,0 +1,105 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.geobounds;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import java.io.IOException;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds.BOTTOM_RIGHT_FIELD;
import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds.BOUNDS_FIELD;
import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds.LAT_FIELD;
import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds.LON_FIELD;
import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds.TOP_LEFT_FIELD;
/**
 * Client-side (parsed) representation of a {@code geo_bounds} aggregation.
 */
public class ParsedGeoBounds extends ParsedAggregation implements GeoBounds {
    private GeoPoint topLeft;
    private GeoPoint bottomRight;

    @Override
    public String getType() {
        return GeoBoundsAggregationBuilder.NAME;
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        // Render the bounds only when they were present in the parsed response.
        // Uses the same ParseField constants as InternalGeoBounds (already imported
        // for the parser below) instead of repeating the string literals, so the
        // two implementations cannot drift apart. Output is byte-identical.
        if (topLeft != null) {
            builder.startObject(BOUNDS_FIELD.getPreferredName());
            builder.startObject(TOP_LEFT_FIELD.getPreferredName());
            builder.field(LAT_FIELD.getPreferredName(), topLeft.getLat());
            builder.field(LON_FIELD.getPreferredName(), topLeft.getLon());
            builder.endObject();
            builder.startObject(BOTTOM_RIGHT_FIELD.getPreferredName());
            builder.field(LAT_FIELD.getPreferredName(), bottomRight.getLat());
            builder.field(LON_FIELD.getPreferredName(), bottomRight.getLon());
            builder.endObject();
            builder.endObject();
        }
        return builder;
    }

    @Override
    public GeoPoint topLeft() {
        return topLeft;
    }

    @Override
    public GeoPoint bottomRight() {
        return bottomRight;
    }

    private static final ObjectParser<ParsedGeoBounds, Void> PARSER = new ObjectParser<>(ParsedGeoBounds.class.getSimpleName(), true,
            ParsedGeoBounds::new);

    // "bounds" wraps a "top_left" and a "bottom_right" point, each a lat/lon object
    private static final ConstructingObjectParser<Tuple<GeoPoint, GeoPoint>, Void> BOUNDS_PARSER =
            new ConstructingObjectParser<>(ParsedGeoBounds.class.getSimpleName() + "_BOUNDS", true,
                    args -> new Tuple<>((GeoPoint) args[0], (GeoPoint) args[1]));

    private static final ObjectParser<GeoPoint, Void> GEO_POINT_PARSER = new ObjectParser<>(
            ParsedGeoBounds.class.getSimpleName() + "_POINT", true, GeoPoint::new);

    static {
        declareAggregationFields(PARSER);
        PARSER.declareObject((agg, bbox) -> {
            agg.topLeft = bbox.v1();
            agg.bottomRight = bbox.v2();
        }, BOUNDS_PARSER, BOUNDS_FIELD);
        BOUNDS_PARSER.declareObject(constructorArg(), GEO_POINT_PARSER, TOP_LEFT_FIELD);
        BOUNDS_PARSER.declareObject(constructorArg(), GEO_POINT_PARSER, BOTTOM_RIGHT_FIELD);
        GEO_POINT_PARSER.declareDouble(GeoPoint::resetLat, LAT_FIELD);
        GEO_POINT_PARSER.declareDouble(GeoPoint::resetLon, LON_FIELD);
    }

    public static ParsedGeoBounds fromXContent(XContentParser parser, final String name) {
        ParsedGeoBounds geoBounds = PARSER.apply(parser, null);
        geoBounds.setName(name);
        return geoBounds;
    }
}

View File

@ -149,9 +149,9 @@ public class InternalGeoCentroid extends InternalAggregation implements GeoCentr
static class Fields {
static final ParseField CENTROID = new ParseField("location");
static final ParseField COUNT = new ParseField("count");
static final ParseField CENTROID_LAT = new ParseField("lat");
static final ParseField CENTROID_LON = new ParseField("lon");
static final ParseField COUNT = new ParseField("count");
}
@Override

View File

@ -0,0 +1,87 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.geocentroid;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroid.Fields;
import java.io.IOException;
/**
 * Client-side (parsed) representation of a {@code geo_centroid} aggregation,
 * holding the centroid location and the document count it was computed from.
 * (The previous Javadoc mentioning merge logic was copied from the internal
 * aggregator; this class only parses and re-renders XContent.)
 */
public class ParsedGeoCentroid extends ParsedAggregation implements GeoCentroid {
    private GeoPoint centroid;
    private long count;

    @Override
    public GeoPoint centroid() {
        return centroid;
    }

    @Override
    public long count() {
        return count;
    }

    @Override
    public String getType() {
        return GeoCentroidAggregationBuilder.NAME;
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        // the centroid object is only rendered when it was present in the response
        if (centroid != null) {
            builder.startObject(Fields.CENTROID.getPreferredName());
            {
                builder.field(Fields.CENTROID_LAT.getPreferredName(), centroid.lat());
                builder.field(Fields.CENTROID_LON.getPreferredName(), centroid.lon());
            }
            builder.endObject();
        }
        builder.field(Fields.COUNT.getPreferredName(), count);
        return builder;
    }

    private static final ObjectParser<ParsedGeoCentroid, Void> PARSER = new ObjectParser<>(ParsedGeoCentroid.class.getSimpleName(), true,
            ParsedGeoCentroid::new);

    // parses the nested {"lat": ..., "lon": ...} location object
    private static final ObjectParser<GeoPoint, Void> GEO_POINT_PARSER = new ObjectParser<>(
            ParsedGeoCentroid.class.getSimpleName() + "_POINT", true, GeoPoint::new);

    static {
        declareAggregationFields(PARSER);
        PARSER.declareObject((agg, centroid) -> agg.centroid = centroid, GEO_POINT_PARSER, Fields.CENTROID);
        PARSER.declareLong((agg, count) -> agg.count = count, Fields.COUNT);
        GEO_POINT_PARSER.declareDouble(GeoPoint::resetLat, Fields.CENTROID_LAT);
        GEO_POINT_PARSER.declareDouble(GeoPoint::resetLon, Fields.CENTROID_LON);
    }

    public static ParsedGeoCentroid fromXContent(XContentParser parser, final String name) {
        ParsedGeoCentroid geoCentroid = PARSER.apply(parser, null);
        geoCentroid.setName(name);
        return geoCentroid;
    }
}

View File

@ -0,0 +1,62 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.max;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation;
import java.io.IOException;
/**
 * Client-side (parsed) representation of a {@code max} aggregation.
 */
public class ParsedMax extends ParsedSingleValueNumericMetricsAggregation implements Max {

    @Override
    public double getValue() {
        return value();
    }

    @Override
    public String getType() {
        return MaxAggregationBuilder.NAME;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        // a missing value was parsed back as -Infinity, which must re-render as null
        final boolean hasValue = Double.isInfinite(value) == false;
        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
        if (hasValue && valueAsString != null) {
            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
        }
        return builder;
    }

    private static final ObjectParser<ParsedMax, Void> PARSER = new ObjectParser<>(ParsedMax.class.getSimpleName(), true, ParsedMax::new);

    static {
        // null values round-trip through -Infinity (the identity for max)
        declareSingleValueFields(PARSER, Double.NEGATIVE_INFINITY);
    }

    public static ParsedMax fromXContent(XContentParser parser, final String name) {
        ParsedMax parsed = PARSER.apply(parser, null);
        parsed.setName(name);
        return parsed;
    }
}

View File

@ -0,0 +1,62 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.min;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation;
import java.io.IOException;
/**
 * Client-side (parsed) representation of a {@code min} aggregation.
 */
public class ParsedMin extends ParsedSingleValueNumericMetricsAggregation implements Min {

    @Override
    public double getValue() {
        return value();
    }

    @Override
    public String getType() {
        return MinAggregationBuilder.NAME;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        // a missing value was parsed back as +Infinity, which must re-render as null
        final boolean hasValue = Double.isInfinite(value) == false;
        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
        if (hasValue && valueAsString != null) {
            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
        }
        return builder;
    }

    private static final ObjectParser<ParsedMin, Void> PARSER = new ObjectParser<>(ParsedMin.class.getSimpleName(), true, ParsedMin::new);

    static {
        // null values round-trip through +Infinity (the identity for min)
        declareSingleValueFields(PARSER, Double.POSITIVE_INFINITY);
    }

    public static ParsedMin fromXContent(XContentParser parser, final String name) {
        ParsedMin parsed = PARSER.apply(parser, null);
        parsed.setName(name);
        return parsed;
    }
}

View File

@ -0,0 +1,33 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles;
/**
 * Base class for parsed {@code percentile_ranks} aggregations. For ranks, the
 * lookup direction is inverted compared to percentiles: callers pass a value
 * and get back the percent of documents at or below it.
 */
public abstract class ParsedPercentileRanks extends ParsedPercentiles implements PercentileRanks {

    @Override
    public double percent(double value) {
        // delegates to the shared (key -> value) map kept by ParsedPercentiles
        return getPercentile(value);
    }

    @Override
    public String percentAsString(double value) {
        return getPercentileAsString(value);
    }
}

View File

@ -0,0 +1,179 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * Base class for parsed percentiles-style aggregations. Holds the parsed
 * (percent -> value) pairs in insertion order, plus optional formatted string
 * values, and can re-render them in either the keyed-object or the array form.
 */
public abstract class ParsedPercentiles extends ParsedAggregation implements Iterable<Percentile> {

    // insertion-ordered so re-rendered output matches the parsed response
    protected final Map<Double, Double> percentiles = new LinkedHashMap<>();
    protected final Map<Double, String> percentilesAsString = new HashMap<>();

    // whether the response used the keyed object form ({"1.0": ...}) rather than the array form
    private boolean keyed;

    void addPercentile(Double key, Double value) {
        percentiles.put(key, value);
    }

    void addPercentileAsString(Double key, String valueAsString) {
        percentilesAsString.put(key, valueAsString);
    }

    protected Double getPercentile(double percent) {
        if (percentiles.isEmpty()) {
            return Double.NaN;
        }
        // NOTE(review): returns null (not NaN) when the map is non-empty but the
        // requested percent is absent — callers unbox, so confirm that is intended
        return percentiles.get(percent);
    }

    protected String getPercentileAsString(double percent) {
        String valueAsString = percentilesAsString.get(percent);
        if (valueAsString != null) {
            return valueAsString;
        }
        Double value = getPercentile(percent);
        if (value != null) {
            return Double.toString(value);
        }
        return null;
    }

    void setKeyed(boolean keyed) {
        this.keyed = keyed;
    }

    @Override
    public Iterator<Percentile> iterator() {
        // adapts map entries to Percentile(percent, value) on the fly
        return new Iterator<Percentile>() {
            final Iterator<Map.Entry<Double, Double>> iterator = percentiles.entrySet().iterator();

            @Override
            public boolean hasNext() {
                return iterator.hasNext();
            }

            @Override
            public Percentile next() {
                Map.Entry<Double, Double> next = iterator.next();
                return new Percentile(next.getKey(), next.getValue());
            }
        };
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        final boolean valuesAsString = (percentilesAsString.isEmpty() == false);
        if (keyed) {
            // keyed form: {"values": {"1.0": v, "1.0_as_string": s, ...}}
            builder.startObject(CommonFields.VALUES.getPreferredName());
            for (Map.Entry<Double, Double> percentile : percentiles.entrySet()) {
                Double key = percentile.getKey();
                builder.field(String.valueOf(key), percentile.getValue());

                if (valuesAsString) {
                    builder.field(key + "_as_string", getPercentileAsString(key));
                }
            }
            builder.endObject();
        } else {
            // array form: {"values": [{"key": k, "value": v, "value_as_string": s}, ...]}
            builder.startArray(CommonFields.VALUES.getPreferredName());
            for (Map.Entry<Double, Double> percentile : percentiles.entrySet()) {
                Double key = percentile.getKey();
                builder.startObject();
                {
                    builder.field(CommonFields.KEY.getPreferredName(), key);
                    builder.field(CommonFields.VALUE.getPreferredName(), percentile.getValue());
                    if (valuesAsString) {
                        builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), getPercentileAsString(key));
                    }
                }
                builder.endObject();
            }
            builder.endArray();
        }
        return builder;
    }

    // Declares a hand-rolled parser for the "values" field that accepts both the
    // keyed-object and the array representation (ValueType.OBJECT_ARRAY).
    protected static void declarePercentilesFields(ObjectParser<? extends ParsedPercentiles, Void> objectParser) {
        ParsedAggregation.declareAggregationFields(objectParser);
        objectParser.declareField((parser, aggregation, context) -> {
            XContentParser.Token token = parser.currentToken();
            if (token == XContentParser.Token.START_OBJECT) {
                aggregation.setKeyed(true);
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token.isValue()) {
                        if (token == XContentParser.Token.VALUE_NUMBER) {
                            aggregation.addPercentile(Double.valueOf(parser.currentName()), parser.doubleValue());
                        } else if (token == XContentParser.Token.VALUE_STRING) {
                            // "<percent>_as_string" fields carry the formatted value;
                            // plain string fields are numeric values in disguise
                            int i = parser.currentName().indexOf("_as_string");
                            if (i > 0) {
                                double key = Double.valueOf(parser.currentName().substring(0, i));
                                aggregation.addPercentileAsString(key, parser.text());
                            } else {
                                aggregation.addPercentile(Double.valueOf(parser.currentName()), Double.valueOf(parser.text()));
                            }
                        }
                    } else if (token == XContentParser.Token.VALUE_NULL) {
                        // null values become NaN so they still appear in the map
                        aggregation.addPercentile(Double.valueOf(parser.currentName()), Double.NaN);
                    }
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                aggregation.setKeyed(false);

                String currentFieldName = null;
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    // each array element is an object with key / value / value_as_string
                    Double key = null;
                    Double value = null;
                    String valueAsString = null;

                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            currentFieldName = parser.currentName();
                        } else if (token.isValue()) {
                            if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                                key = parser.doubleValue();
                            } else if (CommonFields.VALUE.getPreferredName().equals(currentFieldName)) {
                                value = parser.doubleValue();
                            } else if (CommonFields.VALUE_AS_STRING.getPreferredName().equals(currentFieldName)) {
                                valueAsString = parser.text();
                            }
                        } else if (token == XContentParser.Token.VALUE_NULL) {
                            value = Double.NaN;
                        }
                    }
                    if (key != null) {
                        aggregation.addPercentile(key, value);
                        if (valueAsString != null) {
                            aggregation.addPercentileAsString(key, valueAsString);
                        }
                    }
                }
            }
        }, CommonFields.VALUES, ObjectParser.ValueType.OBJECT_ARRAY);
    }
}

View File

@ -0,0 +1,66 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import java.io.IOException;
import java.util.Iterator;
/**
 * Client-side result of parsing an HDR-histogram {@code percentile_ranks}
 * aggregation back from its XContent (REST response) representation.
 */
public class ParsedHDRPercentileRanks extends ParsedPercentileRanks {

    @Override
    public String getType() {
        return InternalHDRPercentileRanks.NAME;
    }

    @Override
    public Iterator<Percentile> iterator() {
        final Iterator<Percentile> iterator = super.iterator();
        return new Iterator<Percentile>() {
            @Override
            public boolean hasNext() {
                return iterator.hasNext();
            }

            @Override
            public Percentile next() {
                Percentile percentile = iterator.next();
                // value and percent are intentionally swapped here: for percentile ranks
                // the parsed key is the value and the parsed value is the percent
                return new Percentile(percentile.getValue(), percentile.getPercent());
            }
        };
    }

    // final: built exactly once in the static initializer and shared by all parses
    private static final ObjectParser<ParsedHDRPercentileRanks, Void> PARSER =
            new ObjectParser<>(ParsedHDRPercentileRanks.class.getSimpleName(), true, ParsedHDRPercentileRanks::new);

    static {
        ParsedPercentiles.declarePercentilesFields(PARSER);
    }

    /**
     * Parses one {@code percentile_ranks} aggregation and assigns it the given name
     * (the name is carried by the enclosing "aggregations" object, not the body).
     */
    public static ParsedHDRPercentileRanks fromXContent(XContentParser parser, String name) throws IOException {
        ParsedHDRPercentileRanks aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }
}

View File

@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
import java.io.IOException;
/**
 * Client-side result of parsing an HDR-histogram {@code percentiles}
 * aggregation back from its XContent (REST response) representation.
 */
public class ParsedHDRPercentiles extends ParsedPercentiles implements Percentiles {

    @Override
    public String getType() {
        return InternalHDRPercentiles.NAME;
    }

    @Override
    public double percentile(double percent) {
        return getPercentile(percent);
    }

    @Override
    public String percentileAsString(double percent) {
        return getPercentileAsString(percent);
    }

    // final: built exactly once in the static initializer and shared by all parses
    private static final ObjectParser<ParsedHDRPercentiles, Void> PARSER =
            new ObjectParser<>(ParsedHDRPercentiles.class.getSimpleName(), true, ParsedHDRPercentiles::new);

    static {
        ParsedPercentiles.declarePercentilesFields(PARSER);
    }

    /**
     * Parses one {@code percentiles} aggregation and assigns it the given name
     * (the name is carried by the enclosing "aggregations" object, not the body).
     */
    public static ParsedHDRPercentiles fromXContent(XContentParser parser, String name) throws IOException {
        ParsedHDRPercentiles aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }
}

View File

@ -0,0 +1,66 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import java.io.IOException;
import java.util.Iterator;
/**
 * Client-side result of parsing a t-digest {@code percentile_ranks}
 * aggregation back from its XContent (REST response) representation.
 */
public class ParsedTDigestPercentileRanks extends ParsedPercentileRanks {

    @Override
    public String getType() {
        return InternalTDigestPercentileRanks.NAME;
    }

    @Override
    public Iterator<Percentile> iterator() {
        final Iterator<Percentile> iterator = super.iterator();
        return new Iterator<Percentile>() {
            @Override
            public boolean hasNext() {
                return iterator.hasNext();
            }

            @Override
            public Percentile next() {
                Percentile percentile = iterator.next();
                // value and percent are intentionally swapped here: for percentile ranks
                // the parsed key is the value and the parsed value is the percent
                return new Percentile(percentile.getValue(), percentile.getPercent());
            }
        };
    }

    // final: built exactly once in the static initializer and shared by all parses
    private static final ObjectParser<ParsedTDigestPercentileRanks, Void> PARSER =
            new ObjectParser<>(ParsedTDigestPercentileRanks.class.getSimpleName(), true, ParsedTDigestPercentileRanks::new);

    static {
        ParsedPercentiles.declarePercentilesFields(PARSER);
    }

    /**
     * Parses one {@code percentile_ranks} aggregation and assigns it the given name
     * (the name is carried by the enclosing "aggregations" object, not the body).
     */
    public static ParsedTDigestPercentileRanks fromXContent(XContentParser parser, String name) throws IOException {
        ParsedTDigestPercentileRanks aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }
}

View File

@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
import java.io.IOException;
/**
 * Client-side result of parsing a t-digest {@code percentiles}
 * aggregation back from its XContent (REST response) representation.
 */
public class ParsedTDigestPercentiles extends ParsedPercentiles implements Percentiles {

    @Override
    public String getType() {
        return InternalTDigestPercentiles.NAME;
    }

    @Override
    public double percentile(double percent) {
        return getPercentile(percent);
    }

    @Override
    public String percentileAsString(double percent) {
        return getPercentileAsString(percent);
    }

    // final: built exactly once in the static initializer and shared by all parses
    private static final ObjectParser<ParsedTDigestPercentiles, Void> PARSER =
            new ObjectParser<>(ParsedTDigestPercentiles.class.getSimpleName(), true, ParsedTDigestPercentiles::new);

    static {
        ParsedPercentiles.declarePercentilesFields(PARSER);
    }

    /**
     * Parses one {@code percentiles} aggregation and assigns it the given name
     * (the name is carried by the enclosing "aggregations" object, not the body).
     */
    public static ParsedTDigestPercentiles fromXContent(XContentParser parser, String name) throws IOException {
        ParsedTDigestPercentiles aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }
}

View File

@ -124,7 +124,7 @@ public class InternalScriptedMetric extends InternalAggregation implements Scrip
@Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
return builder.field("value", aggregation());
return builder.field(CommonFields.VALUE.getPreferredName(), aggregation());
}
@Override

View File

@ -0,0 +1,92 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.scripted;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
/**
 * Client-side result of parsing a {@code scripted_metric} aggregation back
 * from its XContent (REST response) representation. The result value can be
 * of any XContent-representable type.
 */
public class ParsedScriptedMetric extends ParsedAggregation implements ScriptedMetric {

    // single-element list holding the parsed result value
    private List<Object> aggregation;

    @Override
    public String getType() {
        return ScriptedMetricAggregationBuilder.NAME;
    }

    @Override
    public Object aggregation() {
        assert aggregation.size() == 1; // see InternalScriptedMetric#aggregations() for why we can assume this
        return aggregation.get(0);
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        return builder.field(CommonFields.VALUE.getPreferredName(), aggregation());
    }

    private static final ObjectParser<ParsedScriptedMetric, Void> PARSER =
            new ObjectParser<>(ParsedScriptedMetric.class.getSimpleName(), true, ParsedScriptedMetric::new);

    static {
        declareAggregationFields(PARSER);
        PARSER.declareField((agg, value) -> agg.aggregation = Collections.singletonList(value),
                ParsedScriptedMetric::parseValue, CommonFields.VALUE, ValueType.VALUE_OBJECT_ARRAY);
    }

    /** Reads back a single scripted-metric result value, whatever XContent token type it was serialized as. */
    private static Object parseValue(XContentParser parser) throws IOException {
        Token token = parser.currentToken();
        if (token == XContentParser.Token.START_OBJECT) {
            return parser.map();
        } else if (token == XContentParser.Token.START_ARRAY) {
            return parser.list();
        } else if (token == XContentParser.Token.VALUE_STRING) {
            //binary values will be parsed back and returned as base64 strings when reading from json and yaml
            return parser.text();
        } else if (token == XContentParser.Token.VALUE_NUMBER) {
            return parser.numberValue();
        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
            return parser.booleanValue();
        } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
            //binary values will be parsed back and returned as BytesArray when reading from cbor and smile
            return new BytesArray(parser.binaryValue());
        }
        // VALUE_NULL and any unrecognized token parse to null
        return null;
    }

    public static ParsedScriptedMetric fromXContent(XContentParser parser, final String name) {
        ParsedScriptedMetric parsed = PARSER.apply(parser, null);
        parsed.setName(name);
        return parsed;
    }
}

View File

@ -177,21 +177,28 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue
@Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.COUNT, count);
builder.field(Fields.MIN, count != 0 ? min : null);
builder.field(Fields.MAX, count != 0 ? max : null);
builder.field(Fields.AVG, count != 0 ? getAvg() : null);
builder.field(Fields.SUM, count != 0 ? sum : null);
if (count != 0 && format != DocValueFormat.RAW) {
builder.field(Fields.MIN_AS_STRING, format.format(min));
builder.field(Fields.MAX_AS_STRING, format.format(max));
builder.field(Fields.AVG_AS_STRING, format.format(getAvg()));
builder.field(Fields.SUM_AS_STRING, format.format(sum));
if (count != 0) {
builder.field(Fields.MIN, min);
builder.field(Fields.MAX, max);
builder.field(Fields.AVG, getAvg());
builder.field(Fields.SUM, sum);
if (format != DocValueFormat.RAW) {
builder.field(Fields.MIN_AS_STRING, format.format(min));
builder.field(Fields.MAX_AS_STRING, format.format(max));
builder.field(Fields.AVG_AS_STRING, format.format(getAvg()));
builder.field(Fields.SUM_AS_STRING, format.format(sum));
}
} else {
builder.nullField(Fields.MIN);
builder.nullField(Fields.MAX);
builder.nullField(Fields.AVG);
builder.nullField(Fields.SUM);
}
otherStatsToXCotent(builder, params);
otherStatsToXContent(builder, params);
return builder;
}
protected XContentBuilder otherStatsToXCotent(XContentBuilder builder, Params params) throws IOException {
protected XContentBuilder otherStatsToXContent(XContentBuilder builder, Params params) throws IOException {
return builder;
}

View File

@ -0,0 +1,155 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.stats;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.metrics.stats.InternalStats.Fields;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
 * Client-side result of parsing a {@code stats} aggregation back from its
 * XContent (REST response) representation. Mirrors the output shape of
 * InternalStats, including explicit null fields when the doc count is zero.
 */
public class ParsedStats extends ParsedAggregation implements Stats {

    protected long count;
    protected double min;
    protected double max;
    protected double sum;
    protected double avg;

    // formatted renderings keyed by their "*_as_string" field name, present only
    // when the response actually carried them
    protected final Map<String, String> valueAsString = new HashMap<>();

    @Override
    public long getCount() {
        return count;
    }

    @Override
    public double getMin() {
        return min;
    }

    @Override
    public double getMax() {
        return max;
    }

    @Override
    public double getAvg() {
        return avg;
    }

    @Override
    public double getSum() {
        return sum;
    }

    @Override
    public String getMinAsString() {
        // fall back to the raw double when no formatted value was present
        return valueAsString.getOrDefault(Fields.MIN_AS_STRING, Double.toString(min));
    }

    @Override
    public String getMaxAsString() {
        return valueAsString.getOrDefault(Fields.MAX_AS_STRING, Double.toString(max));
    }

    @Override
    public String getAvgAsString() {
        return valueAsString.getOrDefault(Fields.AVG_AS_STRING, Double.toString(avg));
    }

    @Override
    public String getSumAsString() {
        return valueAsString.getOrDefault(Fields.SUM_AS_STRING, Double.toString(sum));
    }

    @Override
    public String getType() {
        return StatsAggregationBuilder.NAME;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(Fields.COUNT, count);
        if (count == 0) {
            // an empty aggregation renders explicit nulls, matching InternalStats
            builder.nullField(Fields.MIN);
            builder.nullField(Fields.MAX);
            builder.nullField(Fields.AVG);
            builder.nullField(Fields.SUM);
        } else {
            builder.field(Fields.MIN, min);
            builder.field(Fields.MAX, max);
            builder.field(Fields.AVG, avg);
            builder.field(Fields.SUM, sum);
            // formatted values are either all present or all absent
            if (valueAsString.get(Fields.MIN_AS_STRING) != null) {
                builder.field(Fields.MIN_AS_STRING, getMinAsString());
                builder.field(Fields.MAX_AS_STRING, getMaxAsString());
                builder.field(Fields.AVG_AS_STRING, getAvgAsString());
                builder.field(Fields.SUM_AS_STRING, getSumAsString());
            }
        }
        otherStatsToXContent(builder, params);
        return builder;
    }

    private static final ObjectParser<ParsedStats, Void> PARSER = new ObjectParser<>(ParsedStats.class.getSimpleName(), true,
            ParsedStats::new);

    static {
        declareStatsFields(PARSER);
    }

    /**
     * Registers the common aggregation fields plus the stats fields on the given
     * parser, so subclasses can reuse the same declarations.
     */
    protected static void declareStatsFields(ObjectParser<? extends ParsedStats, Void> objectParser) {
        declareAggregationFields(objectParser);
        objectParser.declareLong((stats, value) -> stats.count = value, new ParseField(Fields.COUNT));
        // numeric fields may be serialized as null when count == 0, hence DOUBLE_OR_NULL
        // with the same defaults InternalStats uses for an empty aggregation
        objectParser.declareField((stats, value) -> stats.min = value, (parser, context) -> parseDouble(parser, Double.POSITIVE_INFINITY),
                new ParseField(Fields.MIN), ValueType.DOUBLE_OR_NULL);
        objectParser.declareField((stats, value) -> stats.max = value, (parser, context) -> parseDouble(parser, Double.NEGATIVE_INFINITY),
                new ParseField(Fields.MAX), ValueType.DOUBLE_OR_NULL);
        objectParser.declareField((stats, value) -> stats.avg = value, (parser, context) -> parseDouble(parser, 0),
                new ParseField(Fields.AVG), ValueType.DOUBLE_OR_NULL);
        objectParser.declareField((stats, value) -> stats.sum = value, (parser, context) -> parseDouble(parser, 0),
                new ParseField(Fields.SUM), ValueType.DOUBLE_OR_NULL);
        objectParser.declareString((stats, value) -> stats.valueAsString.put(Fields.MIN_AS_STRING, value),
                new ParseField(Fields.MIN_AS_STRING));
        objectParser.declareString((stats, value) -> stats.valueAsString.put(Fields.MAX_AS_STRING, value),
                new ParseField(Fields.MAX_AS_STRING));
        objectParser.declareString((stats, value) -> stats.valueAsString.put(Fields.AVG_AS_STRING, value),
                new ParseField(Fields.AVG_AS_STRING));
        objectParser.declareString((stats, value) -> stats.valueAsString.put(Fields.SUM_AS_STRING, value),
                new ParseField(Fields.SUM_AS_STRING));
    }

    public static ParsedStats fromXContent(XContentParser parser, final String name) {
        ParsedStats parsedStats = PARSER.apply(parser, null);
        parsedStats.setName(name);
        return parsedStats;
    }

    /** Hook for subclasses (e.g. extended stats) to append their extra fields. */
    protected XContentBuilder otherStatsToXContent(XContentBuilder builder, Params params) throws IOException {
        return builder;
    }
}

View File

@ -169,25 +169,38 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat
}
@Override
protected XContentBuilder otherStatsToXCotent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.SUM_OF_SQRS, count != 0 ? sumOfSqrs : null);
builder.field(Fields.VARIANCE, count != 0 ? getVariance() : null);
builder.field(Fields.STD_DEVIATION, count != 0 ? getStdDeviation() : null);
builder.startObject(Fields.STD_DEVIATION_BOUNDS)
.field(Fields.UPPER, count != 0 ? getStdDeviationBound(Bounds.UPPER) : null)
.field(Fields.LOWER, count != 0 ? getStdDeviationBound(Bounds.LOWER) : null)
.endObject();
if (count != 0 && format != DocValueFormat.RAW) {
builder.field(Fields.SUM_OF_SQRS_AS_STRING, format.format(sumOfSqrs));
builder.field(Fields.VARIANCE_AS_STRING, format.format(getVariance()));
builder.field(Fields.STD_DEVIATION_AS_STRING, getStdDeviationAsString());
builder.startObject(Fields.STD_DEVIATION_BOUNDS_AS_STRING)
.field(Fields.UPPER, getStdDeviationBoundAsString(Bounds.UPPER))
.field(Fields.LOWER, getStdDeviationBoundAsString(Bounds.LOWER))
.endObject();
protected XContentBuilder otherStatsToXContent(XContentBuilder builder, Params params) throws IOException {
if (count != 0) {
builder.field(Fields.SUM_OF_SQRS, sumOfSqrs);
builder.field(Fields.VARIANCE, getVariance());
builder.field(Fields.STD_DEVIATION, getStdDeviation());
builder.startObject(Fields.STD_DEVIATION_BOUNDS);
{
builder.field(Fields.UPPER, getStdDeviationBound(Bounds.UPPER));
builder.field(Fields.LOWER, getStdDeviationBound(Bounds.LOWER));
}
builder.endObject();
if (format != DocValueFormat.RAW) {
builder.field(Fields.SUM_OF_SQRS_AS_STRING, format.format(sumOfSqrs));
builder.field(Fields.VARIANCE_AS_STRING, format.format(getVariance()));
builder.field(Fields.STD_DEVIATION_AS_STRING, getStdDeviationAsString());
builder.startObject(Fields.STD_DEVIATION_BOUNDS_AS_STRING);
{
builder.field(Fields.UPPER, getStdDeviationBoundAsString(Bounds.UPPER));
builder.field(Fields.LOWER, getStdDeviationBoundAsString(Bounds.LOWER));
}
builder.endObject();
}
} else {
builder.nullField(Fields.SUM_OF_SQRS);
builder.nullField(Fields.VARIANCE);
builder.nullField(Fields.STD_DEVIATION);
builder.startObject(Fields.STD_DEVIATION_BOUNDS);
{
builder.nullField(Fields.UPPER);
builder.nullField(Fields.LOWER);
}
builder.endObject();
}
return builder;
}

View File

@ -0,0 +1,188 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.stats.extended;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats;
import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats.Fields;
import java.io.IOException;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
 * Client-side result of parsing an {@code extended_stats} aggregation back from
 * its XContent (REST response) representation. Builds on {@link ParsedStats}
 * and adds sum of squares, variance and standard deviation (plus bounds).
 */
public class ParsedExtendedStats extends ParsedStats implements ExtendedStats {

    protected double sumOfSquares;
    protected double variance;
    protected double stdDeviation;
    protected double stdDeviationBoundUpper;
    protected double stdDeviationBoundLower;
    // NOTE: "sum" and "avg" are inherited from ParsedStats and populated by
    // declareStatsFields; re-declaring them here would only shadow the base fields.

    @Override
    public String getType() {
        return ExtendedStatsAggregationBuilder.NAME;
    }

    @Override
    public double getSumOfSquares() {
        return sumOfSquares;
    }

    @Override
    public double getVariance() {
        return variance;
    }

    @Override
    public double getStdDeviation() {
        return stdDeviation;
    }

    private void setStdDeviationBounds(Tuple<Double, Double> bounds) {
        this.stdDeviationBoundLower = bounds.v1();
        this.stdDeviationBoundUpper = bounds.v2();
    }

    @Override
    public double getStdDeviationBound(Bounds bound) {
        return (bound.equals(Bounds.LOWER)) ? stdDeviationBoundLower : stdDeviationBoundUpper;
    }

    @Override
    public String getStdDeviationAsString() {
        return valueAsString.getOrDefault(Fields.STD_DEVIATION_AS_STRING, Double.toString(stdDeviation));
    }

    private void setStdDeviationBoundsAsString(Tuple<String, String> boundsAsString) {
        // bounds share one response object, so store them under synthetic per-bound keys
        this.valueAsString.put(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower", boundsAsString.v1());
        this.valueAsString.put(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper", boundsAsString.v2());
    }

    @Override
    public String getStdDeviationBoundAsString(Bounds bound) {
        if (bound.equals(Bounds.LOWER)) {
            return valueAsString.getOrDefault(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower", Double.toString(stdDeviationBoundLower));
        } else {
            return valueAsString.getOrDefault(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper", Double.toString(stdDeviationBoundUpper));
        }
    }

    @Override
    public String getSumOfSquaresAsString() {
        return valueAsString.getOrDefault(Fields.SUM_OF_SQRS_AS_STRING, Double.toString(sumOfSquares));
    }

    @Override
    public String getVarianceAsString() {
        return valueAsString.getOrDefault(Fields.VARIANCE_AS_STRING, Double.toString(variance));
    }

    @Override
    protected XContentBuilder otherStatsToXContent(XContentBuilder builder, Params params) throws IOException {
        if (count != 0) {
            builder.field(Fields.SUM_OF_SQRS, sumOfSquares);
            builder.field(Fields.VARIANCE, getVariance());
            builder.field(Fields.STD_DEVIATION, getStdDeviation());
            builder.startObject(Fields.STD_DEVIATION_BOUNDS);
            {
                builder.field(Fields.UPPER, getStdDeviationBound(Bounds.UPPER));
                builder.field(Fields.LOWER, getStdDeviationBound(Bounds.LOWER));
            }
            builder.endObject();
            if (valueAsString.containsKey(Fields.SUM_OF_SQRS_AS_STRING)) {
                builder.field(Fields.SUM_OF_SQRS_AS_STRING, getSumOfSquaresAsString());
                builder.field(Fields.VARIANCE_AS_STRING, getVarianceAsString());
                builder.field(Fields.STD_DEVIATION_AS_STRING, getStdDeviationAsString());
                builder.startObject(Fields.STD_DEVIATION_BOUNDS_AS_STRING);
                {
                    builder.field(Fields.UPPER, getStdDeviationBoundAsString(Bounds.UPPER));
                    builder.field(Fields.LOWER, getStdDeviationBoundAsString(Bounds.LOWER));
                }
                builder.endObject();
            }
        } else {
            // an empty aggregation renders explicit nulls, matching InternalExtendedStats
            builder.nullField(Fields.SUM_OF_SQRS);
            builder.nullField(Fields.VARIANCE);
            builder.nullField(Fields.STD_DEVIATION);
            builder.startObject(Fields.STD_DEVIATION_BOUNDS);
            {
                builder.nullField(Fields.UPPER);
                builder.nullField(Fields.LOWER);
            }
            builder.endObject();
        }
        return builder;
    }

    private static final ObjectParser<ParsedExtendedStats, Void> PARSER = new ObjectParser<>(ParsedExtendedStats.class.getSimpleName(),
            true, ParsedExtendedStats::new);

    private static final ConstructingObjectParser<Tuple<Double, Double>, Void> STD_BOUNDS_PARSER = new ConstructingObjectParser<>(
            ParsedExtendedStats.class.getSimpleName() + "_STD_BOUNDS", true, args -> new Tuple<>((Double) args[0], (Double) args[1]));

    private static final ConstructingObjectParser<Tuple<String, String>, Void> STD_BOUNDS_AS_STRING_PARSER = new ConstructingObjectParser<>(
            ParsedExtendedStats.class.getSimpleName() + "_STD_BOUNDS_AS_STRING", true,
            args -> new Tuple<>((String) args[0], (String) args[1]));

    static {
        STD_BOUNDS_PARSER.declareField(constructorArg(), (parser, context) -> parseDouble(parser, 0),
                new ParseField(Fields.LOWER), ValueType.DOUBLE_OR_NULL);
        STD_BOUNDS_PARSER.declareField(constructorArg(), (parser, context) -> parseDouble(parser, 0),
                new ParseField(Fields.UPPER), ValueType.DOUBLE_OR_NULL);
        STD_BOUNDS_AS_STRING_PARSER.declareString(constructorArg(), new ParseField(Fields.LOWER));
        STD_BOUNDS_AS_STRING_PARSER.declareString(constructorArg(), new ParseField(Fields.UPPER));
        declareExtendedStatsFields(PARSER);
    }

    /**
     * Registers the stats fields (which include the common aggregation fields)
     * plus the extended-stats specific fields on the given parser.
     */
    protected static void declareExtendedStatsFields(ObjectParser<? extends ParsedExtendedStats, Void> objectParser) {
        // declareStatsFields already registers the common aggregation fields,
        // so they must not be registered a second time here
        declareStatsFields(objectParser);
        objectParser.declareField((agg, value) -> agg.sumOfSquares = value, (parser, context) -> parseDouble(parser, 0),
                new ParseField(Fields.SUM_OF_SQRS), ValueType.DOUBLE_OR_NULL);
        objectParser.declareField((agg, value) -> agg.variance = value, (parser, context) -> parseDouble(parser, 0),
                new ParseField(Fields.VARIANCE), ValueType.DOUBLE_OR_NULL);
        objectParser.declareField((agg, value) -> agg.stdDeviation = value, (parser, context) -> parseDouble(parser, 0),
                new ParseField(Fields.STD_DEVIATION), ValueType.DOUBLE_OR_NULL);
        objectParser.declareObject(ParsedExtendedStats::setStdDeviationBounds, STD_BOUNDS_PARSER,
                new ParseField(Fields.STD_DEVIATION_BOUNDS));
        objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.SUM_OF_SQRS_AS_STRING, value),
                new ParseField(Fields.SUM_OF_SQRS_AS_STRING));
        objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.VARIANCE_AS_STRING, value),
                new ParseField(Fields.VARIANCE_AS_STRING));
        objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.STD_DEVIATION_AS_STRING, value),
                new ParseField(Fields.STD_DEVIATION_AS_STRING));
        objectParser.declareObject(ParsedExtendedStats::setStdDeviationBoundsAsString, STD_BOUNDS_AS_STRING_PARSER,
                new ParseField(Fields.STD_DEVIATION_BOUNDS_AS_STRING));
    }

    public static ParsedExtendedStats fromXContent(XContentParser parser, final String name) {
        ParsedExtendedStats parsedStats = PARSER.apply(parser, null);
        parsedStats.setName(name);
        return parsedStats;
    }
}

View File

@ -0,0 +1,61 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.sum;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation;
import java.io.IOException;
/**
 * Client-side result of parsing a {@code sum} aggregation back from its
 * XContent (REST response) representation.
 */
public class ParsedSum extends ParsedSingleValueNumericMetricsAggregation implements Sum {

    @Override
    public double getValue() {
        return value();
    }

    @Override
    public String getType() {
        return SumAggregationBuilder.NAME;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(CommonFields.VALUE.getPreferredName(), value);
        // the formatted rendering is only present when the response carried it
        if (valueAsString != null) {
            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
        }
        return builder;
    }

    private static final ObjectParser<ParsedSum, Void> PARSER = new ObjectParser<>(ParsedSum.class.getSimpleName(), true, ParsedSum::new);

    static {
        declareSingleValueFields(PARSER, Double.NEGATIVE_INFINITY);
    }

    public static ParsedSum fromXContent(XContentParser parser, final String name) {
        ParsedSum parsed = PARSER.apply(parser, null);
        parsed.setName(name);
        return parsed;
    }
}

View File

@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.tophits;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import java.io.IOException;
/**
 * Client-side result of parsing a {@code top_hits} aggregation back from its
 * XContent (REST response) representation.
 */
public class ParsedTopHits extends ParsedAggregation implements TopHits {

    private SearchHits searchHits;

    @Override
    public String getType() {
        return TopHitsAggregationBuilder.NAME;
    }

    @Override
    public SearchHits getHits() {
        return searchHits;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        return searchHits.toXContent(builder, params);
    }

    // final: built exactly once in the static initializer and shared by all parses
    private static final ObjectParser<ParsedTopHits, Void> PARSER =
            new ObjectParser<>(ParsedTopHits.class.getSimpleName(), true, ParsedTopHits::new);

    static {
        declareAggregationFields(PARSER);
        PARSER.declareObject((topHit, searchHits) -> topHit.searchHits = searchHits, (parser, context) -> SearchHits.fromXContent(parser),
                new ParseField(SearchHits.Fields.HITS));
    }

    /**
     * Parses one {@code top_hits} aggregation and assigns it the given name
     * (the name is carried by the enclosing "aggregations" object, not the body).
     */
    public static ParsedTopHits fromXContent(XContentParser parser, String name) throws IOException {
        ParsedTopHits aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.valuecount;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import java.io.IOException;
/**
 * Client-side representation of a {@code value_count} aggregation, parsed from
 * the XContent of a REST search response.
 */
public class ParsedValueCount extends ParsedAggregation implements ValueCount {

    private long valueCount;

    @Override
    public double value() {
        return getValue();
    }

    @Override
    public long getValue() {
        return valueCount;
    }

    @Override
    public String getValueAsString() {
        // InternalValueCount doesn't print "value_as_string", but you can get a formatted value using
        // getValueAsString() using the raw formatter and converting the value to double
        return Double.toString(valueCount);
    }

    @Override
    public String getType() {
        return ValueCountAggregationBuilder.NAME;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(CommonFields.VALUE.getPreferredName(), valueCount);
        return builder;
    }

    private static final ObjectParser<ParsedValueCount, Void> PARSER = new ObjectParser<>(ParsedValueCount.class.getSimpleName(), true,
            ParsedValueCount::new);

    static {
        declareAggregationFields(PARSER);
        PARSER.declareLong((agg, value) -> agg.valueCount = value, CommonFields.VALUE);
    }

    /**
     * Parses a {@code value_count} aggregation object and assigns it the given name.
     *
     * @param parser positioned inside the aggregation object
     * @param name the aggregation name taken from the typed-keys response key
     */
    public static ParsedValueCount fromXContent(XContentParser parser, final String name) {
        // FIX: local was named "sum" (copy-paste from ParsedSum); renamed for clarity.
        ParsedValueCount parsedValueCount = PARSER.apply(parser, null);
        parsedValueCount.setName(name);
        return parsedValueCount;
    }
}

View File

@ -0,0 +1,58 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation;
import java.io.IOException;
/**
 * Client-side view of a pipeline {@code simple_value} aggregation, parsed from
 * the XContent of a REST search response.
 */
public class ParsedSimpleValue extends ParsedSingleValueNumericMetricsAggregation implements SimpleValue {

    // Lenient parser: unknown fields from newer servers are ignored.
    private static final ObjectParser<ParsedSimpleValue, Void> PARSER = new ObjectParser<>(ParsedSimpleValue.class.getSimpleName(), true,
            ParsedSimpleValue::new);

    static {
        // NaN is the "no value" marker for simple values.
        declareSingleValueFields(PARSER, Double.NaN);
    }

    @Override
    public String getType() {
        return InternalSimpleValue.NAME;
    }

    /**
     * Parses a {@code simple_value} aggregation object and assigns it the given name.
     */
    public static ParsedSimpleValue fromXContent(XContentParser parser, final String name) {
        final ParsedSimpleValue parsed = PARSER.apply(parser, null);
        parsed.setName(name);
        return parsed;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        // A NaN value is rendered as an explicit null, and suppresses "value_as_string".
        final boolean valuePresent = Double.isNaN(value) == false;
        builder.field(CommonFields.VALUE.getPreferredName(), valuePresent ? value : null);
        if (valuePresent && valueAsString != null) {
            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
        }
        return builder;
    }
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -28,11 +29,14 @@ import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggre
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
public class InternalBucketMetricValue extends InternalNumericMetricsAggregation.SingleValue implements BucketMetricValue {
public static final String NAME = "bucket_metric_value";
static final ParseField KEYS_FIELD = new ParseField("keys");
private double value;
private String[] keys;
@ -88,7 +92,7 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation
return this;
} else if (path.size() == 1 && "value".equals(path.get(0))) {
return value();
} else if (path.size() == 1 && "keys".equals(path.get(0))) {
} else if (path.size() == 1 && KEYS_FIELD.getPreferredName().equals(path.get(0))) {
return keys();
} else {
throw new IllegalArgumentException("path not supported for [" + getName() + "]: " + path);
@ -102,7 +106,7 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation
if (hasValue && format != DocValueFormat.RAW) {
builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
}
builder.startArray("keys");
builder.startArray(KEYS_FIELD.getPreferredName());
for (String key : keys) {
builder.value(key);
}
@ -110,4 +114,15 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation
return builder;
}
@Override
protected int doHashCode() {
return Objects.hash(value, Arrays.hashCode(keys));
}
@Override
protected boolean doEquals(Object obj) {
InternalBucketMetricValue other = (InternalBucketMetricValue) obj;
return Objects.equals(value, other.value)
&& Arrays.equals(keys, other.keys);
}
}

View File

@ -0,0 +1,73 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
/**
 * Client-side view of a {@code bucket_metric_value} pipeline aggregation
 * (produced by e.g. max_bucket / min_bucket), parsed from a REST response.
 */
public class ParsedBucketMetricValue extends ParsedSingleValueNumericMetricsAggregation implements BucketMetricValue {

    private List<String> keys = Collections.emptyList();

    @Override
    public String[] keys() {
        return this.keys.toArray(new String[keys.size()]);
    }

    @Override
    public String getType() {
        return InternalBucketMetricValue.NAME;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        // An infinite value means "no value" — it matches the NEGATIVE_INFINITY default declared below.
        // FIX: use "== false" instead of "!" for consistency with the convention used elsewhere
        // in these parsers (e.g. ParsedDerivative.doXContentBody).
        boolean hasValue = Double.isInfinite(value) == false;
        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
        if (hasValue && valueAsString != null) {
            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
        }
        builder.startArray(InternalBucketMetricValue.KEYS_FIELD.getPreferredName());
        for (String key : keys) {
            builder.value(key);
        }
        builder.endArray();
        return builder;
    }

    private static final ObjectParser<ParsedBucketMetricValue, Void> PARSER = new ObjectParser<>(
            ParsedBucketMetricValue.class.getSimpleName(), true, ParsedBucketMetricValue::new);

    static {
        declareSingleValueFields(PARSER, Double.NEGATIVE_INFINITY);
        PARSER.declareStringArray((agg, value) -> agg.keys = value, InternalBucketMetricValue.KEYS_FIELD);
    }

    /**
     * Parses a {@code bucket_metric_value} aggregation object and assigns it the given name.
     */
    public static ParsedBucketMetricValue fromXContent(XContentParser parser, final String name) {
        ParsedBucketMetricValue bucketMetricValue = PARSER.apply(parser, null);
        bucketMetricValue.setName(name);
        return bucketMetricValue;
    }
}

View File

@ -0,0 +1,88 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
import java.io.IOException;
import java.util.Map.Entry;
/**
 * Client-side view of a {@code percentiles_bucket} pipeline aggregation, parsed
 * from the XContent of a REST search response.
 */
public class ParsedPercentilesBucket extends ParsedPercentiles implements Percentiles {

    @Override
    public String getType() {
        return PercentilesBucketPipelineAggregationBuilder.NAME;
    }

    /**
     * Returns the value for the requested percent key.
     *
     * @throws IllegalArgumentException if that percent was not one of the computed percentiles
     */
    @Override
    public double percentile(double percent) throws IllegalArgumentException {
        Double value = percentiles.get(percent);
        if (value == null) {
            throw new IllegalArgumentException("Percent requested [" + String.valueOf(percent) + "] was not" +
                    " one of the computed percentiles. Available keys are: " + percentiles.keySet());
        }
        return value;
    }

    @Override
    public String percentileAsString(double percent) {
        double value = percentile(percent); // check availability as unformatted value
        String valueAsString = percentilesAsString.get(percent);
        if (valueAsString != null) {
            return valueAsString;
        } else {
            return Double.toString(value);
        }
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.startObject("values");
        for (Entry<Double, Double> percent : percentiles.entrySet()) {
            double value = percent.getValue();
            // FIX: use "== false" instead of "!" for consistency with the convention used
            // elsewhere in these parsers. NaN percentiles render as explicit nulls.
            boolean hasValue = Double.isNaN(value) == false;
            Double key = percent.getKey();
            builder.field(Double.toString(key), hasValue ? value : null);
            String valueAsString = percentilesAsString.get(key);
            if (hasValue && valueAsString != null) {
                builder.field(key + "_as_string", valueAsString);
            }
        }
        builder.endObject();
        return builder;
    }

    // FIX: added missing 'final' — the parser is immutable shared state and every other
    // Parsed* class in this change declares it as 'private static final'.
    private static final ObjectParser<ParsedPercentilesBucket, Void> PARSER =
            new ObjectParser<>(ParsedPercentilesBucket.class.getSimpleName(), true, ParsedPercentilesBucket::new);

    static {
        ParsedPercentiles.declarePercentilesFields(PARSER);
    }

    /**
     * Parses a {@code percentiles_bucket} aggregation object and assigns it the given name.
     *
     * @throws IOException if reading from the parser fails
     */
    public static ParsedPercentilesBucket fromXContent(XContentParser parser, String name) throws IOException {
        ParsedPercentilesBucket aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }
}

View File

@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats;
/**
 * Client-side view of a {@code stats_bucket} pipeline aggregation, parsed from
 * the XContent of a REST search response.
 */
public class ParsedStatsBucket extends ParsedStats implements StatsBucket {

    // Lenient parser: unknown fields from newer servers are ignored.
    private static final ObjectParser<ParsedStatsBucket, Void> PARSER = new ObjectParser<>(
            ParsedStatsBucket.class.getSimpleName(), true, ParsedStatsBucket::new);

    static {
        declareStatsFields(PARSER);
    }

    @Override
    public String getType() {
        return StatsBucketPipelineAggregationBuilder.NAME;
    }

    /**
     * Parses a {@code stats_bucket} aggregation object and assigns it the given name.
     */
    public static ParsedStatsBucket fromXContent(XContentParser parser, final String name) {
        final ParsedStatsBucket result = PARSER.apply(parser, null);
        result.setName(name);
        return result;
    }
}

View File

@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ParsedExtendedStats;
/**
 * Client-side view of an {@code extended_stats_bucket} pipeline aggregation,
 * parsed from the XContent of a REST search response.
 */
public class ParsedExtendedStatsBucket extends ParsedExtendedStats implements ExtendedStatsBucket {

    // Lenient parser: unknown fields from newer servers are ignored.
    private static final ObjectParser<ParsedExtendedStatsBucket, Void> PARSER = new ObjectParser<>(
            ParsedExtendedStatsBucket.class.getSimpleName(), true, ParsedExtendedStatsBucket::new);

    static {
        declareExtendedStatsFields(PARSER);
    }

    @Override
    public String getType() {
        return ExtendedStatsBucketPipelineAggregationBuilder.NAME;
    }

    /**
     * Parses an {@code extended_stats_bucket} aggregation object and assigns it the given name.
     */
    public static ParsedExtendedStatsBucket fromXContent(XContentParser parser, final String name) {
        final ParsedExtendedStatsBucket result = PARSER.apply(parser, null);
        result.setName(name);
        return result;
    }
}

View File

@ -0,0 +1,79 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.derivative;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue;
import java.io.IOException;
/**
 * Client-side view of a {@code derivative} pipeline aggregation, parsed from a REST response.
 * Extends ParsedSimpleValue with the optional normalization ("units") fields.
 */
public class ParsedDerivative extends ParsedSimpleValue implements Derivative {
// Normalized value, only meaningful when hasNormalizationFactor is true.
private double normalizedValue;
// Pre-formatted normalized value as received from the server, may be null.
private String normalizedAsString;
// Tracks whether the response actually contained a "normalized_value" field
// (even an explicit null sets this), so doXContentBody can round-trip it faithfully.
private boolean hasNormalizationFactor;
private static final ParseField NORMALIZED_AS_STRING = new ParseField("normalized_value_as_string");
private static final ParseField NORMALIZED = new ParseField("normalized_value");
@Override
public double normalizedValue() {
return this.normalizedValue;
}
@Override
public String getType() {
return DerivativePipelineAggregationBuilder.NAME;
}
// Lenient parser: unknown fields from newer servers are ignored.
private static final ObjectParser<ParsedDerivative, Void> PARSER = new ObjectParser<>(ParsedDerivative.class.getSimpleName(), true,
ParsedDerivative::new);
static {
// Inherited "value"/"value_as_string" fields; NaN marks "no value".
declareSingleValueFields(PARSER, Double.NaN);
// DOUBLE_OR_NULL: a JSON null is mapped to NaN by parseDouble, but still flags
// hasNormalizationFactor so the null is re-emitted on serialization.
PARSER.declareField((agg, normalized) -> {
agg.normalizedValue = normalized;
agg.hasNormalizationFactor = true;
}, (parser, context) -> parseDouble(parser, Double.NaN), NORMALIZED, ValueType.DOUBLE_OR_NULL);
PARSER.declareString((agg, normalAsString) -> agg.normalizedAsString = normalAsString, NORMALIZED_AS_STRING);
}
/**
 * Parses a {@code derivative} aggregation object and assigns it the given name.
 */
public static ParsedDerivative fromXContent(XContentParser parser, final String name) {
ParsedDerivative derivative = PARSER.apply(parser, null);
derivative.setName(name);
return derivative;
}
@Override
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
super.doXContentBody(builder, params);
if (hasNormalizationFactor) {
// NaN (from an explicit null in the response) is rendered back as null.
boolean hasValue = Double.isNaN(normalizedValue) == false;
builder.field(NORMALIZED.getPreferredName(), hasValue ? normalizedValue : null);
if (hasValue && normalizedAsString != null) {
builder.field(NORMALIZED_AS_STRING.getPreferredName(), normalizedAsString);
}
}
return builder;
}
}

View File

@ -19,148 +19,50 @@
package org.elasticsearch.search.internal;
import org.elasticsearch.action.search.SearchResponseSections;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.profile.ProfileShardResult;
import org.elasticsearch.search.profile.SearchProfileShardResults;
import org.elasticsearch.search.suggest.Suggest;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
public class InternalSearchResponse implements Streamable, ToXContent {
/**
* {@link SearchResponseSections} subclass that can be serialized over the wire.
*/
public class InternalSearchResponse extends SearchResponseSections implements Writeable, ToXContent {
public static InternalSearchResponse empty() {
return new InternalSearchResponse(SearchHits.empty(), null, null, null, false, null, 1);
}
private SearchHits hits;
private InternalAggregations aggregations;
private Suggest suggest;
private SearchProfileShardResults profileResults;
private boolean timedOut;
private Boolean terminatedEarly = null;
private int numReducePhases = 1;
private InternalSearchResponse() {
}
public InternalSearchResponse(SearchHits hits, InternalAggregations aggregations, Suggest suggest,
SearchProfileShardResults profileResults, boolean timedOut, Boolean terminatedEarly,
int numReducePhases) {
this.hits = hits;
this.aggregations = aggregations;
this.suggest = suggest;
this.profileResults = profileResults;
this.timedOut = timedOut;
this.terminatedEarly = terminatedEarly;
this.numReducePhases = numReducePhases;
super(hits, aggregations, suggest, timedOut, terminatedEarly, profileResults, numReducePhases);
}
public boolean timedOut() {
return this.timedOut;
}
public Boolean terminatedEarly() {
return this.terminatedEarly;
}
public SearchHits hits() {
return hits;
}
public Aggregations aggregations() {
return aggregations;
}
public Suggest suggest() {
return suggest;
}
/**
* Returns the number of reduce phases applied to obtain this search response
*/
public int getNumReducePhases() {
return numReducePhases;
}
/**
* Returns the profile results for this search response (including all shards).
* An empty map is returned if profiling was not enabled
*
* @return Profile results
*/
public Map<String, ProfileShardResult> profile() {
if (profileResults == null) {
return Collections.emptyMap();
}
return profileResults.getShardResults();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
hits.toXContent(builder, params);
if (aggregations != null) {
aggregations.toXContent(builder, params);
}
if (suggest != null) {
suggest.toXContent(builder, params);
}
if (profileResults != null) {
profileResults.toXContent(builder, params);
}
return builder;
}
public static InternalSearchResponse readInternalSearchResponse(StreamInput in) throws IOException {
InternalSearchResponse response = new InternalSearchResponse();
response.readFrom(in);
return response;
}
@Override
public void readFrom(StreamInput in) throws IOException {
hits = SearchHits.readSearchHits(in);
if (in.readBoolean()) {
aggregations = InternalAggregations.readAggregations(in);
}
if (in.readBoolean()) {
suggest = Suggest.readSuggest(in);
}
timedOut = in.readBoolean();
terminatedEarly = in.readOptionalBoolean();
profileResults = in.readOptionalWriteable(SearchProfileShardResults::new);
numReducePhases = in.readVInt();
public InternalSearchResponse(StreamInput in) throws IOException {
super(
SearchHits.readSearchHits(in),
in.readBoolean() ? InternalAggregations.readAggregations(in) : null,
in.readBoolean() ? Suggest.readSuggest(in) : null,
in.readBoolean(),
in.readOptionalBoolean(),
in.readOptionalWriteable(SearchProfileShardResults::new),
in.readVInt()
);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
hits.writeTo(out);
if (aggregations == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
aggregations.writeTo(out);
}
if (suggest == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
suggest.writeTo(out);
}
out.writeOptionalStreamable((InternalAggregations)aggregations);
out.writeOptionalStreamable(suggest);
out.writeBoolean(timedOut);
out.writeOptionalBoolean(terminatedEarly);
out.writeOptionalWriteable(profileResults);

View File

@ -60,7 +60,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpect
*/
public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Streamable, ToXContent {
static final String NAME = "suggest";
public static final String NAME = "suggest";
public static final Comparator<Option> COMPARATOR = (first, second) -> {
int cmp = Float.compare(second.getScore(), first.getScore());
@ -386,6 +386,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
@SuppressWarnings("unchecked")
public static Suggestion<? extends Entry<? extends Option>> fromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
return XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Suggestion.class);
}

View File

@ -0,0 +1,177 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchHitsTests;
import org.elasticsearch.search.aggregations.AggregationsTests;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.profile.SearchProfileShardResults;
import org.elasticsearch.search.profile.SearchProfileShardResultsTests;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.SuggestTests;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
public class SearchResponseTests extends ESTestCase {
private static final NamedXContentRegistry xContentRegistry;
static {
List<NamedXContentRegistry.Entry> namedXContents = new ArrayList<>(InternalAggregationTestCase.getDefaultNamedXContents());
namedXContents.addAll(SuggestTests.getDefaultNamedXContents());
xContentRegistry = new NamedXContentRegistry(namedXContents);
}
private AggregationsTests aggregationsTests = new AggregationsTests();
@Before
public void init() throws Exception {
aggregationsTests.init();
}
@After
public void cleanUp() throws Exception {
aggregationsTests.cleanUp();
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return xContentRegistry;
}
private SearchResponse createTestItem(ShardSearchFailure... shardSearchFailures) {
SearchHits hits = SearchHitsTests.createTestItem();
boolean timedOut = randomBoolean();
Boolean terminatedEarly = randomBoolean() ? null : randomBoolean();
int numReducePhases = randomIntBetween(1, 10);
long tookInMillis = randomNonNegativeLong();
int successfulShards = randomInt();
int totalShards = randomInt();
InternalAggregations aggregations = aggregationsTests.createTestInstance();
Suggest suggest = SuggestTests.createTestItem();
SearchProfileShardResults profileShardResults = SearchProfileShardResultsTests.createTestItem();
InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, aggregations, suggest, profileShardResults,
timedOut, terminatedEarly, numReducePhases);
return new SearchResponse(internalSearchResponse, null, totalShards, successfulShards, tookInMillis, shardSearchFailures);
}
public void testFromXContent() throws IOException {
// the "_shard/total/failures" section makes if impossible to directly compare xContent, so we omit it here
SearchResponse response = createTestItem();
XContentType xcontentType = randomFrom(XContentType.values());
boolean humanReadable = randomBoolean();
final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"));
BytesReference originalBytes = toShuffledXContent(response, xcontentType, params, humanReadable);
try (XContentParser parser = createParser(xcontentType.xContent(), originalBytes)) {
SearchResponse parsed = SearchResponse.fromXContent(parser);
assertToXContentEquivalent(originalBytes, XContentHelper.toXContent(parsed, xcontentType, params, humanReadable), xcontentType);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertNull(parser.nextToken());
}
}
/**
* The "_shard/total/failures" section makes if impossible to directly compare xContent, because
* the failures in the parsed SearchResponse are wrapped in an extra ElasticSearchException on the client side.
* Because of this, in this special test case we compare the "top level" fields for equality
* and the subsections xContent equivalence independently
*/
public void testFromXContentWithFailures() throws IOException {
int numFailures = randomIntBetween(1, 5);
ShardSearchFailure[] failures = new ShardSearchFailure[numFailures];
for (int i = 0; i < failures.length; i++) {
failures[i] = ShardSearchFailureTests.createTestItem();
}
SearchResponse response = createTestItem(failures);
XContentType xcontentType = randomFrom(XContentType.values());
final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"));
BytesReference originalBytes = toShuffledXContent(response, xcontentType, params, randomBoolean());
try (XContentParser parser = createParser(xcontentType.xContent(), originalBytes)) {
SearchResponse parsed = SearchResponse.fromXContent(parser);
for (int i = 0; i < parsed.getShardFailures().length; i++) {
ShardSearchFailure parsedFailure = parsed.getShardFailures()[i];
ShardSearchFailure originalFailure = failures[i];
assertEquals(originalFailure.index(), parsedFailure.index());
assertEquals(originalFailure.shard().getNodeId(), parsedFailure.shard().getNodeId());
assertEquals(originalFailure.shardId(), parsedFailure.shardId());
String originalMsg = originalFailure.getCause().getMessage();
assertEquals(parsedFailure.getCause().getMessage(), "Elasticsearch exception [type=parsing_exception, reason=" +
originalMsg + "]");
String nestedMsg = originalFailure.getCause().getCause().getMessage();
assertEquals(parsedFailure.getCause().getCause().getMessage(),
"Elasticsearch exception [type=illegal_argument_exception, reason=" + nestedMsg + "]");
}
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertNull(parser.nextToken());
}
}
/**
 * Verifies the exact JSON produced when rendering a minimal {@code SearchResponse}
 * (a single hit, no aggregations, suggestions or profile results) to xContent.
 */
public void testToXContent() {
    SearchHit hit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap());
    hit.score(2.0f);
    SearchHit[] hits = new SearchHit[] { hit };
    SearchResponse response = new SearchResponse(
            new InternalSearchResponse(new SearchHits(hits, 100, 1.5f), null, null, null, false, null, 1), null, 0, 0, 0,
            new ShardSearchFailure[0]);
    // Expected body spelled out as one concatenated literal, grouped by JSON section.
    String expected = "{"
            + "\"took\":0,"
            + "\"timed_out\":false,"
            + "\"_shards\":{\"total\":0,\"successful\":0,\"failed\":0},"
            + "\"hits\":{\"total\":100,\"max_score\":1.5,"
            + "\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":2.0}]}"
            + "}";
    assertEquals(expected, Strings.toString(response));
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.action.search;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
@ -50,7 +51,7 @@ public class ShardSearchFailureTests extends ESTestCase {
ShardSearchFailure response = createTestItem();
XContentType xContentType = randomFrom(XContentType.values());
boolean humanReadable = randomBoolean();
BytesReference originalBytes = toXContent(response, xContentType, humanReadable);
BytesReference originalBytes = toShuffledXContent(response, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
ShardSearchFailure parsed;
try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {

View File

@ -65,12 +65,10 @@ public class XContentParserUtilsTests extends ESTestCase {
BytesReference bytes = toXContent((builder, params) -> builder.field("test", 0), xContentType, randomBoolean());
try (XContentParser parser = xContentType.xContent().createParser(namedXContentRegistry, bytes)) {
parser.nextToken();
ParsingException e = expectThrows(ParsingException.class, () -> parseTypedKeysObject(parser, delimiter, Boolean.class));
assertEquals("Failed to parse object: expecting token of type [FIELD_NAME] but found [START_OBJECT]", e.getMessage());
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
parser.nextToken();
e = expectThrows(ParsingException.class, () -> parseTypedKeysObject(parser, delimiter, Boolean.class));
ParsingException e = expectThrows(ParsingException.class, () -> parseTypedKeysObject(parser, delimiter, Boolean.class));
assertEquals("Cannot parse object of class [Boolean] without type information. Set [typed_keys] parameter " +
"on the request to ensure the type information is added to the response output", e.getMessage());
}

View File

@ -0,0 +1,213 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.aggregations.bucket.adjacency.InternalAdjacencyMatrixTests;
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilterTests;
import org.elasticsearch.search.aggregations.bucket.filters.InternalFiltersTests;
import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGridTests;
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobalTests;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogramTests;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogramTests;
import org.elasticsearch.search.aggregations.bucket.missing.InternalMissingTests;
import org.elasticsearch.search.aggregations.bucket.nested.InternalNestedTests;
import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNestedTests;
import org.elasticsearch.search.aggregations.bucket.range.InternalBinaryRangeTests;
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTests;
import org.elasticsearch.search.aggregations.bucket.range.date.InternalDateRangeTests;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.InternalGeoDistanceTests;
import org.elasticsearch.search.aggregations.bucket.sampler.InternalSamplerTests;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantLongTermsTests;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.DoubleTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.LongTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.StringTermsTests;
import org.elasticsearch.search.aggregations.metrics.InternalExtendedStatsTests;
import org.elasticsearch.search.aggregations.metrics.InternalMaxTests;
import org.elasticsearch.search.aggregations.metrics.InternalStatsBucketTests;
import org.elasticsearch.search.aggregations.metrics.InternalStatsTests;
import org.elasticsearch.search.aggregations.metrics.avg.InternalAvgTests;
import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinalityTests;
import org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBoundsTests;
import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroidTests;
import org.elasticsearch.search.aggregations.metrics.min.InternalMinTests;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentilesRanksTests;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentilesTests;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentilesRanksTests;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentilesTests;
import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMetricTests;
import org.elasticsearch.search.aggregations.metrics.sum.InternalSumTests;
import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHitsTests;
import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCountTests;
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValueTests;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValueTests;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.InternalPercentilesBucketTests;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.InternalExtendedStatsBucketTests;
import org.elasticsearch.search.aggregations.pipeline.derivative.InternalDerivativeTests;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static java.util.Collections.singletonMap;
/**
* This class tests that aggregations parsing works properly. It checks that we can parse
* different aggregations and adds sub-aggregations where applicable.
*
*/
public class AggregationsTests extends ESTestCase {

    // One test-case instance per supported aggregation type; each knows how to build a
    // random instance of its aggregation. Built once and shared by all test methods.
    private static final List<InternalAggregationTestCase> aggsTests = getAggsTests();

    /**
     * Registers every per-aggregation test case used to generate random aggregations.
     * Returns an unmodifiable list so the shared static field cannot be mutated.
     */
    private static List<InternalAggregationTestCase> getAggsTests() {
        List<InternalAggregationTestCase> aggsTests = new ArrayList<>();
        aggsTests.add(new InternalCardinalityTests());
        aggsTests.add(new InternalTDigestPercentilesTests());
        aggsTests.add(new InternalTDigestPercentilesRanksTests());
        aggsTests.add(new InternalHDRPercentilesTests());
        aggsTests.add(new InternalHDRPercentilesRanksTests());
        aggsTests.add(new InternalPercentilesBucketTests());
        aggsTests.add(new InternalMinTests());
        aggsTests.add(new InternalMaxTests());
        aggsTests.add(new InternalAvgTests());
        aggsTests.add(new InternalSumTests());
        aggsTests.add(new InternalValueCountTests());
        aggsTests.add(new InternalSimpleValueTests());
        aggsTests.add(new InternalDerivativeTests());
        aggsTests.add(new InternalBucketMetricValueTests());
        aggsTests.add(new InternalStatsTests());
        aggsTests.add(new InternalStatsBucketTests());
        aggsTests.add(new InternalExtendedStatsTests());
        aggsTests.add(new InternalExtendedStatsBucketTests());
        aggsTests.add(new InternalGeoBoundsTests());
        aggsTests.add(new InternalGeoCentroidTests());
        aggsTests.add(new InternalHistogramTests());
        aggsTests.add(new InternalDateHistogramTests());
        aggsTests.add(new LongTermsTests());
        aggsTests.add(new DoubleTermsTests());
        aggsTests.add(new StringTermsTests());
        aggsTests.add(new InternalMissingTests());
        aggsTests.add(new InternalNestedTests());
        aggsTests.add(new InternalReverseNestedTests());
        aggsTests.add(new InternalGlobalTests());
        aggsTests.add(new InternalFilterTests());
        aggsTests.add(new InternalSamplerTests());
        aggsTests.add(new InternalGeoHashGridTests());
        aggsTests.add(new InternalRangeTests());
        aggsTests.add(new InternalDateRangeTests());
        aggsTests.add(new InternalGeoDistanceTests());
        aggsTests.add(new InternalFiltersTests());
        aggsTests.add(new InternalAdjacencyMatrixTests());
        aggsTests.add(new SignificantLongTermsTests());
        aggsTests.add(new SignificantStringTermsTests());
        aggsTests.add(new InternalScriptedMetricTests());
        aggsTests.add(new InternalBinaryRangeTests());
        aggsTests.add(new InternalTopHitsTests());
        return Collections.unmodifiableList(aggsTests);
    }

    /** Parsing here relies on the default named-xcontent registry for aggregations. */
    @Override
    protected NamedXContentRegistry xContentRegistry() {
        return new NamedXContentRegistry(InternalAggregationTestCase.getDefaultNamedXContents());
    }

    // The delegate test cases are JUnit test classes themselves; their lifecycle methods
    // are not invoked automatically here, so set them up / tear them down explicitly.
    @Before
    public void init() throws Exception {
        for (InternalAggregationTestCase aggsTest : aggsTests) {
            aggsTest.setUp();
        }
    }

    @After
    public void cleanUp() throws Exception {
        for (InternalAggregationTestCase aggsTest : aggsTests) {
            aggsTest.tearDown();
        }
    }

    /**
     * Guards against registering a parser without a matching test case: the number of
     * registered test cases must equal the number of default named-xcontent entries,
     * and every registered entry name must be covered by some test case's type.
     */
    public void testAllAggsAreBeingTested() {
        assertEquals(InternalAggregationTestCase.getDefaultNamedXContents().size(), aggsTests.size());
        Set<String> aggs = aggsTests.stream().map((testCase) -> testCase.createTestInstance().getType()).collect(Collectors.toSet());
        for (NamedXContentRegistry.Entry entry : InternalAggregationTestCase.getDefaultNamedXContents()) {
            assertTrue(aggs.contains(entry.name.getPreferredName()));
        }
    }

    /**
     * Round-trips a random tree of aggregations through xContent and checks the
     * re-serialized output is equivalent to the original. typed_keys is required so
     * the parser can recover each aggregation's concrete type.
     */
    public void testFromXContent() throws IOException {
        XContentType xContentType = randomFrom(XContentType.values());
        final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"));
        Aggregations aggregations = createTestInstance();
        BytesReference originalBytes = toShuffledXContent(aggregations, xContentType, params, randomBoolean());
        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
            // skip past the wrapping object and the "aggregations" field before handing off
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals(Aggregations.AGGREGATIONS_FIELD, parser.currentName());
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            Aggregations parsedAggregations = Aggregations.fromXContent(parser);
            BytesReference parsedBytes = XContentHelper.toXContent(parsedAggregations, xContentType, randomBoolean());
            ElasticsearchAssertions.assertToXContentEquivalent(originalBytes, parsedBytes, xContentType);
        }
    }

    /** Builds a random aggregation tree: at least one top-level agg, nesting up to depth 5. */
    public final InternalAggregations createTestInstance() {
        return createTestInstance(1, 0, 5);
    }

    /**
     * Recursively builds random aggregations. Multi- and single-bucket aggregations get
     * random sub-aggregations via their supplier hooks until maxDepth is reached, at
     * which point the suppliers are pinned to empty aggregations to stop the recursion.
     *
     * @param minNumAggs  minimum number of aggregations at this level (0 allows empty sub-levels)
     * @param currentDepth current nesting depth
     * @param maxDepth     maximum nesting depth
     */
    private static InternalAggregations createTestInstance(final int minNumAggs, final int currentDepth, final int maxDepth) {
        int numAggs = randomIntBetween(minNumAggs, 4);
        List<InternalAggregation> aggs = new ArrayList<>(numAggs);
        for (int i = 0; i < numAggs; i++) {
            InternalAggregationTestCase testCase = randomFrom(aggsTests);
            if (testCase instanceof InternalMultiBucketAggregationTestCase) {
                InternalMultiBucketAggregationTestCase multiBucketAggTestCase = (InternalMultiBucketAggregationTestCase) testCase;
                if (currentDepth < maxDepth) {
                    multiBucketAggTestCase.subAggregationsSupplier = () -> createTestInstance(0, currentDepth + 1, maxDepth);
                } else {
                    multiBucketAggTestCase.subAggregationsSupplier = () -> InternalAggregations.EMPTY;
                }
            } else if (testCase instanceof InternalSingleBucketAggregationTestCase) {
                InternalSingleBucketAggregationTestCase singleBucketAggTestCase = (InternalSingleBucketAggregationTestCase) testCase;
                if (currentDepth < maxDepth) {
                    singleBucketAggTestCase.subAggregationsSupplier = () -> createTestInstance(0, currentDepth + 1, maxDepth);
                } else {
                    singleBucketAggTestCase.subAggregationsSupplier = () -> InternalAggregations.EMPTY;
                }
            }
            aggs.add(testCase.createTestInstance());
        }
        return new InternalAggregations(aggs);
    }
}

View File

@ -0,0 +1,157 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
/**
 * Base test case for multi-bucket aggregations: generates random instances (optionally
 * with random sub-aggregations) and asserts that a parsed (client-side) aggregation
 * matches the original internal one, bucket by bucket.
 */
public abstract class InternalMultiBucketAggregationTestCase<T extends InternalAggregation & MultiBucketsAggregation>
        extends InternalAggregationTestCase<T> {

    // Hook for the sub-aggregations attached to each generated bucket; package-private so
    // AggregationsTests can override it when building nested random aggregation trees.
    Supplier<InternalAggregations> subAggregationsSupplier;

    @Override
    public void setUp() throws Exception {
        super.setUp();
        // randomly pick: no sub-aggregations, or 1-3 random leaf sub-aggregations per bucket
        if (randomBoolean()) {
            subAggregationsSupplier = () -> InternalAggregations.EMPTY;
        } else {
            subAggregationsSupplier = () -> {
                final int numAggregations = randomIntBetween(1, 3);
                List<InternalAggregation> aggs = new ArrayList<>();
                for (int i = 0; i < numAggregations; i++) {
                    aggs.add(createTestInstance(randomAlphaOfLength(5), emptyList(), emptyMap(), InternalAggregations.EMPTY));
                }
                return new InternalAggregations(aggs);
            };
        }
    }

    // Funnels the base-class factory through the 4-arg variant, supplying sub-aggregations.
    @Override
    protected final T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
        return createTestInstance(name, pipelineAggregators, metaData, subAggregationsSupplier.get());
    }

    /** Creates a random instance of the aggregation under test with the given sub-aggregations. */
    protected abstract T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
                                            Map<String, Object> metaData, InternalAggregations aggregations);

    /** The client-side {@code Parsed*} class the xContent parser is expected to produce. */
    protected abstract Class<? extends ParsedMultiBucketAggregation> implementationClass();

    @Override
    protected final void assertFromXContent(T aggregation, ParsedAggregation parsedAggregation) {
        // bucket order may not survive the round trip here, so compare without ordering
        assertMultiBucketsAggregations(aggregation, parsedAggregation, false);
    }

    /** Parses a random instance and additionally checks bucket iteration order is preserved. */
    public void testIterators() throws IOException {
        final T aggregation = createTestInstance();
        assertMultiBucketsAggregations(aggregation, parseAndAssert(aggregation, false), true);
    }

    /**
     * Asserts two multi-bucket aggregations are equivalent. With checkOrder the buckets
     * are compared pairwise in iteration order; otherwise each expected bucket is matched
     * to an actual bucket by key.
     */
    private void assertMultiBucketsAggregations(Aggregation expected, Aggregation actual, boolean checkOrder) {
        assertTrue(expected instanceof MultiBucketsAggregation);
        MultiBucketsAggregation expectedMultiBucketsAggregation = (MultiBucketsAggregation) expected;
        assertTrue(actual instanceof MultiBucketsAggregation);
        MultiBucketsAggregation actualMultiBucketsAggregation = (MultiBucketsAggregation) actual;
        assertMultiBucketsAggregation(expectedMultiBucketsAggregation, actualMultiBucketsAggregation, checkOrder);
        List<? extends MultiBucketsAggregation.Bucket> expectedBuckets = expectedMultiBucketsAggregation.getBuckets();
        List<? extends MultiBucketsAggregation.Bucket> actualBuckets = actualMultiBucketsAggregation.getBuckets();
        assertEquals(expectedBuckets.size(), actualBuckets.size());
        if (checkOrder) {
            Iterator<? extends MultiBucketsAggregation.Bucket> expectedIt = expectedBuckets.iterator();
            Iterator<? extends MultiBucketsAggregation.Bucket> actualIt = actualBuckets.iterator();
            while (expectedIt.hasNext()) {
                MultiBucketsAggregation.Bucket expectedBucket = expectedIt.next();
                MultiBucketsAggregation.Bucket actualBucket = actualIt.next();
                assertBucket(expectedBucket, actualBucket, true);
            }
        } else {
            // match buckets by key (both-null keys also match)
            for (MultiBucketsAggregation.Bucket expectedBucket : expectedBuckets) {
                final Object expectedKey = expectedBucket.getKey();
                boolean found = false;
                for (MultiBucketsAggregation.Bucket actualBucket : actualBuckets) {
                    final Object actualKey = actualBucket.getKey();
                    if ((actualKey != null && actualKey.equals(expectedKey)) || (actualKey == null && expectedKey == null)) {
                        found = true;
                        assertBucket(expectedBucket, actualBucket, false);
                        break;
                    }
                }
                assertTrue("Failed to find bucket with key [" + expectedBucket.getKey() + "]", found);
            }
        }
    }

    /**
     * Asserts top-level (non-bucket) properties match and the actual aggregation is of
     * the expected parsed class. NOTE(review): checkOrder is currently unused here —
     * presumably kept so subclasses can override with order-sensitive checks; confirm.
     */
    protected void assertMultiBucketsAggregation(MultiBucketsAggregation expected, MultiBucketsAggregation actual, boolean checkOrder) {
        Class<? extends ParsedMultiBucketAggregation> parsedClass = implementationClass();
        assertNotNull("Parsed aggregation class must not be null", parsedClass);
        assertTrue(parsedClass.isInstance(actual));
        assertTrue(expected instanceof InternalAggregation);
        assertEquals(expected.getName(), actual.getName());
        assertEquals(expected.getMetaData(), actual.getMetaData());
        assertEquals(expected.getType(), actual.getType());
    }

    /**
     * Asserts a single expected (internal) bucket matches its actual (parsed) counterpart:
     * key, key-as-string, doc count, then sub-aggregations recursively (ordered or keyed
     * per checkOrder).
     */
    protected void assertBucket(MultiBucketsAggregation.Bucket expected, MultiBucketsAggregation.Bucket actual, boolean checkOrder) {
        assertTrue(expected instanceof InternalMultiBucketAggregation.InternalBucket);
        assertTrue(actual instanceof ParsedMultiBucketAggregation.ParsedBucket);
        assertEquals(expected.getKey(), actual.getKey());
        assertEquals(expected.getKeyAsString(), actual.getKeyAsString());
        assertEquals(expected.getDocCount(), actual.getDocCount());
        Aggregations expectedAggregations = expected.getAggregations();
        Aggregations actualAggregations = actual.getAggregations();
        assertEquals(expectedAggregations.asList().size(), actualAggregations.asList().size());
        if (checkOrder) {
            Iterator<Aggregation> expectedIt = expectedAggregations.iterator();
            Iterator<Aggregation> actualIt = actualAggregations.iterator();
            while (expectedIt.hasNext()) {
                Aggregation expectedAggregation = expectedIt.next();
                Aggregation actualAggregation = actualIt.next();
                assertMultiBucketsAggregations(expectedAggregation, actualAggregation, true);
            }
        } else {
            for (Aggregation expectedAggregation : expectedAggregations) {
                Aggregation actualAggregation = actualAggregations.get(expectedAggregation.getName());
                assertNotNull(actualAggregation);
                assertMultiBucketsAggregations(expectedAggregation, actualAggregation, false);
            }
        }
    }
}

View File

@ -21,21 +21,20 @@ package org.elasticsearch.search.aggregations.bucket.adjacency;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
public class InternalAdjacencyMatrixTests extends InternalAggregationTestCase<InternalAdjacencyMatrix> {
public class InternalAdjacencyMatrixTests extends InternalMultiBucketAggregationTestCase<InternalAdjacencyMatrix> {
private List<String> keys;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
keys = new ArrayList<>();
@ -58,12 +57,12 @@ public class InternalAdjacencyMatrixTests extends InternalAggregationTestCase<In
@Override
protected InternalAdjacencyMatrix createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
Map<String, Object> metaData, InternalAggregations aggregations) {
final List<InternalAdjacencyMatrix.InternalBucket> buckets = new ArrayList<>();
for (int i = 0; i < keys.size(); ++i) {
String key = keys.get(i);
int docCount = randomIntBetween(0, 1000);
buckets.add(new InternalAdjacencyMatrix.InternalBucket(key, docCount, InternalAggregations.EMPTY));
buckets.add(new InternalAdjacencyMatrix.InternalBucket(key, docCount, aggregations));
}
return new InternalAdjacencyMatrix(name, buckets, pipelineAggregators, metaData);
}
@ -89,4 +88,9 @@ public class InternalAdjacencyMatrixTests extends InternalAggregationTestCase<In
protected Reader<InternalAdjacencyMatrix> instanceReader() {
return InternalAdjacencyMatrix::new;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedAdjacencyMatrix.class;
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.filter;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.List;
@ -43,4 +44,9 @@ public class InternalFilterTests extends InternalSingleBucketAggregationTestCase
protected Reader<InternalFilter> instanceReader() {
return InternalFilter::new;
}
@Override
protected Class<? extends ParsedSingleBucketAggregation> implementationClass() {
return ParsedFilter.class;
}
}

View File

@ -21,39 +21,44 @@ package org.elasticsearch.search.aggregations.bucket.filters;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
public class InternalFiltersTests extends InternalAggregationTestCase<InternalFilters> {
public class InternalFiltersTests extends InternalMultiBucketAggregationTestCase<InternalFilters> {
private boolean keyed;
private final List<String> keys = new ArrayList<>();
private List<String> keys;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
keyed = randomBoolean();
int numKeys = randomIntBetween(1,10);
for (int i = 0; i < numKeys; i++) {
keys.add(randomAlphaOfLength(5));
keys = new ArrayList<>();
int numBuckets = randomIntBetween(1, 5);
for (int i = 0; i < numBuckets; i++) {
if (keyed) {
keys.add(randomAlphaOfLength(5));
} else {
// this is what the FiltersAggregationBuilder ctor does when not providing KeyedFilter
keys.add(String.valueOf(i));
}
}
}
@Override
protected InternalFilters createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
protected InternalFilters createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData,
InternalAggregations aggregations) {
final List<InternalFilters.InternalBucket> buckets = new ArrayList<>();
for (int i = 0; i < keys.size(); ++i) {
String key = keys.get(i);
int docCount = randomIntBetween(0, 1000);
buckets.add( new InternalFilters.InternalBucket(key, docCount, InternalAggregations.EMPTY, keyed));
buckets.add(new InternalFilters.InternalBucket(key, docCount, aggregations, keyed));
}
return new InternalFilters(name, buckets, keyed, pipelineAggregators, metaData);
}
@ -80,4 +85,9 @@ public class InternalFiltersTests extends InternalAggregationTestCase<InternalFi
return InternalFilters::new;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedFilters.class;
}
}

View File

@ -22,24 +22,30 @@ import org.apache.lucene.index.IndexWriter;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class InternalGeoHashGridTests extends InternalAggregationTestCase<InternalGeoHashGrid> {
public class InternalGeoHashGridTests extends InternalMultiBucketAggregationTestCase<InternalGeoHashGrid> {
@Override
protected InternalGeoHashGrid createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
int size = randomIntBetween(1, 100);
protected InternalGeoHashGrid createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations) {
int size = randomIntBetween(1, 3);
List<InternalGeoHashGrid.Bucket> buckets = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
long geoHashAsLong = GeoHashUtils.longEncode(randomInt(90), randomInt(90), 4);
buckets.add(new InternalGeoHashGrid.Bucket(geoHashAsLong, randomInt(IndexWriter.MAX_DOCS), InternalAggregations.EMPTY));
double latitude = randomDoubleBetween(-90.0, 90.0, false);
double longitude = randomDoubleBetween(-180.0, 180.0, false);
long geoHashAsLong = GeoHashUtils.longEncode(longitude, latitude, 4);
buckets.add(new InternalGeoHashGrid.Bucket(geoHashAsLong, randomInt(IndexWriter.MAX_DOCS), aggregations));
}
return new InternalGeoHashGrid(name, size, buckets, pipelineAggregators, metaData);
}
@ -87,4 +93,9 @@ public class InternalGeoHashGridTests extends InternalAggregationTestCase<Intern
assertEquals(expected.getKey(), actual.getKey());
}
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedGeoHashGrid.class;
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.global;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.List;
@ -44,4 +45,8 @@ public class InternalGlobalTests extends InternalSingleBucketAggregationTestCase
return InternalGlobal::new;
}
@Override
protected Class<? extends ParsedSingleBucketAggregation> implementationClass() {
return ParsedGlobal.class;
}
}

View File

@ -22,6 +22,8 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.InternalAggregationTestCase;
@ -36,14 +38,23 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueHours;
import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
public class InternalDateHistogramTests extends InternalAggregationTestCase<InternalDateHistogram> {
public class InternalDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalDateHistogram> {
private boolean keyed;
private DocValueFormat format;
@Override
protected InternalDateHistogram createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
public void setUp() throws Exception {
super.setUp();
keyed = randomBoolean();
format = randomNumericDocValueFormat();
}
boolean keyed = randomBoolean();
DocValueFormat format = DocValueFormat.RAW;
@Override
protected InternalDateHistogram createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations) {
int nbBuckets = randomInt(10);
List<InternalDateHistogram.Bucket> buckets = new ArrayList<>(nbBuckets);
long startingDate = System.currentTimeMillis();
@ -53,7 +64,7 @@ public class InternalDateHistogramTests extends InternalAggregationTestCase<Inte
for (int i = 0; i < nbBuckets; i++) {
long key = startingDate + (intervalMillis * i);
buckets.add(i, new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), keyed, format, InternalAggregations.EMPTY));
buckets.add(i, new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), keyed, format, aggregations));
}
BucketOrder order = randomFrom(BucketOrder.key(true), BucketOrder.key(false));
@ -81,4 +92,9 @@ public class InternalDateHistogramTests extends InternalAggregationTestCase<Inte
protected Writeable.Reader<InternalDateHistogram> instanceReader() {
return InternalDateHistogram::new;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedDateHistogram.class;
}
}

View File

@ -22,33 +22,44 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
public class InternalHistogramTests extends InternalAggregationTestCase<InternalHistogram> {
public class InternalHistogramTests extends InternalMultiBucketAggregationTestCase<InternalHistogram> {
private boolean keyed;
private DocValueFormat format;
@Override
protected InternalHistogram createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
final boolean keyed = randomBoolean();
final DocValueFormat format = DocValueFormat.RAW;
public void setUp() throws Exception{
super.setUp();
keyed = randomBoolean();
format = randomNumericDocValueFormat();
}
@Override
protected InternalHistogram createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations) {
final int base = randomInt(50) - 30;
final int numBuckets = randomInt(10);
final int interval = randomIntBetween(1, 3);
List<InternalHistogram.Bucket> buckets = new ArrayList<>();
for (int i = 0; i < numBuckets; ++i) {
final int docCount = TestUtil.nextInt(random(), 1, 50);
buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, keyed, format, InternalAggregations.EMPTY));
buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, keyed, format, aggregations));
}
return new InternalHistogram(name, buckets, BucketOrder.key(true),
1, null, format, keyed, pipelineAggregators, metaData);
BucketOrder order = BucketOrder.key(randomBoolean());
return new InternalHistogram(name, buckets, order, 1, null, format, keyed, pipelineAggregators, metaData);
}
@Override
@ -73,4 +84,8 @@ public class InternalHistogramTests extends InternalAggregationTestCase<Internal
return InternalHistogram::new;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedHistogram.class;
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.missing;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.List;
@ -44,4 +45,8 @@ public class InternalMissingTests extends InternalSingleBucketAggregationTestCas
return InternalMissing::new;
}
@Override
protected Class<? extends ParsedSingleBucketAggregation> implementationClass() {
return ParsedMissing.class;
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.nested;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.List;
@ -43,4 +44,9 @@ public class InternalNestedTests extends InternalSingleBucketAggregationTestCase
protected Reader<InternalNested> instanceReader() {
return InternalNested::new;
}
@Override
protected Class<? extends ParsedSingleBucketAggregation> implementationClass() {
return ParsedNested.class;
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.nested;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.List;
@ -43,4 +44,9 @@ public class InternalReverseNestedTests extends InternalSingleBucketAggregationT
protected Reader<InternalReverseNested> instanceReader() {
return InternalReverseNested::new;
}
@Override
protected Class<? extends ParsedSingleBucketAggregation> implementationClass() {
return ParsedReverseNested.class;
}
}

View File

@ -23,47 +23,61 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class InternalBinaryRangeTests extends InternalAggregationTestCase<InternalBinaryRange> {
private Tuple<BytesRef, BytesRef>[] RANGES;
public class InternalBinaryRangeTests extends InternalRangeTestCase<InternalBinaryRange> {
private List<Tuple<BytesRef, BytesRef>> ranges;
@Override
public void setUp() throws Exception {
super.setUp();
final int numRanges = randomIntBetween(1, 10);
ranges = new ArrayList<>(numRanges);
@Before
public void randomSortedRanges() {
int numRanges = randomIntBetween(1, 10);
Tuple<BytesRef, BytesRef>[] ranges = new Tuple[numRanges];
for (int i = 0; i < numRanges; i++) {
BytesRef[] values = new BytesRef[2];
values[0] = new BytesRef(randomAlphaOfLength(15));
values[1] = new BytesRef(randomAlphaOfLength(15));
Arrays.sort(values);
ranges[i] = new Tuple(values[0], values[1]);
ranges.add(Tuple.tuple(values[0], values[1]));
}
if (randomBoolean()) {
ranges.add(Tuple.tuple(null, new BytesRef(randomAlphaOfLength(15))));
}
if (randomBoolean()) {
ranges.add(Tuple.tuple(new BytesRef(randomAlphaOfLength(15)), null));
}
if (randomBoolean()) {
ranges.add(Tuple.tuple(null, null));
}
Arrays.sort(ranges, (t1, t2) -> t1.v1().compareTo(t2.v1()));
RANGES = ranges;
}
@Override
protected InternalBinaryRange createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
boolean keyed = randomBoolean();
protected InternalBinaryRange createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations,
boolean keyed) {
DocValueFormat format = DocValueFormat.RAW;
List<InternalBinaryRange.Bucket> buckets = new ArrayList<>();
for (int i = 0; i < RANGES.length; ++i) {
int nullKey = randomBoolean() ? randomIntBetween(0, ranges.size() -1) : -1;
for (int i = 0; i < ranges.size(); ++i) {
final int docCount = randomIntBetween(1, 100);
buckets.add(new InternalBinaryRange.Bucket(format, keyed, randomAlphaOfLength(10),
RANGES[i].v1(), RANGES[i].v2(), docCount, InternalAggregations.EMPTY));
final String key = (i == nullKey) ? null: randomAlphaOfLength(10);
buckets.add(new InternalBinaryRange.Bucket(format, keyed, key, ranges.get(i).v1(), ranges.get(i).v2(), docCount, aggregations));
}
return new InternalBinaryRange(name, format, keyed, buckets, pipelineAggregators, Collections.emptyMap());
return new InternalBinaryRange(name, format, keyed, buckets, pipelineAggregators, metaData);
}
@Override
@ -71,6 +85,11 @@ public class InternalBinaryRangeTests extends InternalAggregationTestCase<Intern
return InternalBinaryRange::new;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedBinaryRange.class;
}
@Override
protected void assertReduced(InternalBinaryRange reduced, List<InternalBinaryRange> inputs) {
int pos = 0;
@ -86,4 +105,14 @@ public class InternalBinaryRangeTests extends InternalAggregationTestCase<Intern
pos ++;
}
}
@Override
protected Class<? extends InternalMultiBucketAggregation.InternalBucket> internalRangeBucketClass() {
return InternalBinaryRange.Bucket.class;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation.ParsedBucket> parsedRangeBucketClass() {
return ParsedBinaryRange.ParsedBucket.class;
}
}

View File

@ -20,33 +20,39 @@
package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
public abstract class InternalRangeTestCase<T extends InternalAggregation & Range> extends InternalAggregationTestCase<T> {
public abstract class InternalRangeTestCase<T extends InternalAggregation & Range> extends InternalMultiBucketAggregationTestCase<T> {
private boolean keyed;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
keyed = randomBoolean();
}
@Override
protected T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
return createTestInstance(name, pipelineAggregators, metaData, keyed);
protected T createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations) {
return createTestInstance(name, pipelineAggregators, metaData, aggregations, keyed);
}
protected abstract T createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations,
boolean keyed);
@Override
protected void assertReduced(T reduced, List<T> inputs) {
@ -65,4 +71,29 @@ public abstract class InternalRangeTestCase<T extends InternalAggregation & Rang
}
assertEquals(expectedCounts, actualCounts);
}
@Override
protected final void assertBucket(MultiBucketsAggregation.Bucket expected, MultiBucketsAggregation.Bucket actual, boolean checkOrder) {
super.assertBucket(expected, actual, checkOrder);
Class<?> internalBucketClass = internalRangeBucketClass();
assertNotNull("Internal bucket class must not be null", internalBucketClass);
assertTrue(internalBucketClass.isInstance(expected));
Class<?> parsedBucketClass = parsedRangeBucketClass();
assertNotNull("Parsed bucket class must not be null", parsedBucketClass);
assertTrue(parsedBucketClass.isInstance(actual));
Range.Bucket expectedRange = (Range.Bucket) expected;
Range.Bucket actualRange = (Range.Bucket) actual;
assertEquals(expectedRange.getFrom(), actualRange.getFrom());
assertEquals(expectedRange.getFromAsString(), actualRange.getFromAsString());
assertEquals(expectedRange.getTo(), actualRange.getTo());
assertEquals(expectedRange.getToAsString(), actualRange.getToAsString());
}
protected abstract Class<? extends InternalMultiBucketAggregation.InternalBucket> internalRangeBucketClass();
protected abstract Class<? extends ParsedMultiBucketAggregation.ParsedBucket> parsedRangeBucketClass();
}

View File

@ -23,8 +23,9 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.junit.Before;
import java.util.ArrayList;
import java.util.Collections;
@ -37,7 +38,6 @@ public class InternalRangeTests extends InternalRangeTestCase<InternalRange> {
private List<Tuple<Double, Double>> ranges;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
format = randomNumericDocValueFormat();
@ -58,11 +58,23 @@ public class InternalRangeTests extends InternalRangeTestCase<InternalRange> {
listOfRanges.add(Tuple.tuple(0.0, max / 2));
listOfRanges.add(Tuple.tuple(max / 3, max / 3 * 2));
}
if (rarely()) {
listOfRanges.add(Tuple.tuple(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY));
}
if (rarely()) {
listOfRanges.add(Tuple.tuple(Double.NEGATIVE_INFINITY, randomDouble()));
}
if (rarely()) {
listOfRanges.add(Tuple.tuple(randomDouble(), Double.POSITIVE_INFINITY));
}
ranges = Collections.unmodifiableList(listOfRanges);
}
@Override
protected InternalRange createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData,
protected InternalRange createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations,
boolean keyed) {
final List<InternalRange.Bucket> buckets = new ArrayList<>();
for (int i = 0; i < ranges.size(); ++i) {
@ -70,13 +82,28 @@ public class InternalRangeTests extends InternalRangeTestCase<InternalRange> {
int docCount = randomIntBetween(0, 1000);
double from = range.v1();
double to = range.v2();
buckets.add( new InternalRange.Bucket("range_" + i, from, to, docCount, InternalAggregations.EMPTY, keyed, format));
buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format));
}
return new InternalRange<>(name, buckets, format, keyed, pipelineAggregators, Collections.emptyMap());
return new InternalRange<>(name, buckets, format, keyed, pipelineAggregators, metaData);
}
@Override
protected Writeable.Reader<InternalRange> instanceReader() {
return InternalRange::new;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedRange.class;
}
@Override
protected Class<? extends InternalMultiBucketAggregation.InternalBucket> internalRangeBucketClass() {
return InternalRange.Bucket.class;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation.ParsedBucket> parsedRangeBucketClass() {
return ParsedRange.ParsedBucket.class;
}
}

View File

@ -23,11 +23,12 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Before;
import java.util.ArrayList;
import java.util.Collections;
@ -41,7 +42,6 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
private List<Tuple<Double, Double>> dateRanges;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
format = randomNumericDocValueFormat();
@ -78,6 +78,7 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
protected InternalDateRange createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations,
boolean keyed) {
final List<InternalDateRange.Bucket> buckets = new ArrayList<>();
for (int i = 0; i < dateRanges.size(); ++i) {
@ -85,7 +86,7 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
int docCount = randomIntBetween(0, 1000);
double from = range.v1();
double to = range.v2();
buckets.add( new InternalDateRange.Bucket("range_" + i, from, to, docCount, InternalAggregations.EMPTY, keyed, format));
buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format));
}
return new InternalDateRange(name, buckets, format, keyed, pipelineAggregators, metaData);
}
@ -94,4 +95,19 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
protected Writeable.Reader<InternalDateRange> instanceReader() {
return InternalDateRange::new;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedDateRange.class;
}
@Override
protected Class<? extends InternalMultiBucketAggregation.InternalBucket> internalRangeBucketClass() {
return InternalDateRange.Bucket.class;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation.ParsedBucket> parsedRangeBucketClass() {
return ParsedDateRange.ParsedBucket.class;
}
}

View File

@ -22,9 +22,10 @@ package org.elasticsearch.search.aggregations.bucket.range.geodistance;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.junit.Before;
import java.util.ArrayList;
import java.util.Collections;
@ -36,7 +37,6 @@ public class InternalGeoDistanceTests extends InternalRangeTestCase<InternalGeoD
private List<Tuple<Double, Double>> geoDistanceRanges;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
@ -58,6 +58,7 @@ public class InternalGeoDistanceTests extends InternalRangeTestCase<InternalGeoD
}
geoDistanceRanges = Collections.unmodifiableList(listOfRanges);
}
@Override
protected Writeable.Reader<InternalGeoDistance> instanceReader() {
return InternalGeoDistance::new;
@ -67,6 +68,7 @@ public class InternalGeoDistanceTests extends InternalRangeTestCase<InternalGeoD
protected InternalGeoDistance createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations,
boolean keyed) {
final List<InternalGeoDistance.Bucket> buckets = new ArrayList<>();
for (int i = 0; i < geoDistanceRanges.size(); ++i) {
@ -74,8 +76,23 @@ public class InternalGeoDistanceTests extends InternalRangeTestCase<InternalGeoD
int docCount = randomIntBetween(0, 1000);
double from = range.v1();
double to = range.v2();
buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, InternalAggregations.EMPTY, keyed));
buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, aggregations, keyed));
}
return new InternalGeoDistance(name, buckets, keyed, pipelineAggregators, metaData);
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedGeoDistance.class;
}
@Override
protected Class<? extends InternalMultiBucketAggregation.InternalBucket> internalRangeBucketClass() {
return InternalGeoDistance.Bucket.class;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation.ParsedBucket> parsedRangeBucketClass() {
return ParsedGeoDistance.ParsedBucket.class;
}
}

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.sampler;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.List;
@ -42,4 +43,9 @@ public class InternalSamplerTests extends InternalSingleBucketAggregationTestCas
protected Writeable.Reader<InternalSampler> instanceReader() {
return InternalSampler::new;
}
@Override
protected Class<? extends ParsedSingleBucketAggregation> implementationClass() {
return ParsedSampler.class;
}
}

View File

@ -19,8 +19,9 @@
package org.elasticsearch.search.aggregations.bucket.significant;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.Arrays;
import java.util.HashMap;
@ -30,7 +31,7 @@ import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public abstract class InternalSignificantTermsTestCase extends InternalAggregationTestCase<InternalSignificantTerms<?, ?>> {
public abstract class InternalSignificantTermsTestCase extends InternalMultiBucketAggregationTestCase<InternalSignificantTerms<?, ?>> {
@Override
protected InternalSignificantTerms createUnmappedInstance(String name,
@ -61,6 +62,41 @@ public abstract class InternalSignificantTermsTestCase extends InternalAggregati
}
}
@Override
protected void assertMultiBucketsAggregation(MultiBucketsAggregation expected, MultiBucketsAggregation actual, boolean checkOrder) {
super.assertMultiBucketsAggregation(expected, actual, checkOrder);
assertTrue(expected instanceof InternalSignificantTerms);
assertTrue(actual instanceof ParsedSignificantTerms);
InternalSignificantTerms expectedSigTerms = (InternalSignificantTerms) expected;
ParsedSignificantTerms actualSigTerms = (ParsedSignificantTerms) actual;
assertEquals(expectedSigTerms.getSubsetSize(), actualSigTerms.getSubsetSize());
for (SignificantTerms.Bucket bucket : (SignificantTerms) expected) {
String key = bucket.getKeyAsString();
assertBucket(expectedSigTerms.getBucketByKey(key), actualSigTerms.getBucketByKey(key), checkOrder);
}
}
@Override
protected void assertBucket(MultiBucketsAggregation.Bucket expected, MultiBucketsAggregation.Bucket actual, boolean checkOrder) {
super.assertBucket(expected, actual, checkOrder);
assertTrue(expected instanceof InternalSignificantTerms.Bucket);
assertTrue(actual instanceof ParsedSignificantTerms.ParsedBucket);
SignificantTerms.Bucket expectedSigTerm = (SignificantTerms.Bucket) expected;
SignificantTerms.Bucket actualSigTerm = (SignificantTerms.Bucket) actual;
assertEquals(expectedSigTerm.getSignificanceScore(), actualSigTerm.getSignificanceScore(), 0.0);
assertEquals(expectedSigTerm.getSubsetDf(), actualSigTerm.getSubsetDf());
assertEquals(expectedSigTerm.getSupersetDf(), actualSigTerm.getSupersetDf());
expectThrows(UnsupportedOperationException.class, actualSigTerm::getSubsetSize);
expectThrows(UnsupportedOperationException.class, actualSigTerm::getSupersetSize);
}
private static Map<Object, Long> toCounts(Stream<? extends SignificantTerms.Bucket> buckets,
Function<SignificantTerms.Bucket, Long> fn) {
return buckets.collect(Collectors.toMap(SignificantTerms.Bucket::getKey, fn, Long::sum));

View File

@ -21,13 +21,14 @@ package org.elasticsearch.search.aggregations.bucket.significant;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.junit.Before;
import java.util.ArrayList;
import java.util.HashSet;
@ -35,22 +36,23 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.search.aggregations.InternalAggregations.EMPTY;
public class SignificantLongTermsTests extends InternalSignificantTermsTestCase {
private SignificanceHeuristic significanceHeuristic;
private DocValueFormat format;
@Before
public void setUpSignificanceHeuristic() {
@Override
public void setUp() throws Exception {
super.setUp();
significanceHeuristic = randomSignificanceHeuristic();
format = randomNumericDocValueFormat();
}
@Override
protected InternalSignificantTerms createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
DocValueFormat format = DocValueFormat.RAW;
Map<String, Object> metaData,
InternalAggregations aggregations) {
int requiredSize = randomIntBetween(1, 5);
int shardSize = requiredSize + 2;
final int numBuckets = randomInt(shardSize);
@ -70,7 +72,7 @@ public class SignificantLongTermsTests extends InternalSignificantTermsTestCase
globalSubsetSize += subsetDf;
globalSupersetSize += supersetSize;
buckets.add(new SignificantLongTerms.Bucket(subsetDf, subsetDf, supersetDf, supersetSize, term, EMPTY, format));
buckets.add(new SignificantLongTerms.Bucket(subsetDf, subsetDf, supersetDf, supersetSize, term, aggregations, format));
}
return new SignificantLongTerms(name, requiredSize, 1L, pipelineAggregators, metaData, format, globalSubsetSize,
globalSupersetSize, significanceHeuristic, buckets);
@ -81,6 +83,11 @@ public class SignificantLongTermsTests extends InternalSignificantTermsTestCase
return SignificantLongTerms::new;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedSignificantLongTerms.class;
}
private static SignificanceHeuristic randomSignificanceHeuristic() {
return randomFrom(
new JLHScore(),

View File

@ -22,13 +22,14 @@ package org.elasticsearch.search.aggregations.bucket.significant;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.junit.Before;
import java.util.ArrayList;
import java.util.HashSet;
@ -36,21 +37,21 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.search.aggregations.InternalAggregations.EMPTY;
public class SignificantStringTermsTests extends InternalSignificantTermsTestCase {
private SignificanceHeuristic significanceHeuristic;
@Before
public void setUpSignificanceHeuristic() {
@Override
public void setUp() throws Exception {
super.setUp();
significanceHeuristic = randomSignificanceHeuristic();
}
@Override
protected InternalSignificantTerms createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
Map<String, Object> metaData,
InternalAggregations aggregations) {
DocValueFormat format = DocValueFormat.RAW;
int requiredSize = randomIntBetween(1, 5);
int shardSize = requiredSize + 2;
@ -71,7 +72,7 @@ public class SignificantStringTermsTests extends InternalSignificantTermsTestCas
globalSubsetSize += subsetDf;
globalSupersetSize += supersetSize;
buckets.add(new SignificantStringTerms.Bucket(term, subsetDf, subsetDf, supersetDf, supersetSize, EMPTY, format));
buckets.add(new SignificantStringTerms.Bucket(term, subsetDf, subsetDf, supersetDf, supersetSize, aggregations, format));
}
return new SignificantStringTerms(name, requiredSize, 1L, pipelineAggregators, metaData, format, globalSubsetSize,
globalSupersetSize, significanceHeuristic, buckets);
@ -82,6 +83,11 @@ public class SignificantStringTermsTests extends InternalSignificantTermsTestCas
return SignificantStringTerms::new;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedSignificantStringTerms.class;
}
private static SignificanceHeuristic randomSignificanceHeuristic() {
return randomFrom(
new JLHScore(),

View File

@ -21,9 +21,10 @@ package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.ArrayList;
import java.util.HashSet;
@ -34,17 +35,17 @@ import java.util.Set;
public class DoubleTermsTests extends InternalTermsTestCase {
@Override
protected InternalTerms<?, ?> createTestInstance(
String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
protected InternalTerms<?, ?> createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations,
boolean showTermDocCountError,
long docCountError) {
BucketOrder order = BucketOrder.count(false);
long minDocCount = 1;
int requiredSize = 3;
int shardSize = requiredSize + 2;
DocValueFormat format = DocValueFormat.RAW;
boolean showTermDocCountError = false;
long docCountError = -1;
DocValueFormat format = randomNumericDocValueFormat();
long otherDocCount = 0;
List<DoubleTerms.Bucket> buckets = new ArrayList<>();
final int numBuckets = randomInt(shardSize);
@ -52,8 +53,7 @@ public class DoubleTermsTests extends InternalTermsTestCase {
for (int i = 0; i < numBuckets; ++i) {
double term = randomValueOtherThanMany(d -> terms.add(d) == false, random()::nextDouble);
int docCount = randomIntBetween(1, 100);
buckets.add(new DoubleTerms.Bucket(term, docCount, InternalAggregations.EMPTY,
showTermDocCountError, docCountError, format));
buckets.add(new DoubleTerms.Bucket(term, docCount, aggregations, showTermDocCountError, docCountError, format));
}
return new DoubleTerms(name, order, requiredSize, minDocCount, pipelineAggregators,
metaData, format, shardSize, showTermDocCountError, otherDocCount, buckets, docCountError);
@ -64,4 +64,9 @@ public class DoubleTermsTests extends InternalTermsTestCase {
return DoubleTerms::new;
}
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedDoubleTerms.class;
}
}

Some files were not shown because too many files have changed in this diff Show More