Remove x-pack aggregations. (#59)
This PR removes the x-pack aggregations: string_stats, top_metrics, and inference.

Resolves #51
Relates #2

Signed-off-by: Peter Nied <petern@amazon.com>
This commit is contained in:
parent
64d0e8aa0d
commit
15638e8386
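For context, here is a minimal sketch of the client-side usage that goes away with this PR. It mirrors the deleted AnalyticsAggsIT test further down in the diff (the index name, field name, and aggregation name come from that test); the wrapper method and the already-constructed RestHighLevelClient are assumptions, not part of this change.

import java.io.IOException;

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.analytics.ParsedStringStats;
import org.elasticsearch.client.analytics.StringStatsAggregationBuilder;

// Sketch: request the (now removed) string_stats aggregation through the high-level client.
static ParsedStringStats messageStringStats(RestHighLevelClient client) throws IOException {
    SearchRequest search = new SearchRequest("test");
    search.source().aggregation(
        new StringStatsAggregationBuilder("test").field("message.keyword").showDistribution(true));
    SearchResponse response = client.search(search, RequestOptions.DEFAULT);
    // The response is parsed into ParsedStringStats via the registry entries removed below.
    return response.getAggregations().get("test");
}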
@@ -54,12 +54,6 @@ import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.analytics.InferencePipelineAggregationBuilder;
import org.elasticsearch.client.analytics.ParsedInference;
import org.elasticsearch.client.analytics.ParsedStringStats;
import org.elasticsearch.client.analytics.ParsedTopMetrics;
import org.elasticsearch.client.analytics.StringStatsAggregationBuilder;
import org.elasticsearch.client.analytics.TopMetricsAggregationBuilder;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.CountResponse;
import org.elasticsearch.client.core.GetSourceRequest;
@@ -1802,9 +1796,6 @@ public class RestHighLevelClient implements Closeable {
        map.put(IpRangeAggregationBuilder.NAME, (p, c) -> ParsedBinaryRange.fromXContent(p, (String) c));
        map.put(TopHitsAggregationBuilder.NAME, (p, c) -> ParsedTopHits.fromXContent(p, (String) c));
        map.put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c));
        map.put(StringStatsAggregationBuilder.NAME, (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c));
        map.put(TopMetricsAggregationBuilder.NAME, (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c));
        map.put(InferencePipelineAggregationBuilder.NAME, (p, c) -> ParsedInference.fromXContent(p, (String) (c)));
        List<NamedXContentRegistry.Entry> entries = map.entrySet().stream()
            .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()))
            .collect(Collectors.toList());
@@ -1,28 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.analytics;

import org.elasticsearch.common.xcontent.ToXContentObject;

public interface InferenceConfig extends ToXContentObject {
    /**
     * @return The name of the XContentObject that is to be serialized
     */
    String getName();
}
@@ -1,140 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.analytics;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * For building inference pipeline aggregations
 *
 * NOTE: This extends {@linkplain AbstractPipelineAggregationBuilder} for compatibility
 * with {@link SearchSourceBuilder#aggregation(PipelineAggregationBuilder)} but it
 * doesn't support any "server" side things like {@linkplain #doWriteTo(StreamOutput)}
 * or {@linkplain #createInternal(Map)}
 */
public class InferencePipelineAggregationBuilder extends AbstractPipelineAggregationBuilder<InferencePipelineAggregationBuilder> {

    public static String NAME = "inference";

    public static final ParseField MODEL_ID = new ParseField("model_id");
    private static final ParseField INFERENCE_CONFIG = new ParseField("inference_config");

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<InferencePipelineAggregationBuilder, String> PARSER = new ConstructingObjectParser<>(
        NAME, false,
        (args, name) -> new InferencePipelineAggregationBuilder(name, (String) args[0], (Map<String, String>) args[1])
    );

    static {
        PARSER.declareString(constructorArg(), MODEL_ID);
        PARSER.declareObject(constructorArg(), (p, c) -> p.mapStrings(), BUCKETS_PATH_FIELD);
        PARSER.declareNamedObject(InferencePipelineAggregationBuilder::setInferenceConfig,
            (p, c, n) -> p.namedObject(InferenceConfig.class, n, c), INFERENCE_CONFIG);
    }

    private final Map<String, String> bucketPathMap;
    private final String modelId;
    private InferenceConfig inferenceConfig;

    public static InferencePipelineAggregationBuilder parse(String pipelineAggregatorName,
                                                            XContentParser parser) {
        return PARSER.apply(parser, pipelineAggregatorName);
    }

    public InferencePipelineAggregationBuilder(String name, String modelId, Map<String, String> bucketsPath) {
        super(name, NAME, new TreeMap<>(bucketsPath).values().toArray(new String[] {}));
        this.modelId = modelId;
        this.bucketPathMap = bucketsPath;
    }

    public void setInferenceConfig(InferenceConfig inferenceConfig) {
        this.inferenceConfig = inferenceConfig;
    }

    @Override
    protected void validate(ValidationContext context) {
        // validation occurs on the server
    }

    @Override
    protected void doWriteTo(StreamOutput out) {
        throw new UnsupportedOperationException();
    }

    @Override
    protected PipelineAggregator createInternal(Map<String, Object> metaData) {
        throw new UnsupportedOperationException();
    }

    @Override
    protected boolean overrideBucketsPath() {
        return true;
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field(MODEL_ID.getPreferredName(), modelId);
        builder.field(BUCKETS_PATH_FIELD.getPreferredName(), bucketPathMap);
        if (inferenceConfig != null) {
            builder.startObject(INFERENCE_CONFIG.getPreferredName());
            builder.field(inferenceConfig.getName(), inferenceConfig);
            builder.endObject();
        }
        return builder;
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), bucketPathMap, modelId, inferenceConfig);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null || getClass() != obj.getClass()) return false;
        if (super.equals(obj) == false) return false;

        InferencePipelineAggregationBuilder other = (InferencePipelineAggregationBuilder) obj;
        return Objects.equals(bucketPathMap, other.bucketPathMap)
            && Objects.equals(modelId, other.modelId)
            && Objects.equals(inferenceConfig, other.inferenceConfig);
    }
}
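For orientation, a minimal sketch of how the removed builder above was meant to be wired into a search, based only on its javadoc (compatibility with SearchSourceBuilder#aggregation(PipelineAggregationBuilder)) and its constructor. The model id, field names, and buckets_path entries below are invented placeholders, not values from this PR.

import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.client.analytics.InferencePipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;

// Sketch: attach the removed inference pipeline aggregation under a terms bucket.
static SearchSourceBuilder inferenceSearchSource() {
    // Map the model's input field names to bucket paths (hypothetical names).
    Map<String, String> bucketsPath = new HashMap<>();
    bucketsPath.put("avg_cost", "avg_cost");

    TermsAggregationBuilder byUser = AggregationBuilders.terms("by_user").field("user");
    byUser.subAggregation(AggregationBuilders.avg("avg_cost").field("cost"));
    // "my_model" stands in for a trained model id.
    byUser.subAggregation(new InferencePipelineAggregationBuilder("infer", "my_model", bucketsPath));

    return new SearchSourceBuilder().aggregation(byUser);
}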
@@ -1,135 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.analytics;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedAggregation;

import java.io.IOException;
import java.util.List;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

/**
 * This class parses the superset of all possible fields that may be written by
 * InferenceResults. The warning field is mutually exclusive with all the other fields.
 *
 * In the case of classification results {@link #getValue()} may return a String,
 * Boolean or a Double. For regression results {@link #getValue()} is always
 * a Double.
 */
public class ParsedInference extends ParsedAggregation {

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<ParsedInference, Void> PARSER =
        new ConstructingObjectParser<>(ParsedInference.class.getSimpleName(), true,
            args -> new ParsedInference(args[0], (List<FeatureImportance>) args[1],
                (List<TopClassEntry>) args[2], (String) args[3]));

    public static final ParseField FEATURE_IMPORTANCE = new ParseField("feature_importance");
    public static final ParseField WARNING = new ParseField("warning");
    public static final ParseField TOP_CLASSES = new ParseField("top_classes");

    static {
        PARSER.declareField(optionalConstructorArg(), (p, n) -> {
            Object o;
            XContentParser.Token token = p.currentToken();
            if (token == XContentParser.Token.VALUE_STRING) {
                o = p.text();
            } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
                o = p.booleanValue();
            } else if (token == XContentParser.Token.VALUE_NUMBER) {
                o = p.doubleValue();
            } else {
                throw new XContentParseException(p.getTokenLocation(),
                    "[" + ParsedInference.class.getSimpleName() + "] failed to parse field [" + CommonFields.VALUE + "] "
                        + "value [" + token + "] is not a string, boolean or number");
            }
            return o;
        }, CommonFields.VALUE, ObjectParser.ValueType.VALUE);
        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> FeatureImportance.fromXContent(p), FEATURE_IMPORTANCE);
        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TopClassEntry.fromXContent(p), TOP_CLASSES);
        PARSER.declareString(optionalConstructorArg(), WARNING);
        declareAggregationFields(PARSER);
    }

    public static ParsedInference fromXContent(XContentParser parser, final String name) {
        ParsedInference parsed = PARSER.apply(parser, null);
        parsed.setName(name);
        return parsed;
    }

    private final Object value;
    private final List<FeatureImportance> featureImportance;
    private final List<TopClassEntry> topClasses;
    private final String warning;

    ParsedInference(Object value,
                    List<FeatureImportance> featureImportance,
                    List<TopClassEntry> topClasses,
                    String warning) {
        this.value = value;
        this.warning = warning;
        this.featureImportance = featureImportance;
        this.topClasses = topClasses;
    }

    public Object getValue() {
        return value;
    }

    public List<FeatureImportance> getFeatureImportance() {
        return featureImportance;
    }

    public List<TopClassEntry> getTopClasses() {
        return topClasses;
    }

    public String getWarning() {
        return warning;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        if (warning != null) {
            builder.field(WARNING.getPreferredName(), warning);
        } else {
            builder.field(CommonFields.VALUE.getPreferredName(), value);
            if (topClasses != null && topClasses.size() > 0) {
                builder.field(TOP_CLASSES.getPreferredName(), topClasses);
            }
            if (featureImportance != null && featureImportance.size() > 0) {
                builder.field(FEATURE_IMPORTANCE.getPreferredName(), featureImportance);
            }
        }
        return builder;
    }

    @Override
    public String getType() {
        return InferencePipelineAggregationBuilder.NAME;
    }
}
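A small, hypothetical sketch of consuming the parsed result described by the javadoc above; the aggregation name "infer" and the surrounding method are illustrative only, not part of this PR.

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.analytics.ParsedInference;

// Sketch: reading an inference result parsed by the class removed above.
static void readInference(SearchResponse searchResponse) {
    ParsedInference inference = searchResponse.getAggregations().get("infer"); // "infer" is hypothetical
    if (inference.getWarning() != null) {
        // The warning field is mutually exclusive with every other field.
        System.err.println(inference.getWarning());
    } else {
        // String, Boolean, or Double for classification; always a Double for regression.
        Object value = inference.getValue();
        System.out.println(value);
    }
}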
@@ -1,172 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.analytics;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedAggregation;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

/**
 * Results from the {@code string_stats} aggregation.
 */
public class ParsedStringStats extends ParsedAggregation {
    private static final ParseField COUNT_FIELD = new ParseField("count");
    private static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length");
    private static final ParseField MAX_LENGTH_FIELD = new ParseField("max_length");
    private static final ParseField AVG_LENGTH_FIELD = new ParseField("avg_length");
    private static final ParseField ENTROPY_FIELD = new ParseField("entropy");
    private static final ParseField DISTRIBUTION_FIELD = new ParseField("distribution");

    private final long count;
    private final int minLength;
    private final int maxLength;
    private final double avgLength;
    private final double entropy;
    private final boolean showDistribution;
    private final Map<String, Double> distribution;

    private ParsedStringStats(String name, long count, int minLength, int maxLength, double avgLength, double entropy,
                              boolean showDistribution, Map<String, Double> distribution) {
        setName(name);
        this.count = count;
        this.minLength = minLength;
        this.maxLength = maxLength;
        this.avgLength = avgLength;
        this.entropy = entropy;
        this.showDistribution = showDistribution;
        this.distribution = distribution;
    }

    /**
     * The number of non-empty fields counted.
     */
    public long getCount() {
        return count;
    }

    /**
     * The length of the shortest term.
     */
    public int getMinLength() {
        return minLength;
    }

    /**
     * The length of the longest term.
     */
    public int getMaxLength() {
        return maxLength;
    }

    /**
     * The average length computed over all terms.
     */
    public double getAvgLength() {
        return avgLength;
    }

    /**
     * The <a href="https://en.wikipedia.org/wiki/Entropy_(information_theory)">Shannon Entropy</a>
     * value computed over all terms collected by the aggregation.
     * Shannon entropy quantifies the amount of information contained in
     * the field. It is a very useful metric for measuring a wide range of
     * properties of a data set, such as diversity, similarity,
     * randomness etc.
     */
    public double getEntropy() {
        return entropy;
    }

    /**
     * The probability distribution for all characters. {@code null} unless
     * explicitly requested with {@link StringStatsAggregationBuilder#showDistribution(boolean)}.
     */
    public Map<String, Double> getDistribution() {
        return distribution;
    }

    @Override
    public String getType() {
        return StringStatsAggregationBuilder.NAME;
    }

    private static final Object NULL_DISTRIBUTION_MARKER = new Object();
    public static final ConstructingObjectParser<ParsedStringStats, String> PARSER = new ConstructingObjectParser<>(
        StringStatsAggregationBuilder.NAME, true, (args, name) -> {
            long count = (long) args[0];
            boolean disributionWasExplicitNull = args[5] == NULL_DISTRIBUTION_MARKER;
            if (count == 0) {
                return new ParsedStringStats(name, count, 0, 0, 0, 0, disributionWasExplicitNull, null);
            }
            int minLength = (int) args[1];
            int maxLength = (int) args[2];
            double averageLength = (double) args[3];
            double entropy = (double) args[4];
            if (disributionWasExplicitNull) {
                return new ParsedStringStats(name, count, minLength, maxLength, averageLength, entropy,
                    disributionWasExplicitNull, null);
            } else {
                @SuppressWarnings("unchecked")
                Map<String, Double> distribution = (Map<String, Double>) args[5];
                return new ParsedStringStats(name, count, minLength, maxLength, averageLength, entropy,
                    distribution != null, distribution);
            }
        });
    static {
        PARSER.declareLong(constructorArg(), COUNT_FIELD);
        PARSER.declareIntOrNull(constructorArg(), 0, MIN_LENGTH_FIELD);
        PARSER.declareIntOrNull(constructorArg(), 0, MAX_LENGTH_FIELD);
        PARSER.declareDoubleOrNull(constructorArg(), 0, AVG_LENGTH_FIELD);
        PARSER.declareDoubleOrNull(constructorArg(), 0, ENTROPY_FIELD);
        PARSER.declareObjectOrNull(optionalConstructorArg(), (p, c) -> unmodifiableMap(p.map(HashMap::new, XContentParser::doubleValue)),
            NULL_DISTRIBUTION_MARKER, DISTRIBUTION_FIELD);
        ParsedAggregation.declareAggregationFields(PARSER);
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(COUNT_FIELD.getPreferredName(), count);
        if (count == 0) {
            builder.nullField(MIN_LENGTH_FIELD.getPreferredName());
            builder.nullField(MAX_LENGTH_FIELD.getPreferredName());
            builder.nullField(AVG_LENGTH_FIELD.getPreferredName());
            builder.field(ENTROPY_FIELD.getPreferredName(), 0.0);
        } else {
            builder.field(MIN_LENGTH_FIELD.getPreferredName(), minLength);
            builder.field(MAX_LENGTH_FIELD.getPreferredName(), maxLength);
            builder.field(AVG_LENGTH_FIELD.getPreferredName(), avgLength);
            builder.field(ENTROPY_FIELD.getPreferredName(), entropy);
        }
        if (showDistribution) {
            builder.field(DISTRIBUTION_FIELD.getPreferredName(), distribution);
        }
        return builder;
    }
}
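The getEntropy() javadoc above refers to Shannon entropy. As a reference point, here is a sketch of the standard definition, H = -sum over characters c of p(c) * log2 p(c), computed from the per-character probabilities that show_distribution returns. This is not client code from this PR, just the textbook formula for comparison.

import java.util.Map;

// Sketch: Shannon entropy over the per-character probabilities in the "distribution" map.
static double shannonEntropy(Map<String, Double> distribution) {
    double entropy = 0.0;
    for (double p : distribution.values()) {
        if (p > 0) {
            entropy -= p * (Math.log(p) / Math.log(2)); // log base 2
        }
    }
    return entropy;
}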
@@ -1,132 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.analytics;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.ParsedAggregation;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * Results of the {@code top_metrics} aggregation.
 */
public class ParsedTopMetrics extends ParsedAggregation {
    private static final ParseField TOP_FIELD = new ParseField("top");

    private final List<TopMetrics> topMetrics;

    private ParsedTopMetrics(String name, List<TopMetrics> topMetrics) {
        setName(name);
        this.topMetrics = topMetrics;
    }

    /**
     * The list of top metrics, in sorted order.
     */
    public List<TopMetrics> getTopMetrics() {
        return topMetrics;
    }

    @Override
    public String getType() {
        return TopMetricsAggregationBuilder.NAME;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.startArray(TOP_FIELD.getPreferredName());
        for (TopMetrics top : topMetrics) {
            top.toXContent(builder, params);
        }
        return builder.endArray();
    }

    public static final ConstructingObjectParser<ParsedTopMetrics, String> PARSER = new ConstructingObjectParser<>(
        TopMetricsAggregationBuilder.NAME, true, (args, name) -> {
            @SuppressWarnings("unchecked")
            List<TopMetrics> topMetrics = (List<TopMetrics>) args[0];
            return new ParsedTopMetrics(name, topMetrics);
        });
    static {
        PARSER.declareObjectArray(constructorArg(), (p, c) -> TopMetrics.PARSER.parse(p, null), TOP_FIELD);
        ParsedAggregation.declareAggregationFields(PARSER);
    }

    /**
     * The metrics belonging to the document with the "top" sort key.
     */
    public static class TopMetrics implements ToXContent {
        private static final ParseField SORT_FIELD = new ParseField("sort");
        private static final ParseField METRICS_FIELD = new ParseField("metrics");

        private final List<Object> sort;
        private final Map<String, Object> metrics;

        private TopMetrics(List<Object> sort, Map<String, Object> metrics) {
            this.sort = sort;
            this.metrics = metrics;
        }

        /**
         * The sort key for these top metrics.
         */
        public List<Object> getSort() {
            return sort;
        }

        /**
         * The top metric values returned by the aggregation.
         */
        public Map<String, Object> getMetrics() {
            return metrics;
        }

        private static final ConstructingObjectParser<TopMetrics, Void> PARSER = new ConstructingObjectParser<>("top", true,
            (args, name) -> {
                @SuppressWarnings("unchecked")
                List<Object> sort = (List<Object>) args[0];
                @SuppressWarnings("unchecked")
                Map<String, Object> metrics = (Map<String, Object>) args[1];
                return new TopMetrics(sort, metrics);
            });
        static {
            PARSER.declareFieldArray(constructorArg(), (p, c) -> XContentParserUtils.parseFieldsValue(p),
                SORT_FIELD, ObjectParser.ValueType.VALUE_ARRAY);
            PARSER.declareObject(constructorArg(), (p, c) -> p.map(), METRICS_FIELD);
        }

        public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
            builder.startObject();
            builder.field(SORT_FIELD.getPreferredName(), sort);
            builder.field(METRICS_FIELD.getPreferredName(), metrics);
            builder.endObject();
            return builder;
        }
    }
}
@@ -1,131 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.analytics;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;

/**
 * Builds the {@code string_stats} aggregation request.
 * <p>
 * NOTE: This extends {@linkplain AbstractAggregationBuilder} for compatibility
 * with {@link SearchSourceBuilder#aggregation(AggregationBuilder)} but it
 * doesn't support any "server" side things like
 * {@linkplain Writeable#writeTo(StreamOutput)},
 * {@linkplain AggregationBuilder#rewrite(QueryRewriteContext)}, or
 * {@linkplain AbstractAggregationBuilder#build(QueryShardContext, AggregatorFactory)}.
 */
public class StringStatsAggregationBuilder extends ValuesSourceAggregationBuilder<StringStatsAggregationBuilder> {
    public static final String NAME = "string_stats";
    private static final ParseField SHOW_DISTRIBUTION_FIELD = new ParseField("show_distribution");

    private boolean showDistribution = false;

    public StringStatsAggregationBuilder(String name) {
        super(name);
    }

    /**
     * Compute the distribution of each character. Disabled by default.
     * @return this for chaining
     */
    public StringStatsAggregationBuilder showDistribution(boolean showDistribution) {
        this.showDistribution = showDistribution;
        return this;
    }

    @Override
    protected ValuesSourceType defaultValueSourceType() {
        return CoreValuesSourceType.BYTES;
    }

    @Override
    public String getType() {
        return NAME;
    }

    @Override
    protected ValuesSourceRegistry.RegistryKey<?> getRegistryKey() {
        // This would be called from the same thing that calls innerBuild, which also throws. So it's "safe" to throw here.
        throw new UnsupportedOperationException();
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        return builder.field(StringStatsAggregationBuilder.SHOW_DISTRIBUTION_FIELD.getPreferredName(), showDistribution);
    }

    @Override
    protected void innerWriteTo(StreamOutput out) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public BucketCardinality bucketCardinality() {
        return BucketCardinality.NONE;
    }

    @Override
    protected ValuesSourceAggregatorFactory innerBuild(QueryShardContext queryShardContext, ValuesSourceConfig config,
                                                       AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metadata) {
        throw new UnsupportedOperationException();
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), showDistribution);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        if (false == super.equals(obj)) {
            return false;
        }
        StringStatsAggregationBuilder other = (StringStatsAggregationBuilder) obj;
        return showDistribution == other.showDistribution;
    }
}
@@ -1,112 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.analytics;

import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilder;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * Builds the Top Metrics aggregation request.
 * <p>
 * NOTE: This extends {@linkplain AbstractAggregationBuilder} for compatibility
 * with {@link SearchSourceBuilder#aggregation(AggregationBuilder)} but it
 * doesn't support any "server" side things like
 * {@linkplain Writeable#writeTo(StreamOutput)},
 * {@linkplain AggregationBuilder#rewrite(QueryRewriteContext)}, or
 * {@linkplain AbstractAggregationBuilder#build(QueryShardContext, AggregatorFactory)}.
 */
public class TopMetricsAggregationBuilder extends AbstractAggregationBuilder<TopMetricsAggregationBuilder> {
    public static final String NAME = "top_metrics";

    private final SortBuilder<?> sort;
    private final int size;
    private final List<String> metrics;

    /**
     * Build the request.
     * @param name the name of the metric
     * @param sort the sort key used to select the top metrics
     * @param size number of results to return per bucket
     * @param metrics the names of the fields to select
     */
    public TopMetricsAggregationBuilder(String name, SortBuilder<?> sort, int size, String... metrics) {
        super(name);
        this.sort = sort;
        this.size = size;
        this.metrics = Arrays.asList(metrics);
    }

    @Override
    public String getType() {
        return NAME;
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        {
            builder.startArray("sort");
            sort.toXContent(builder, params);
            builder.endArray();
            builder.field("size", size);
            builder.startArray("metrics");
            for (String metric : metrics) {
                builder.startObject().field("field", metric).endObject();
            }
            builder.endArray();
        }
        return builder.endObject();
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public BucketCardinality bucketCardinality() {
        return BucketCardinality.NONE;
    }

    @Override
    protected AggregatorFactory doBuild(QueryShardContext queryShardContext, AggregatorFactory parent, Builder subfactoriesBuilder)
        throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metadata) {
        throw new UnsupportedOperationException();
    }
}
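For context, a sketch of a request built with the removed builder above and, in the trailing comment, roughly what internalXContent serializes it to. The index, sort field, and metric name mirror the deleted AnalyticsAggsIT test below; the exact JSON rendering of the sort clause is an assumption based on FieldSortBuilder's usual output.

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.analytics.TopMetricsAggregationBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;

// Sketch: build a top_metrics request the way the removed tests did.
static SearchRequest topMetricsRequest() {
    SearchRequest search = new SearchRequest("test");
    search.source().aggregation(new TopMetricsAggregationBuilder(
        "test", new FieldSortBuilder("s").order(SortOrder.DESC), 1, "v"));
    // The aggregation body rendered by internalXContent is, approximately:
    //   "top_metrics": {
    //     "sort": [ { "s": { "order": "desc" } } ],
    //     "size": 1,
    //     "metrics": [ { "field": "v" } ]
    //   }
    return search;
}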
@@ -46,7 +46,6 @@ import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.action.support.replication.ReplicationRequest;
@@ -1,160 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.analytics;

import org.apache.lucene.util.LuceneTestCase.BadApple;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;

import java.io.IOException;

import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.aMapWithSize;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasSize;

@BadApple(bugUrl = "https://github.com/opendistro-for-elasticsearch/search/issues/51")
public class AnalyticsAggsIT extends ESRestHighLevelClientTestCase {
    public void testStringStats() throws IOException {
        BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE);
        bulk.add(new IndexRequest().source(XContentType.JSON, "message", "trying out elasticsearch"));
        bulk.add(new IndexRequest().source(XContentType.JSON, "message", "more words"));
        highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
        SearchRequest search = new SearchRequest("test");
        search.source().aggregation(new StringStatsAggregationBuilder("test").field("message.keyword").showDistribution(true));
        SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT);
        ParsedStringStats stats = response.getAggregations().get("test");
        assertThat(stats.getCount(), equalTo(2L));
        assertThat(stats.getMinLength(), equalTo(10));
        assertThat(stats.getMaxLength(), equalTo(24));
        assertThat(stats.getAvgLength(), equalTo(17.0));
        assertThat(stats.getEntropy(), closeTo(4, .1));
        assertThat(stats.getDistribution(), aMapWithSize(18));
        assertThat(stats.getDistribution(), hasEntry(equalTo("o"), closeTo(.09, .005)));
        assertThat(stats.getDistribution(), hasEntry(equalTo("r"), closeTo(.12, .005)));
        assertThat(stats.getDistribution(), hasEntry(equalTo("t"), closeTo(.09, .005)));
    }

    public void testTopMetricsDoubleMetric() throws IOException {
        indexTopMetricsDoubleTestData();
        SearchRequest search = new SearchRequest("test");
        search.source().aggregation(new TopMetricsAggregationBuilder(
            "test", new FieldSortBuilder("s").order(SortOrder.DESC), 1, "v"));
        SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT);
        ParsedTopMetrics top = response.getAggregations().get("test");
        assertThat(top.getTopMetrics(), hasSize(1));
        ParsedTopMetrics.TopMetrics metric = top.getTopMetrics().get(0);
        assertThat(metric.getSort(), equalTo(singletonList(2)));
        assertThat(metric.getMetrics(), equalTo(singletonMap("v", 3.0)));
    }

    public void testTopMetricsLongMetric() throws IOException {
        indexTopMetricsLongTestData();
        SearchRequest search = new SearchRequest("test");
        search.source().aggregation(new TopMetricsAggregationBuilder(
            "test", new FieldSortBuilder("s").order(SortOrder.DESC), 1, "v"));
        SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT);
        ParsedTopMetrics top = response.getAggregations().get("test");
        assertThat(top.getTopMetrics(), hasSize(1));
        ParsedTopMetrics.TopMetrics metric = top.getTopMetrics().get(0);
        assertThat(metric.getSort(), equalTo(singletonList(2)));
        assertThat(metric.getMetrics(), equalTo(singletonMap("v", 3)));
    }

    public void testTopMetricsDateMetric() throws IOException {
        indexTopMetricsDateTestData();
        SearchRequest search = new SearchRequest("test");
        search.source().aggregation(new TopMetricsAggregationBuilder(
            "test", new FieldSortBuilder("s").order(SortOrder.DESC), 1, "v"));
        SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT);
        ParsedTopMetrics top = response.getAggregations().get("test");
        assertThat(top.getTopMetrics(), hasSize(1));
        ParsedTopMetrics.TopMetrics metric = top.getTopMetrics().get(0);
        assertThat(metric.getSort(), equalTo(singletonList(2)));
        assertThat(metric.getMetrics(), equalTo(singletonMap("v", "2020-01-02T01:01:00.000Z")));
    }

    public void testTopMetricsManyMetrics() throws IOException {
        indexTopMetricsDoubleTestData();
        SearchRequest search = new SearchRequest("test");
        search.source().aggregation(new TopMetricsAggregationBuilder(
            "test", new FieldSortBuilder("s").order(SortOrder.DESC), 1, "v", "m"));
        SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT);
        ParsedTopMetrics top = response.getAggregations().get("test");
        assertThat(top.getTopMetrics(), hasSize(1));
        ParsedTopMetrics.TopMetrics metric = top.getTopMetrics().get(0);
        assertThat(metric.getSort(), equalTo(singletonList(2)));
        assertThat(metric.getMetrics(), hasEntry("v", 3.0));
        assertThat(metric.getMetrics(), hasEntry("m", 13.0));
    }

    public void testTopMetricsSizeTwo() throws IOException {
        indexTopMetricsDoubleTestData();
        SearchRequest search = new SearchRequest("test");
        search.source().aggregation(new TopMetricsAggregationBuilder(
            "test", new FieldSortBuilder("s").order(SortOrder.DESC), 2, "v"));
        SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT);
        ParsedTopMetrics top = response.getAggregations().get("test");
        assertThat(top.getTopMetrics(), hasSize(2));
        ParsedTopMetrics.TopMetrics metric = top.getTopMetrics().get(0);
        assertThat(metric.getSort(), equalTo(singletonList(2)));
        assertThat(metric.getMetrics(), equalTo(singletonMap("v", 3.0)));
        metric = top.getTopMetrics().get(1);
        assertThat(metric.getSort(), equalTo(singletonList(1)));
        assertThat(metric.getMetrics(), equalTo(singletonMap("v", 2.0)));
    }

    private void indexTopMetricsDoubleTestData() throws IOException {
        BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE);
        bulk.add(new IndexRequest().source(XContentType.JSON, "s", 1, "v", 2.0, "m", 12.0));
        bulk.add(new IndexRequest().source(XContentType.JSON, "s", 2, "v", 3.0, "m", 13.0));
        highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
    }

    private void indexTopMetricsLongTestData() throws IOException {
        BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE);
        bulk.add(new IndexRequest().source(XContentType.JSON, "s", 1, "v", 2));
        bulk.add(new IndexRequest().source(XContentType.JSON, "s", 2, "v", 3));
        highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
    }

    private void indexTopMetricsDateTestData() throws IOException {
        CreateIndexRequest create = new CreateIndexRequest("test");
        create.mapping("{\"properties\": {\"v\": {\"type\": \"date\"}}}", XContentType.JSON);
        highLevelClient().indices().create(create, RequestOptions.DEFAULT);
        BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE);
        bulk.add(new IndexRequest().source(XContentType.JSON, "s", 1, "v", "2020-01-01T01:01:00Z"));
        bulk.add(new IndexRequest().source(XContentType.JSON, "s", 2, "v", "2020-01-02T01:01:00Z"));
        highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
    }
}
@@ -34,6 +34,7 @@ import org.junit.Before;

import java.io.IOException;


public class RemoteClustersIT extends AbstractMultiClusterRemoteTestCase {

    @Before