mirror of
https://github.com/honeymoose/OpenSearch.git
synced 2025-02-22 21:05:23 +00:00
Start using ObjectParser
for aggs. (#22048)
This is an attempt to start moving aggs parsing to `ObjectParser`. There is still A LOT to do, but ObjectParser is way better than the way aggregations parsing works today. For instance in most cases, we reject numbers that are provided as strings, which we are supposed to accept since some client languages (looking at you Perl) cannot make sure to use the appropriate types. Relates to #22009
This commit is contained in:
parent
b1cef5fdf8
commit
36f598138a
@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.joda.Joda;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ContextParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
@ -43,7 +44,6 @@ import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.BiFunction;
|
||||
|
||||
/**
|
||||
* A collection of tombstones for explicitly marking indices as deleted in the cluster state.
|
||||
@ -367,7 +367,7 @@ public final class IndexGraveyard implements MetaData.Custom {
|
||||
TOMBSTONE_PARSER.declareString((b, s) -> {}, new ParseField(DELETE_DATE_KEY));
|
||||
}
|
||||
|
||||
static BiFunction<XContentParser, ParseFieldMatcherSupplier, Tombstone> getParser() {
|
||||
static ContextParser<ParseFieldMatcherSupplier, Tombstone> getParser() {
|
||||
return (p, c) -> TOMBSTONE_PARSER.apply(p, c).build();
|
||||
}
|
||||
|
||||
|
@ -35,14 +35,7 @@ import java.util.function.BiFunction;
|
||||
* Superclass for {@link ObjectParser} and {@link ConstructingObjectParser}. Defines most of the "declare" methods so they can be shared.
|
||||
*/
|
||||
public abstract class AbstractObjectParser<Value, Context extends ParseFieldMatcherSupplier>
|
||||
implements BiFunction<XContentParser, Context, Value> {
|
||||
/**
|
||||
* Reads an object from a parser using some context.
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface ContextParser<Context, T> {
|
||||
T parse(XContentParser p, Context c) throws IOException;
|
||||
}
|
||||
implements BiFunction<XContentParser, Context, Value>, ContextParser<Context, Value> {
|
||||
|
||||
/**
|
||||
* Reads an object right from the parser without any context.
|
||||
@ -54,7 +47,7 @@ public abstract class AbstractObjectParser<Value, Context extends ParseFieldMatc
|
||||
|
||||
/**
|
||||
* Declare some field. Usually it is easier to use {@link #declareString(BiConsumer, ParseField)} or
|
||||
* {@link #declareObject(BiConsumer, BiFunction, ParseField)} rather than call this directly.
|
||||
* {@link #declareObject(BiConsumer, ContextParser, ParseField)} rather than call this directly.
|
||||
*/
|
||||
public abstract <T> void declareField(BiConsumer<Value, T> consumer, ContextParser<Context, T> parser, ParseField parseField,
|
||||
ValueType type);
|
||||
@ -66,8 +59,8 @@ public abstract class AbstractObjectParser<Value, Context extends ParseFieldMatc
|
||||
declareField(consumer, (p, c) -> parser.parse(p), parseField, type);
|
||||
}
|
||||
|
||||
public <T> void declareObject(BiConsumer<Value, T> consumer, BiFunction<XContentParser, Context, T> objectParser, ParseField field) {
|
||||
declareField(consumer, (p, c) -> objectParser.apply(p, c), field, ValueType.OBJECT);
|
||||
public <T> void declareObject(BiConsumer<Value, T> consumer, ContextParser<Context, T> objectParser, ParseField field) {
|
||||
declareField(consumer, (p, c) -> objectParser.parse(p, c), field, ValueType.OBJECT);
|
||||
}
|
||||
|
||||
public void declareFloat(BiConsumer<Value, Float> consumer, ParseField field) {
|
||||
@ -103,9 +96,9 @@ public abstract class AbstractObjectParser<Value, Context extends ParseFieldMatc
|
||||
declareField(consumer, XContentParser::booleanValue, field, ValueType.BOOLEAN);
|
||||
}
|
||||
|
||||
public <T> void declareObjectArray(BiConsumer<Value, List<T>> consumer, BiFunction<XContentParser, Context, T> objectParser,
|
||||
public <T> void declareObjectArray(BiConsumer<Value, List<T>> consumer, ContextParser<Context, T> objectParser,
|
||||
ParseField field) {
|
||||
declareField(consumer, (p, c) -> parseArray(p, () -> objectParser.apply(p, c)), field, ValueType.OBJECT_ARRAY);
|
||||
declareField(consumer, (p, c) -> parseArray(p, () -> objectParser.parse(p, c)), field, ValueType.OBJECT_ARRAY);
|
||||
}
|
||||
|
||||
public void declareStringArray(BiConsumer<Value, List<String>> consumer, ParseField field) {
|
||||
@ -144,7 +137,7 @@ public abstract class AbstractObjectParser<Value, Context extends ParseFieldMatc
|
||||
}
|
||||
private static <T> List<T> parseArray(XContentParser parser, IOSupplier<T> supplier) throws IOException {
|
||||
List<T> list = new ArrayList<>();
|
||||
if (parser.currentToken().isValue()) {
|
||||
if (parser.currentToken().isValue() || parser.currentToken() == XContentParser.Token.START_OBJECT) {
|
||||
list.add(supplier.get()); // single value
|
||||
} else {
|
||||
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
|
||||
|
@ -140,12 +140,17 @@ public final class ConstructingObjectParser<Value, Context extends ParseFieldMat
|
||||
@Override
|
||||
public Value apply(XContentParser parser, Context context) {
|
||||
try {
|
||||
return objectParser.parse(parser, new Target(parser), context).finish();
|
||||
return parse(parser, context);
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + objectParser.getName() + "] failed to parse object", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Value parse(XContentParser parser, Context context) throws IOException {
|
||||
return objectParser.parse(parser, new Target(parser), context).finish();
|
||||
}
|
||||
|
||||
/**
|
||||
* Pass the {@linkplain BiConsumer} this returns the declare methods to declare a required constructor argument. See this class's
|
||||
* javadoc for an example. The order in which these are declared matters: it is the order that they come in the array passed to
|
||||
|
@ -0,0 +1,30 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.xcontent;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Reads an object from a parser using some context.
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface ContextParser<Context, T> {
|
||||
T parse(XContentParser p, Context c) throws IOException;
|
||||
}
|
@ -47,8 +47,8 @@ import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_STRIN
|
||||
|
||||
/**
|
||||
* A declarative, stateless parser that turns XContent into setter calls. A single parser should be defined for each object being parsed,
|
||||
* nested elements can be added via {@link #declareObject(BiConsumer, BiFunction, ParseField)} which should be satisfied where possible by
|
||||
* passing another instance of {@link ObjectParser}, this one customized for that Object.
|
||||
* nested elements can be added via {@link #declareObject(BiConsumer, ContextParser, ParseField)} which should be satisfied where possible
|
||||
* by passing another instance of {@link ObjectParser}, this one customized for that Object.
|
||||
* <p>
|
||||
* This class works well for object that do have a constructor argument or that can be built using information available from earlier in the
|
||||
* XContent. For objects that have constructors with required arguments that are specified on the same level as other fields see
|
||||
@ -126,6 +126,7 @@ public final class ObjectParser<Value, Context extends ParseFieldMatcherSupplier
|
||||
* @return a new value instance drawn from the provided value supplier on {@link #ObjectParser(String, Supplier)}
|
||||
* @throws IOException if an IOException occurs.
|
||||
*/
|
||||
@Override
|
||||
public Value parse(XContentParser parser, Context context) throws IOException {
|
||||
if (valueSupplier == null) {
|
||||
throw new NullPointerException("valueSupplier is not set");
|
||||
@ -463,6 +464,7 @@ public final class ObjectParser<Value, Context extends ParseFieldMatcherSupplier
|
||||
OBJECT_ARRAY(START_OBJECT, START_ARRAY),
|
||||
OBJECT_OR_BOOLEAN(START_OBJECT, VALUE_BOOLEAN),
|
||||
OBJECT_OR_STRING(START_OBJECT, VALUE_STRING),
|
||||
OBJECT_ARRAY_OR_STRING(START_OBJECT, START_ARRAY, VALUE_STRING),
|
||||
VALUE(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING);
|
||||
|
||||
private final EnumSet<XContentParser.Token> tokens;
|
||||
|
@ -25,15 +25,14 @@ import org.elasticsearch.common.ParseFieldMatcherSupplier;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ContextParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.function.BiFunction;
|
||||
|
||||
/**
|
||||
* Encapsulates a pipeline's id and configuration as a blob
|
||||
@ -55,7 +54,7 @@ public final class PipelineConfiguration extends AbstractDiffable<PipelineConfig
|
||||
}, new ParseField("config"), ObjectParser.ValueType.OBJECT);
|
||||
}
|
||||
|
||||
public static BiFunction<XContentParser, ParseFieldMatcherSupplier, PipelineConfiguration> getParser() {
|
||||
public static ContextParser<ParseFieldMatcherSupplier, PipelineConfiguration> getParser() {
|
||||
return (p, c) -> PARSER.apply(p ,c).build();
|
||||
}
|
||||
private static class Builder {
|
||||
|
@ -103,19 +103,15 @@ import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
|
||||
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.filters.InternalFilters;
|
||||
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid;
|
||||
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
|
||||
import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing;
|
||||
import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.missing.MissingParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.nested.InternalNested;
|
||||
import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNested;
|
||||
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
|
||||
@ -123,24 +119,18 @@ import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregat
|
||||
import org.elasticsearch.search.aggregations.bucket.range.InternalBinaryRange;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.date.InternalDateRange;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.InternalGeoDistance;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedSamplerParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.UnmappedSampler;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantLongTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
|
||||
@ -154,30 +144,21 @@ import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.AvgParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroid;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.MaxParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.MinParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks;
|
||||
@ -186,18 +167,14 @@ import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMe
|
||||
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.StatsParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.InternalSum;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.SumParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits;
|
||||
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountParser;
|
||||
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
|
||||
@ -372,34 +349,34 @@ public class SearchModule {
|
||||
}
|
||||
|
||||
private void registerAggregations(List<SearchPlugin> plugins) {
|
||||
registerAggregation(new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, new AvgParser())
|
||||
registerAggregation(new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, AvgAggregationBuilder::parse)
|
||||
.addResultReader(InternalAvg::new));
|
||||
registerAggregation(new AggregationSpec(SumAggregationBuilder.NAME, SumAggregationBuilder::new, new SumParser())
|
||||
registerAggregation(new AggregationSpec(SumAggregationBuilder.NAME, SumAggregationBuilder::new, SumAggregationBuilder::parse)
|
||||
.addResultReader(InternalSum::new));
|
||||
registerAggregation(new AggregationSpec(MinAggregationBuilder.NAME, MinAggregationBuilder::new, new MinParser())
|
||||
registerAggregation(new AggregationSpec(MinAggregationBuilder.NAME, MinAggregationBuilder::new, MinAggregationBuilder::parse)
|
||||
.addResultReader(InternalMin::new));
|
||||
registerAggregation(new AggregationSpec(MaxAggregationBuilder.NAME, MaxAggregationBuilder::new, new MaxParser())
|
||||
registerAggregation(new AggregationSpec(MaxAggregationBuilder.NAME, MaxAggregationBuilder::new, MaxAggregationBuilder::parse)
|
||||
.addResultReader(InternalMax::new));
|
||||
registerAggregation(new AggregationSpec(StatsAggregationBuilder.NAME, StatsAggregationBuilder::new, new StatsParser())
|
||||
registerAggregation(new AggregationSpec(StatsAggregationBuilder.NAME, StatsAggregationBuilder::new, StatsAggregationBuilder::parse)
|
||||
.addResultReader(InternalStats::new));
|
||||
registerAggregation(new AggregationSpec(ExtendedStatsAggregationBuilder.NAME, ExtendedStatsAggregationBuilder::new,
|
||||
new ExtendedStatsParser()).addResultReader(InternalExtendedStats::new));
|
||||
ExtendedStatsAggregationBuilder::parse).addResultReader(InternalExtendedStats::new));
|
||||
registerAggregation(new AggregationSpec(ValueCountAggregationBuilder.NAME, ValueCountAggregationBuilder::new,
|
||||
new ValueCountParser()).addResultReader(InternalValueCount::new));
|
||||
ValueCountAggregationBuilder::parse).addResultReader(InternalValueCount::new));
|
||||
registerAggregation(new AggregationSpec(PercentilesAggregationBuilder.NAME, PercentilesAggregationBuilder::new,
|
||||
new PercentilesParser())
|
||||
PercentilesAggregationBuilder::parse)
|
||||
.addResultReader(InternalTDigestPercentiles.NAME, InternalTDigestPercentiles::new)
|
||||
.addResultReader(InternalHDRPercentiles.NAME, InternalHDRPercentiles::new));
|
||||
registerAggregation(new AggregationSpec(PercentileRanksAggregationBuilder.NAME, PercentileRanksAggregationBuilder::new,
|
||||
new PercentileRanksParser())
|
||||
PercentileRanksAggregationBuilder::parse)
|
||||
.addResultReader(InternalTDigestPercentileRanks.NAME, InternalTDigestPercentileRanks::new)
|
||||
.addResultReader(InternalHDRPercentileRanks.NAME, InternalHDRPercentileRanks::new));
|
||||
registerAggregation(new AggregationSpec(CardinalityAggregationBuilder.NAME, CardinalityAggregationBuilder::new,
|
||||
new CardinalityParser()).addResultReader(InternalCardinality::new));
|
||||
CardinalityAggregationBuilder::parse).addResultReader(InternalCardinality::new));
|
||||
registerAggregation(new AggregationSpec(GlobalAggregationBuilder.NAME, GlobalAggregationBuilder::new,
|
||||
GlobalAggregationBuilder::parse).addResultReader(InternalGlobal::new));
|
||||
registerAggregation(new AggregationSpec(MissingAggregationBuilder.NAME, MissingAggregationBuilder::new, new MissingParser())
|
||||
.addResultReader(InternalMissing::new));
|
||||
registerAggregation(new AggregationSpec(MissingAggregationBuilder.NAME, MissingAggregationBuilder::new,
|
||||
MissingAggregationBuilder::parse).addResultReader(InternalMissing::new));
|
||||
registerAggregation(new AggregationSpec(FilterAggregationBuilder.NAME, FilterAggregationBuilder::new,
|
||||
FilterAggregationBuilder::parse).addResultReader(InternalFilter::new));
|
||||
registerAggregation(new AggregationSpec(FiltersAggregationBuilder.NAME, FiltersAggregationBuilder::new,
|
||||
@ -409,42 +386,43 @@ public class SearchModule {
|
||||
.addResultReader(InternalSampler.NAME, InternalSampler::new)
|
||||
.addResultReader(UnmappedSampler.NAME, UnmappedSampler::new));
|
||||
registerAggregation(new AggregationSpec(DiversifiedAggregationBuilder.NAME, DiversifiedAggregationBuilder::new,
|
||||
new DiversifiedSamplerParser())
|
||||
DiversifiedAggregationBuilder::parse)
|
||||
/* Reuses result readers from SamplerAggregator*/);
|
||||
registerAggregation(new AggregationSpec(TermsAggregationBuilder.NAME, TermsAggregationBuilder::new, new TermsParser())
|
||||
registerAggregation(new AggregationSpec(TermsAggregationBuilder.NAME, TermsAggregationBuilder::new,
|
||||
TermsAggregationBuilder::parse)
|
||||
.addResultReader(StringTerms.NAME, StringTerms::new)
|
||||
.addResultReader(UnmappedTerms.NAME, UnmappedTerms::new)
|
||||
.addResultReader(LongTerms.NAME, LongTerms::new)
|
||||
.addResultReader(DoubleTerms.NAME, DoubleTerms::new));
|
||||
registerAggregation(new AggregationSpec(SignificantTermsAggregationBuilder.NAME, SignificantTermsAggregationBuilder::new,
|
||||
new SignificantTermsParser(significanceHeuristicParserRegistry, queryParserRegistry))
|
||||
SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry))
|
||||
.addResultReader(SignificantStringTerms.NAME, SignificantStringTerms::new)
|
||||
.addResultReader(SignificantLongTerms.NAME, SignificantLongTerms::new)
|
||||
.addResultReader(UnmappedSignificantTerms.NAME, UnmappedSignificantTerms::new));
|
||||
registerAggregation(new AggregationSpec(RangeAggregationBuilder.NAME, RangeAggregationBuilder::new,
|
||||
new RangeParser()).addResultReader(InternalRange::new));
|
||||
registerAggregation(new AggregationSpec(DateRangeAggregationBuilder.NAME, DateRangeAggregationBuilder::new, new DateRangeParser())
|
||||
.addResultReader(InternalDateRange::new));
|
||||
registerAggregation(new AggregationSpec(IpRangeAggregationBuilder.NAME, IpRangeAggregationBuilder::new, new IpRangeParser())
|
||||
.addResultReader(InternalBinaryRange::new));
|
||||
registerAggregation(new AggregationSpec(HistogramAggregationBuilder.NAME, HistogramAggregationBuilder::new, new HistogramParser())
|
||||
.addResultReader(InternalHistogram::new));
|
||||
RangeAggregationBuilder::parse).addResultReader(InternalRange::new));
|
||||
registerAggregation(new AggregationSpec(DateRangeAggregationBuilder.NAME, DateRangeAggregationBuilder::new,
|
||||
DateRangeAggregationBuilder::parse).addResultReader(InternalDateRange::new));
|
||||
registerAggregation(new AggregationSpec(IpRangeAggregationBuilder.NAME, IpRangeAggregationBuilder::new,
|
||||
IpRangeAggregationBuilder::parse).addResultReader(InternalBinaryRange::new));
|
||||
registerAggregation(new AggregationSpec(HistogramAggregationBuilder.NAME, HistogramAggregationBuilder::new,
|
||||
HistogramAggregationBuilder::parse).addResultReader(InternalHistogram::new));
|
||||
registerAggregation(new AggregationSpec(DateHistogramAggregationBuilder.NAME, DateHistogramAggregationBuilder::new,
|
||||
new DateHistogramParser()).addResultReader(InternalDateHistogram::new));
|
||||
DateHistogramAggregationBuilder::parse).addResultReader(InternalDateHistogram::new));
|
||||
registerAggregation(new AggregationSpec(GeoDistanceAggregationBuilder.NAME, GeoDistanceAggregationBuilder::new,
|
||||
new GeoDistanceParser()).addResultReader(InternalGeoDistance::new));
|
||||
registerAggregation(new AggregationSpec(GeoGridAggregationBuilder.NAME, GeoGridAggregationBuilder::new, new GeoHashGridParser())
|
||||
.addResultReader(InternalGeoHashGrid::new));
|
||||
GeoDistanceAggregationBuilder::parse).addResultReader(InternalGeoDistance::new));
|
||||
registerAggregation(new AggregationSpec(GeoGridAggregationBuilder.NAME, GeoGridAggregationBuilder::new,
|
||||
GeoGridAggregationBuilder::parse).addResultReader(InternalGeoHashGrid::new));
|
||||
registerAggregation(new AggregationSpec(NestedAggregationBuilder.NAME, NestedAggregationBuilder::new,
|
||||
NestedAggregationBuilder::parse).addResultReader(InternalNested::new));
|
||||
registerAggregation(new AggregationSpec(ReverseNestedAggregationBuilder.NAME, ReverseNestedAggregationBuilder::new,
|
||||
ReverseNestedAggregationBuilder::parse).addResultReader(InternalReverseNested::new));
|
||||
registerAggregation(new AggregationSpec(TopHitsAggregationBuilder.NAME, TopHitsAggregationBuilder::new,
|
||||
TopHitsAggregationBuilder::parse).addResultReader(InternalTopHits::new));
|
||||
registerAggregation(new AggregationSpec(GeoBoundsAggregationBuilder.NAME, GeoBoundsAggregationBuilder::new, new GeoBoundsParser())
|
||||
.addResultReader(InternalGeoBounds::new));
|
||||
registerAggregation(new AggregationSpec(GeoBoundsAggregationBuilder.NAME, GeoBoundsAggregationBuilder::new,
|
||||
GeoBoundsAggregationBuilder::parse).addResultReader(InternalGeoBounds::new));
|
||||
registerAggregation(new AggregationSpec(GeoCentroidAggregationBuilder.NAME, GeoCentroidAggregationBuilder::new,
|
||||
new GeoCentroidParser()).addResultReader(InternalGeoCentroid::new));
|
||||
GeoCentroidAggregationBuilder::parse).addResultReader(InternalGeoCentroid::new));
|
||||
registerAggregation(new AggregationSpec(ScriptedMetricAggregationBuilder.NAME, ScriptedMetricAggregationBuilder::new,
|
||||
ScriptedMetricAggregationBuilder::parse).addResultReader(InternalScriptedMetric::new));
|
||||
registerAggregation(new AggregationSpec(ChildrenAggregationBuilder.NAME, ChildrenAggregationBuilder::new,
|
||||
|
@ -26,11 +26,13 @@ import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
|
||||
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.index.fielddata.SortingNumericDocValues;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
@ -41,6 +43,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -49,9 +52,24 @@ import java.util.Objects;
|
||||
public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoGridAggregationBuilder> {
|
||||
public static final String NAME = "geohash_grid";
|
||||
private static final Type TYPE = new Type(NAME);
|
||||
public static final int DEFAULT_PRECISION = 5;
|
||||
public static final int DEFAULT_MAX_NUM_CELLS = 10000;
|
||||
|
||||
private int precision = GeoHashGridParser.DEFAULT_PRECISION;
|
||||
private int requiredSize = GeoHashGridParser.DEFAULT_MAX_NUM_CELLS;
|
||||
private static final ObjectParser<GeoGridAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(GeoGridAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareGeoFields(PARSER, false, false);
|
||||
PARSER.declareInt(GeoGridAggregationBuilder::precision, GeoHashGridParams.FIELD_PRECISION);
|
||||
PARSER.declareInt(GeoGridAggregationBuilder::size, GeoHashGridParams.FIELD_SIZE);
|
||||
PARSER.declareInt(GeoGridAggregationBuilder::shardSize, GeoHashGridParams.FIELD_SHARD_SIZE);
|
||||
}
|
||||
|
||||
public static GeoGridAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new GeoGridAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private int precision = DEFAULT_PRECISION;
|
||||
private int requiredSize = DEFAULT_MAX_NUM_CELLS;
|
||||
private int shardSize = -1;
|
||||
|
||||
public GeoGridAggregationBuilder(String name) {
|
||||
|
@ -1,85 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.geogrid;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.GeoPointValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Aggregates Geo information into cells determined by geohashes of a given precision.
|
||||
* WARNING - for high-precision geohashes it may prove necessary to use a {@link GeoBoundingBoxQueryBuilder}
|
||||
* aggregation to focus in on a smaller area to avoid generating too many buckets and using too much RAM
|
||||
*/
|
||||
public class GeoHashGridParser extends GeoPointValuesSourceParser {
|
||||
|
||||
public static final int DEFAULT_PRECISION = 5;
|
||||
public static final int DEFAULT_MAX_NUM_CELLS = 10000;
|
||||
|
||||
public GeoHashGridParser() {
|
||||
super(false, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GeoGridAggregationBuilder createFactory(
|
||||
String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
GeoGridAggregationBuilder factory = new GeoGridAggregationBuilder(aggregationName);
|
||||
Integer precision = (Integer) otherOptions.get(GeoHashGridParams.FIELD_PRECISION);
|
||||
if (precision != null) {
|
||||
factory.precision(precision);
|
||||
}
|
||||
Integer size = (Integer) otherOptions.get(GeoHashGridParams.FIELD_SIZE);
|
||||
if (size != null) {
|
||||
factory.size(size);
|
||||
}
|
||||
Integer shardSize = (Integer) otherOptions.get(GeoHashGridParams.FIELD_SHARD_SIZE);
|
||||
if (shardSize != null) {
|
||||
factory.shardSize(shardSize);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
XContentParser parser = context.getParser();
|
||||
if (token == XContentParser.Token.VALUE_NUMBER || token == XContentParser.Token.VALUE_STRING) {
|
||||
if (context.matchField(currentFieldName, GeoHashGridParams.FIELD_PRECISION)) {
|
||||
otherOptions.put(GeoHashGridParams.FIELD_PRECISION, parser.intValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, GeoHashGridParams.FIELD_SIZE)) {
|
||||
otherOptions.put(GeoHashGridParams.FIELD_SIZE, parser.intValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, GeoHashGridParams.FIELD_SHARD_SIZE)) {
|
||||
otherOptions.put(GeoHashGridParams.FIELD_SHARD_SIZE, parser.intValue());
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
@ -19,12 +19,17 @@
|
||||
|
||||
package org.elasticsearch.search.aggregations.bucket.histogram;
|
||||
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.rounding.DateTimeUnit;
|
||||
import org.elasticsearch.common.rounding.Rounding;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -34,6 +39,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -73,6 +79,48 @@ public class DateHistogramAggregationBuilder
|
||||
DATE_FIELD_UNITS = unmodifiableMap(dateFieldUnits);
|
||||
}
|
||||
|
||||
private static final ObjectParser<DateHistogramAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(DateHistogramAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, true);
|
||||
|
||||
PARSER.declareField((histogram, interval) -> {
|
||||
if (interval instanceof Long) {
|
||||
histogram.interval((long) interval);
|
||||
} else {
|
||||
histogram.dateHistogramInterval((DateHistogramInterval) interval);
|
||||
}
|
||||
}, p -> {
|
||||
if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) {
|
||||
return p.longValue();
|
||||
} else {
|
||||
return new DateHistogramInterval(p.text());
|
||||
}
|
||||
}, Histogram.INTERVAL_FIELD, ObjectParser.ValueType.LONG);
|
||||
|
||||
PARSER.declareField(DateHistogramAggregationBuilder::offset, p -> {
|
||||
if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) {
|
||||
return p.longValue();
|
||||
} else {
|
||||
return DateHistogramAggregationBuilder.parseStringOffset(p.text());
|
||||
}
|
||||
}, Histogram.OFFSET_FIELD, ObjectParser.ValueType.LONG);
|
||||
|
||||
PARSER.declareBoolean(DateHistogramAggregationBuilder::keyed, Histogram.KEYED_FIELD);
|
||||
|
||||
PARSER.declareLong(DateHistogramAggregationBuilder::minDocCount, Histogram.MIN_DOC_COUNT_FIELD);
|
||||
|
||||
PARSER.declareField(DateHistogramAggregationBuilder::extendedBounds, ExtendedBounds.PARSER::apply,
|
||||
ExtendedBounds.EXTENDED_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT);
|
||||
|
||||
PARSER.declareField(DateHistogramAggregationBuilder::order, DateHistogramAggregationBuilder::parseOrder,
|
||||
Histogram.ORDER_FIELD, ObjectParser.ValueType.OBJECT);
|
||||
}
|
||||
|
||||
public static DateHistogramAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new DateHistogramAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private long interval;
|
||||
private DateHistogramInterval dateHistogramInterval;
|
||||
private long offset = 0;
|
||||
@ -322,4 +370,35 @@ public class DateHistogramAggregationBuilder
|
||||
&& Objects.equals(offset, other.offset)
|
||||
&& Objects.equals(extendedBounds, other.extendedBounds);
|
||||
}
|
||||
|
||||
// similar to the parsing oh histogram orders, but also accepts _time as an alias for _key
|
||||
private static InternalOrder parseOrder(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
InternalOrder order = null;
|
||||
Token token;
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
String dir = parser.text();
|
||||
boolean asc = "asc".equals(dir);
|
||||
if (!asc && !"desc".equals(dir)) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown order direction: [" + dir
|
||||
+ "]. Should be either [asc] or [desc]");
|
||||
}
|
||||
order = resolveOrder(currentFieldName, asc);
|
||||
}
|
||||
}
|
||||
return order;
|
||||
}
|
||||
|
||||
static InternalOrder resolveOrder(String key, boolean asc) {
|
||||
if ("_key".equals(key) || "_time".equals(key)) {
|
||||
return (InternalOrder) (asc ? InternalOrder.KEY_ASC : InternalOrder.KEY_DESC);
|
||||
}
|
||||
if ("_count".equals(key)) {
|
||||
return (InternalOrder) (asc ? InternalOrder.COUNT_ASC : InternalOrder.COUNT_DESC);
|
||||
}
|
||||
return new InternalOrder.Aggregation(key, asc);
|
||||
}
|
||||
}
|
||||
|
@ -1,156 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.histogram;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* A parser for date histograms. This translates json into a
|
||||
* {@link DateHistogramAggregationBuilder} instance.
|
||||
*/
|
||||
public class DateHistogramParser extends NumericValuesSourceParser {
|
||||
|
||||
public DateHistogramParser() {
|
||||
super(true, true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DateHistogramAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder(aggregationName);
|
||||
Object interval = otherOptions.get(Histogram.INTERVAL_FIELD);
|
||||
if (interval == null) {
|
||||
throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");
|
||||
} else if (interval instanceof Long) {
|
||||
factory.interval((Long) interval);
|
||||
} else if (interval instanceof DateHistogramInterval) {
|
||||
factory.dateHistogramInterval((DateHistogramInterval) interval);
|
||||
} else {
|
||||
throw new IllegalStateException("Unexpected interval class: " + interval.getClass());
|
||||
}
|
||||
Long offset = (Long) otherOptions.get(Histogram.OFFSET_FIELD);
|
||||
if (offset != null) {
|
||||
factory.offset(offset);
|
||||
}
|
||||
|
||||
ExtendedBounds extendedBounds = (ExtendedBounds) otherOptions.get(ExtendedBounds.EXTENDED_BOUNDS_FIELD);
|
||||
if (extendedBounds != null) {
|
||||
factory.extendedBounds(extendedBounds);
|
||||
}
|
||||
Boolean keyed = (Boolean) otherOptions.get(Histogram.KEYED_FIELD);
|
||||
if (keyed != null) {
|
||||
factory.keyed(keyed);
|
||||
}
|
||||
Long minDocCount = (Long) otherOptions.get(Histogram.MIN_DOC_COUNT_FIELD);
|
||||
if (minDocCount != null) {
|
||||
factory.minDocCount(minDocCount);
|
||||
}
|
||||
InternalOrder order = (InternalOrder) otherOptions.get(Histogram.ORDER_FIELD);
|
||||
if (order != null) {
|
||||
factory.order(order);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
XContentParser parser = context.getParser();
|
||||
if (token.isValue()) {
|
||||
if (context.matchField(currentFieldName, Histogram.INTERVAL_FIELD)) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
otherOptions.put(Histogram.INTERVAL_FIELD, new DateHistogramInterval(parser.text()));
|
||||
return true;
|
||||
} else {
|
||||
otherOptions.put(Histogram.INTERVAL_FIELD, parser.longValue());
|
||||
return true;
|
||||
}
|
||||
} else if (context.matchField(currentFieldName, Histogram.MIN_DOC_COUNT_FIELD)) {
|
||||
otherOptions.put(Histogram.MIN_DOC_COUNT_FIELD, parser.longValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, Histogram.KEYED_FIELD)) {
|
||||
otherOptions.put(Histogram.KEYED_FIELD, parser.booleanValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, Histogram.OFFSET_FIELD)) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
otherOptions.put(Histogram.OFFSET_FIELD,
|
||||
DateHistogramAggregationBuilder.parseStringOffset(parser.text()));
|
||||
return true;
|
||||
} else {
|
||||
otherOptions.put(Histogram.OFFSET_FIELD, parser.longValue());
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (context.matchField(currentFieldName, Histogram.ORDER_FIELD)) {
|
||||
InternalOrder order = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
String dir = parser.text();
|
||||
boolean asc = "asc".equals(dir);
|
||||
if (!asc && !"desc".equals(dir)) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown order direction in aggregation ["
|
||||
+ aggregationName + "]: [" + dir
|
||||
+ "]. Should be either [asc] or [desc]");
|
||||
}
|
||||
order = resolveOrder(currentFieldName, asc);
|
||||
}
|
||||
}
|
||||
otherOptions.put(Histogram.ORDER_FIELD, order);
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, ExtendedBounds.EXTENDED_BOUNDS_FIELD)) {
|
||||
try {
|
||||
otherOptions.put(ExtendedBounds.EXTENDED_BOUNDS_FIELD,
|
||||
ExtendedBounds.PARSER.apply(parser, context::getParseFieldMatcher));
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Error parsing [{}]", e, aggregationName);
|
||||
}
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
static InternalOrder resolveOrder(String key, boolean asc) {
|
||||
if ("_key".equals(key) || "_time".equals(key)) {
|
||||
return (InternalOrder) (asc ? InternalOrder.KEY_ASC : InternalOrder.KEY_DESC);
|
||||
}
|
||||
if ("_count".equals(key)) {
|
||||
return (InternalOrder) (asc ? InternalOrder.COUNT_ASC : InternalOrder.COUNT_DESC);
|
||||
}
|
||||
return new InternalOrder.Aggregation(key, asc);
|
||||
}
|
||||
}
|
@ -19,9 +19,16 @@
|
||||
|
||||
package org.elasticsearch.search.aggregations.bucket.histogram;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcherSupplier;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -31,6 +38,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -43,6 +51,39 @@ public class HistogramAggregationBuilder
|
||||
extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, HistogramAggregationBuilder> {
|
||||
public static final String NAME = InternalHistogram.TYPE.name();
|
||||
|
||||
private static final ObjectParser<double[], ParseFieldMatcherSupplier> EXTENDED_BOUNDS_PARSER = new ObjectParser<>(
|
||||
Histogram.EXTENDED_BOUNDS_FIELD.getPreferredName(),
|
||||
() -> new double[]{ Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY });
|
||||
static {
|
||||
EXTENDED_BOUNDS_PARSER.declareDouble((bounds, d) -> bounds[0] = d, new ParseField("min"));
|
||||
EXTENDED_BOUNDS_PARSER.declareDouble((bounds, d) -> bounds[1] = d, new ParseField("max"));
|
||||
}
|
||||
|
||||
private static final ObjectParser<HistogramAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(HistogramAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
|
||||
|
||||
PARSER.declareDouble(HistogramAggregationBuilder::interval, Histogram.INTERVAL_FIELD);
|
||||
|
||||
PARSER.declareDouble(HistogramAggregationBuilder::offset, Histogram.OFFSET_FIELD);
|
||||
|
||||
PARSER.declareBoolean(HistogramAggregationBuilder::keyed, Histogram.KEYED_FIELD);
|
||||
|
||||
PARSER.declareLong(HistogramAggregationBuilder::minDocCount, Histogram.MIN_DOC_COUNT_FIELD);
|
||||
|
||||
PARSER.declareField((histogram, extendedBounds) -> {
|
||||
histogram.extendedBounds(extendedBounds[0], extendedBounds[1]);
|
||||
}, EXTENDED_BOUNDS_PARSER::apply, ExtendedBounds.EXTENDED_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT);
|
||||
|
||||
PARSER.declareField(HistogramAggregationBuilder::order, HistogramAggregationBuilder::parseOrder,
|
||||
Histogram.ORDER_FIELD, ObjectParser.ValueType.OBJECT);
|
||||
}
|
||||
|
||||
public static HistogramAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new HistogramAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private double interval;
|
||||
private double offset = 0;
|
||||
private double minBound = Double.POSITIVE_INFINITY;
|
||||
@ -246,4 +287,34 @@ public class HistogramAggregationBuilder
|
||||
&& Objects.equals(minBound, other.minBound)
|
||||
&& Objects.equals(maxBound, other.maxBound);
|
||||
}
|
||||
|
||||
private static InternalOrder parseOrder(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
InternalOrder order = null;
|
||||
Token token;
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
String dir = parser.text();
|
||||
boolean asc = "asc".equals(dir);
|
||||
if (!asc && !"desc".equals(dir)) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown order direction: [" + dir
|
||||
+ "]. Should be either [asc] or [desc]");
|
||||
}
|
||||
order = resolveOrder(currentFieldName, asc);
|
||||
}
|
||||
}
|
||||
return order;
|
||||
}
|
||||
|
||||
static InternalOrder resolveOrder(String key, boolean asc) {
|
||||
if ("_key".equals(key)) {
|
||||
return (InternalOrder) (asc ? InternalOrder.KEY_ASC : InternalOrder.KEY_DESC);
|
||||
}
|
||||
if ("_count".equals(key)) {
|
||||
return (InternalOrder) (asc ? InternalOrder.COUNT_ASC : InternalOrder.COUNT_DESC);
|
||||
}
|
||||
return new InternalOrder.Aggregation(key, asc);
|
||||
}
|
||||
}
|
||||
|
@ -1,147 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.histogram;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcherSupplier;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* A parser for date histograms. This translates json into an
|
||||
* {@link HistogramAggregationBuilder} instance.
|
||||
*/
|
||||
public class HistogramParser extends NumericValuesSourceParser {
|
||||
|
||||
private static final ObjectParser<double[], ParseFieldMatcherSupplier> EXTENDED_BOUNDS_PARSER = new ObjectParser<>(
|
||||
Histogram.EXTENDED_BOUNDS_FIELD.getPreferredName(),
|
||||
() -> new double[]{ Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY });
|
||||
static {
|
||||
EXTENDED_BOUNDS_PARSER.declareDouble((bounds, d) -> bounds[0] = d, new ParseField("min"));
|
||||
EXTENDED_BOUNDS_PARSER.declareDouble((bounds, d) -> bounds[1] = d, new ParseField("max"));
|
||||
}
|
||||
|
||||
public HistogramParser() {
|
||||
super(true, true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected HistogramAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
HistogramAggregationBuilder factory = new HistogramAggregationBuilder(aggregationName);
|
||||
Double interval = (Double) otherOptions.get(Histogram.INTERVAL_FIELD);
|
||||
if (interval == null) {
|
||||
throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");
|
||||
} else {
|
||||
factory.interval(interval);
|
||||
}
|
||||
Double offset = (Double) otherOptions.get(Histogram.OFFSET_FIELD);
|
||||
if (offset != null) {
|
||||
factory.offset(offset);
|
||||
}
|
||||
|
||||
double[] extendedBounds = (double[]) otherOptions.get(Histogram.EXTENDED_BOUNDS_FIELD);
|
||||
if (extendedBounds != null) {
|
||||
factory.extendedBounds(extendedBounds[0], extendedBounds[1]);
|
||||
}
|
||||
Boolean keyed = (Boolean) otherOptions.get(Histogram.KEYED_FIELD);
|
||||
if (keyed != null) {
|
||||
factory.keyed(keyed);
|
||||
}
|
||||
Long minDocCount = (Long) otherOptions.get(Histogram.MIN_DOC_COUNT_FIELD);
|
||||
if (minDocCount != null) {
|
||||
factory.minDocCount(minDocCount);
|
||||
}
|
||||
InternalOrder order = (InternalOrder) otherOptions.get(Histogram.ORDER_FIELD);
|
||||
if (order != null) {
|
||||
factory.order(order);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
XContentParser parser = context.getParser();
|
||||
if (token.isValue()) {
|
||||
if (context.matchField(currentFieldName, Histogram.INTERVAL_FIELD)) {
|
||||
otherOptions.put(Histogram.INTERVAL_FIELD, parser.doubleValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, Histogram.MIN_DOC_COUNT_FIELD)) {
|
||||
otherOptions.put(Histogram.MIN_DOC_COUNT_FIELD, parser.longValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, Histogram.KEYED_FIELD)) {
|
||||
otherOptions.put(Histogram.KEYED_FIELD, parser.booleanValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, Histogram.OFFSET_FIELD)) {
|
||||
otherOptions.put(Histogram.OFFSET_FIELD, parser.doubleValue());
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (context.matchField(currentFieldName, Histogram.ORDER_FIELD)) {
|
||||
InternalOrder order = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
String dir = parser.text();
|
||||
boolean asc = "asc".equals(dir);
|
||||
if (!asc && !"desc".equals(dir)) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown order direction in aggregation ["
|
||||
+ aggregationName + "]: [" + dir
|
||||
+ "]. Should be either [asc] or [desc]");
|
||||
}
|
||||
order = resolveOrder(currentFieldName, asc);
|
||||
}
|
||||
}
|
||||
otherOptions.put(Histogram.ORDER_FIELD, order);
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, Histogram.EXTENDED_BOUNDS_FIELD)) {
|
||||
double[] bounds = EXTENDED_BOUNDS_PARSER.apply(parser, context::getParseFieldMatcher);
|
||||
otherOptions.put(Histogram.EXTENDED_BOUNDS_FIELD, bounds);
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
static InternalOrder resolveOrder(String key, boolean asc) {
|
||||
if ("_key".equals(key)) {
|
||||
return (InternalOrder) (asc ? InternalOrder.KEY_ASC : InternalOrder.KEY_DESC);
|
||||
}
|
||||
if ("_count".equals(key)) {
|
||||
return (InternalOrder) (asc ? InternalOrder.COUNT_ASC : InternalOrder.COUNT_DESC);
|
||||
}
|
||||
return new InternalOrder.Aggregation(key, asc);
|
||||
}
|
||||
}
|
@ -21,7 +21,9 @@ package org.elasticsearch.search.aggregations.bucket.missing;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
@ -31,6 +33,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -39,6 +42,16 @@ public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder<Va
|
||||
public static final String NAME = "missing";
|
||||
public static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<MissingAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(MissingAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareAnyFields(PARSER, true, true);
|
||||
}
|
||||
|
||||
public static MissingAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new MissingAggregationBuilder(aggregationName, null), context);
|
||||
}
|
||||
|
||||
public MissingAggregationBuilder(String name, ValueType targetValueType) {
|
||||
super(name, TYPE, ValuesSourceType.ANY, targetValueType);
|
||||
}
|
||||
|
@ -1,48 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.missing;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class MissingParser extends AnyValuesSourceParser {
|
||||
|
||||
public MissingParser() {
|
||||
super(true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MissingAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
return new MissingAggregationBuilder(aggregationName, targetValueType);
|
||||
}
|
||||
}
|
@ -20,13 +20,18 @@
|
||||
package org.elasticsearch.search.aggregations.bucket.range;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -34,6 +39,27 @@ public class RangeAggregationBuilder extends AbstractRangeBuilder<RangeAggregati
|
||||
public static final String NAME = "range";
|
||||
static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<RangeAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(RangeAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
|
||||
PARSER.declareBoolean(RangeAggregationBuilder::keyed, RangeAggregator.KEYED_FIELD);
|
||||
|
||||
PARSER.declareObjectArray((agg, ranges) -> {
|
||||
for (Range range : ranges) {
|
||||
agg.addRange(range);
|
||||
}
|
||||
}, RangeAggregationBuilder::parseRange, RangeAggregator.RANGES_FIELD);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new RangeAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
return Range.fromXContent(parser, context.getParseFieldMatcher());
|
||||
}
|
||||
|
||||
public RangeAggregationBuilder(String name) {
|
||||
super(name, InternalRange.FACTORY);
|
||||
}
|
||||
|
@ -1,92 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.range;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class RangeParser extends NumericValuesSourceParser {
|
||||
|
||||
public RangeParser() {
|
||||
this(true, true, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Used by subclasses that parse slightly different kinds of ranges.
|
||||
*/
|
||||
protected RangeParser(boolean scriptable, boolean formattable, boolean timezoneAware) {
|
||||
super(scriptable, formattable, timezoneAware);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AbstractRangeBuilder<?, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
RangeAggregationBuilder factory = new RangeAggregationBuilder(aggregationName);
|
||||
@SuppressWarnings("unchecked")
|
||||
List<? extends Range> ranges = (List<? extends Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
|
||||
for (Range range : ranges) {
|
||||
factory.addRange(range);
|
||||
}
|
||||
Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD);
|
||||
if (keyed != null) {
|
||||
factory.keyed(keyed);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
XContentParser parser = context.getParser();
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (context.matchField(currentFieldName, RangeAggregator.RANGES_FIELD)) {
|
||||
List<Range> ranges = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
Range range = parseRange(parser, context.getParseFieldMatcher());
|
||||
ranges.add(range);
|
||||
}
|
||||
otherOptions.put(RangeAggregator.RANGES_FIELD, ranges);
|
||||
return true;
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
|
||||
if (context.matchField(currentFieldName, RangeAggregator.KEYED_FIELD)) {
|
||||
boolean keyed = parser.booleanValue();
|
||||
otherOptions.put(RangeAggregator.KEYED_FIELD, keyed);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
protected Range parseRange(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
|
||||
return Range.fromXContent(parser, parseFieldMatcher);
|
||||
}
|
||||
}
|
@ -20,7 +20,11 @@
|
||||
package org.elasticsearch.search.aggregations.bucket.range.date;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder;
|
||||
@ -29,6 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -37,6 +42,27 @@ public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeA
|
||||
public static final String NAME = "date_range";
|
||||
static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<DateRangeAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(DateRangeAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, true);
|
||||
PARSER.declareBoolean(DateRangeAggregationBuilder::keyed, RangeAggregator.KEYED_FIELD);
|
||||
|
||||
PARSER.declareObjectArray((agg, ranges) -> {
|
||||
for (Range range : ranges) {
|
||||
agg.addRange(range);
|
||||
}
|
||||
}, DateRangeAggregationBuilder::parseRange, RangeAggregator.RANGES_FIELD);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new DateRangeAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
return Range.fromXContent(parser, context.getParseFieldMatcher());
|
||||
}
|
||||
|
||||
public DateRangeAggregationBuilder(String name) {
|
||||
super(name, InternalDateRange.FACTORY);
|
||||
}
|
||||
|
@ -1,52 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.range.date;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeParser;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class DateRangeParser extends RangeParser {
|
||||
|
||||
public DateRangeParser() {
|
||||
super(true, true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DateRangeAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
DateRangeAggregationBuilder factory = new DateRangeAggregationBuilder(aggregationName);
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Range> ranges = (List<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
|
||||
for (Range range : ranges) {
|
||||
factory.addRange(range);
|
||||
}
|
||||
Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD);
|
||||
if (keyed != null) {
|
||||
factory.keyed(keyed);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
}
|
@ -19,23 +19,31 @@
|
||||
|
||||
package org.elasticsearch.search.aggregations.bucket.range.geodistance;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.geo.GeoDistance;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.Range;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
@ -45,8 +53,156 @@ import java.util.Objects;
|
||||
public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoDistanceAggregationBuilder> {
|
||||
public static final String NAME = "geo_distance";
|
||||
public static final Type TYPE = new Type(NAME);
|
||||
static final ParseField ORIGIN_FIELD = new ParseField("origin", "center", "point", "por");
|
||||
static final ParseField UNIT_FIELD = new ParseField("unit");
|
||||
static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
|
||||
|
||||
private final GeoPoint origin;
|
||||
private static final ObjectParser<GeoDistanceAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(GeoDistanceAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareGeoFields(PARSER, true, false);
|
||||
|
||||
PARSER.declareBoolean(GeoDistanceAggregationBuilder::keyed, RangeAggregator.KEYED_FIELD);
|
||||
|
||||
PARSER.declareObjectArray((agg, ranges) -> {
|
||||
for (Range range : ranges) {
|
||||
agg.addRange(range);
|
||||
}
|
||||
}, GeoDistanceAggregationBuilder::parseRange, RangeAggregator.RANGES_FIELD);
|
||||
|
||||
PARSER.declareField(GeoDistanceAggregationBuilder::unit, p -> DistanceUnit.fromString(p.text()),
|
||||
UNIT_FIELD, ObjectParser.ValueType.STRING);
|
||||
|
||||
PARSER.declareField(GeoDistanceAggregationBuilder::distanceType, p -> GeoDistance.fromString(p.text()),
|
||||
DISTANCE_TYPE_FIELD, ObjectParser.ValueType.STRING);
|
||||
|
||||
PARSER.declareField(GeoDistanceAggregationBuilder::origin, GeoDistanceAggregationBuilder::parseGeoPoint,
|
||||
ORIGIN_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
GeoDistanceAggregationBuilder builder = PARSER.parse(context.parser(), new GeoDistanceAggregationBuilder(aggregationName), context);
|
||||
if (builder.origin() == null) {
|
||||
throw new IllegalArgumentException("Aggregation [" + aggregationName + "] must define an [origin].");
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
public static class Range extends RangeAggregator.Range {
|
||||
public Range(String key, Double from, Double to) {
|
||||
super(key(key, from, to), from == null ? 0 : from, to);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read from a stream.
|
||||
*/
|
||||
public Range(StreamInput in) throws IOException {
|
||||
super(in.readOptionalString(), in.readDouble(), in.readDouble());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeOptionalString(key);
|
||||
out.writeDouble(from);
|
||||
out.writeDouble(to);
|
||||
}
|
||||
|
||||
private static String key(String key, Double from, Double to) {
|
||||
if (key != null) {
|
||||
return key;
|
||||
}
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append((from == null || from == 0) ? "*" : from);
|
||||
sb.append("-");
|
||||
sb.append((to == null || Double.isInfinite(to)) ? "*" : to);
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
||||
|
||||
private static GeoPoint parseGeoPoint(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
Token token = parser.currentToken();
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
GeoPoint point = new GeoPoint();
|
||||
point.resetFromString(parser.text());
|
||||
return point;
|
||||
}
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
double lat = Double.NaN;
|
||||
double lon = Double.NaN;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (Double.isNaN(lon)) {
|
||||
lon = parser.doubleValue();
|
||||
} else if (Double.isNaN(lat)) {
|
||||
lat = parser.doubleValue();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "malformed [" + ORIGIN_FIELD.getPreferredName()
|
||||
+ "]: a geo point array must be of the form [lon, lat]");
|
||||
}
|
||||
}
|
||||
return new GeoPoint(lat, lon);
|
||||
}
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
String currentFieldName = null;
|
||||
double lat = Double.NaN;
|
||||
double lon = Double.NaN;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
if ("lat".equals(currentFieldName)) {
|
||||
lat = parser.doubleValue();
|
||||
} else if ("lon".equals(currentFieldName)) {
|
||||
lon = parser.doubleValue();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (Double.isNaN(lat) || Double.isNaN(lon)) {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"malformed [" + currentFieldName + "] geo point object. either [lat] or [lon] (or both) are " + "missing");
|
||||
}
|
||||
return new GeoPoint(lat, lon);
|
||||
}
|
||||
|
||||
// should not happen since we only parse geo points when we encounter a string, an object or an array
|
||||
throw new IllegalArgumentException("Unexpected token [" + token + "] while parsing geo point");
|
||||
}
|
||||
|
||||
private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
ParseFieldMatcher parseFieldMatcher = context.getParseFieldMatcher();
|
||||
String fromAsStr = null;
|
||||
String toAsStr = null;
|
||||
double from = 0.0;
|
||||
double to = Double.POSITIVE_INFINITY;
|
||||
String key = null;
|
||||
String toOrFromOrKey = null;
|
||||
Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
toOrFromOrKey = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
if (parseFieldMatcher.match(toOrFromOrKey, Range.FROM_FIELD)) {
|
||||
from = parser.doubleValue();
|
||||
} else if (parseFieldMatcher.match(toOrFromOrKey, Range.TO_FIELD)) {
|
||||
to = parser.doubleValue();
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (parseFieldMatcher.match(toOrFromOrKey, Range.KEY_FIELD)) {
|
||||
key = parser.text();
|
||||
} else if (parseFieldMatcher.match(toOrFromOrKey, Range.FROM_FIELD)) {
|
||||
fromAsStr = parser.text();
|
||||
} else if (parseFieldMatcher.match(toOrFromOrKey, Range.TO_FIELD)) {
|
||||
toAsStr = parser.text();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (fromAsStr != null || toAsStr != null) {
|
||||
return new Range(key, Double.parseDouble(fromAsStr), Double.parseDouble(toAsStr));
|
||||
} else {
|
||||
return new Range(key, from, to);
|
||||
}
|
||||
}
|
||||
|
||||
private GeoPoint origin;
|
||||
private List<Range> ranges = new ArrayList<>();
|
||||
private DistanceUnit unit = DistanceUnit.DEFAULT;
|
||||
private GeoDistance distanceType = GeoDistance.DEFAULT;
|
||||
@ -54,14 +210,14 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
|
||||
|
||||
public GeoDistanceAggregationBuilder(String name, GeoPoint origin) {
|
||||
this(name, origin, InternalGeoDistance.FACTORY);
|
||||
if (origin == null) {
|
||||
throw new IllegalArgumentException("[origin] must not be null: [" + name + "]");
|
||||
}
|
||||
}
|
||||
|
||||
private GeoDistanceAggregationBuilder(String name, GeoPoint origin,
|
||||
InternalRange.Factory<InternalGeoDistance.Bucket, InternalGeoDistance> rangeFactory) {
|
||||
super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType());
|
||||
if (origin == null) {
|
||||
throw new IllegalArgumentException("[origin] must not be null: [" + name + "]");
|
||||
}
|
||||
this.origin = origin;
|
||||
}
|
||||
|
||||
@ -82,6 +238,23 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
|
||||
unit = DistanceUnit.readFromStream(in);
|
||||
}
|
||||
|
||||
// for parsing
|
||||
GeoDistanceAggregationBuilder(String name) {
|
||||
this(name, null, InternalGeoDistance.FACTORY);
|
||||
}
|
||||
|
||||
GeoDistanceAggregationBuilder origin(GeoPoint origin) {
|
||||
this.origin = origin;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the {@link GeoPoint} that is used for distance computations.
|
||||
*/
|
||||
public GeoPoint origin() {
|
||||
return origin;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void innerWriteTo(StreamOutput out) throws IOException {
|
||||
out.writeDouble(origin.lat());
|
||||
@ -222,11 +395,11 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
|
||||
|
||||
@Override
|
||||
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(GeoDistanceParser.ORIGIN_FIELD.getPreferredName(), origin);
|
||||
builder.field(ORIGIN_FIELD.getPreferredName(), origin);
|
||||
builder.field(RangeAggregator.RANGES_FIELD.getPreferredName(), ranges);
|
||||
builder.field(RangeAggregator.KEYED_FIELD.getPreferredName(), keyed);
|
||||
builder.field(GeoDistanceParser.UNIT_FIELD.getPreferredName(), unit);
|
||||
builder.field(GeoDistanceParser.DISTANCE_TYPE_FIELD.getPreferredName(), distanceType);
|
||||
builder.field(UNIT_FIELD.getPreferredName(), unit);
|
||||
builder.field(DISTANCE_TYPE_FIELD.getPreferredName(), distanceType);
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
@ -1,172 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.range.geodistance;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.geo.GeoDistance;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.GeoPointValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.GeoPointParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class GeoDistanceParser extends GeoPointValuesSourceParser {
|
||||
|
||||
static final ParseField ORIGIN_FIELD = new ParseField("origin", "center", "point", "por");
|
||||
static final ParseField UNIT_FIELD = new ParseField("unit");
|
||||
static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
|
||||
|
||||
private GeoPointParser geoPointParser = new GeoPointParser(GeoDistanceAggregationBuilder.TYPE, ORIGIN_FIELD);
|
||||
|
||||
public GeoDistanceParser() {
|
||||
super(true, false);
|
||||
}
|
||||
|
||||
public static class Range extends RangeAggregator.Range {
|
||||
public Range(String key, Double from, Double to) {
|
||||
super(key(key, from, to), from == null ? 0 : from, to);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read from a stream.
|
||||
*/
|
||||
public Range(StreamInput in) throws IOException {
|
||||
super(in.readOptionalString(), in.readDouble(), in.readDouble());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeOptionalString(key);
|
||||
out.writeDouble(from);
|
||||
out.writeDouble(to);
|
||||
}
|
||||
|
||||
private static String key(String key, Double from, Double to) {
|
||||
if (key != null) {
|
||||
return key;
|
||||
}
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append((from == null || from == 0) ? "*" : from);
|
||||
sb.append("-");
|
||||
sb.append((to == null || Double.isInfinite(to)) ? "*" : to);
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GeoDistanceAggregationBuilder createFactory(
|
||||
String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
GeoPoint origin = (GeoPoint) otherOptions.get(ORIGIN_FIELD);
|
||||
GeoDistanceAggregationBuilder factory = new GeoDistanceAggregationBuilder(aggregationName, origin);
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Range> ranges = (List<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
|
||||
for (Range range : ranges) {
|
||||
factory.addRange(range);
|
||||
}
|
||||
Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD);
|
||||
if (keyed != null) {
|
||||
factory.keyed(keyed);
|
||||
}
|
||||
DistanceUnit unit = (DistanceUnit) otherOptions.get(UNIT_FIELD);
|
||||
if (unit != null) {
|
||||
factory.unit(unit);
|
||||
}
|
||||
GeoDistance distanceType = (GeoDistance) otherOptions.get(DISTANCE_TYPE_FIELD);
|
||||
if (distanceType != null) {
|
||||
factory.distanceType(distanceType);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
XContentParser parser = context.getParser();
|
||||
if (geoPointParser.token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
|
||||
return true;
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (context.matchField(currentFieldName, UNIT_FIELD)) {
|
||||
DistanceUnit unit = DistanceUnit.fromString(parser.text());
|
||||
otherOptions.put(UNIT_FIELD, unit);
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, DISTANCE_TYPE_FIELD)) {
|
||||
GeoDistance distanceType = GeoDistance.fromString(parser.text());
|
||||
otherOptions.put(DISTANCE_TYPE_FIELD, distanceType);
|
||||
return true;
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
|
||||
if (context.matchField(currentFieldName, RangeAggregator.KEYED_FIELD)) {
|
||||
boolean keyed = parser.booleanValue();
|
||||
otherOptions.put(RangeAggregator.KEYED_FIELD, keyed);
|
||||
return true;
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (context.matchField(currentFieldName, RangeAggregator.RANGES_FIELD)) {
|
||||
List<Range> ranges = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
String fromAsStr = null;
|
||||
String toAsStr = null;
|
||||
double from = 0.0;
|
||||
double to = Double.POSITIVE_INFINITY;
|
||||
String key = null;
|
||||
String toOrFromOrKey = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
toOrFromOrKey = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
if (context.matchField(toOrFromOrKey, Range.FROM_FIELD)) {
|
||||
from = parser.doubleValue();
|
||||
} else if (context.matchField(toOrFromOrKey, Range.TO_FIELD)) {
|
||||
to = parser.doubleValue();
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (context.matchField(toOrFromOrKey, Range.KEY_FIELD)) {
|
||||
key = parser.text();
|
||||
} else if (context.matchField(toOrFromOrKey, Range.FROM_FIELD)) {
|
||||
fromAsStr = parser.text();
|
||||
} else if (context.matchField(toOrFromOrKey, Range.TO_FIELD)) {
|
||||
toAsStr = parser.text();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (fromAsStr != null || toAsStr != null) {
|
||||
ranges.add(new Range(key, Double.parseDouble(fromAsStr), Double.parseDouble(toAsStr)));
|
||||
} else {
|
||||
ranges.add(new Range(key, from, to));
|
||||
}
|
||||
}
|
||||
otherOptions.put(RangeAggregator.RANGES_FIELD, ranges);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
@ -35,7 +35,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Unmapped;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.Range;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder.Range;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
|
@ -20,13 +20,21 @@ package org.elasticsearch.search.aggregations.bucket.range.ip;
|
||||
|
||||
import org.apache.lucene.document.InetAddressPoint;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.network.InetAddresses;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.BinaryRangeAggregator;
|
||||
@ -38,6 +46,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -53,6 +62,59 @@ public final class IpRangeAggregationBuilder
|
||||
extends ValuesSourceAggregationBuilder<ValuesSource.Bytes, IpRangeAggregationBuilder> {
|
||||
public static final String NAME = "ip_range";
|
||||
private static final InternalAggregation.Type TYPE = new InternalAggregation.Type(NAME);
|
||||
private static final ParseField MASK_FIELD = new ParseField("mask");
|
||||
|
||||
private static final ObjectParser<IpRangeAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(IpRangeAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareBytesFields(PARSER, false, false);
|
||||
|
||||
PARSER.declareBoolean(IpRangeAggregationBuilder::keyed, RangeAggregator.KEYED_FIELD);
|
||||
|
||||
PARSER.declareObjectArray((agg, ranges) -> {
|
||||
for (Range range : ranges) agg.addRange(range);
|
||||
}, IpRangeAggregationBuilder::parseRange, RangeAggregator.RANGES_FIELD);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new IpRangeAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
final ParseFieldMatcher parseFieldMatcher = context.getParseFieldMatcher();
|
||||
String key = null;
|
||||
String from = null;
|
||||
String to = null;
|
||||
String mask = null;
|
||||
|
||||
if (parser.currentToken() != Token.START_OBJECT) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[ranges] must contain objects, but hit a " + parser.currentToken());
|
||||
}
|
||||
while (parser.nextToken() != Token.END_OBJECT) {
|
||||
if (parser.currentToken() == Token.FIELD_NAME) {
|
||||
continue;
|
||||
}
|
||||
if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.Range.KEY_FIELD)) {
|
||||
key = parser.text();
|
||||
} else if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.Range.FROM_FIELD)) {
|
||||
from = parser.textOrNull();
|
||||
} else if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.Range.TO_FIELD)) {
|
||||
to = parser.textOrNull();
|
||||
} else if (parseFieldMatcher.match(parser.currentName(), MASK_FIELD)) {
|
||||
mask = parser.text();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unexpected ip range parameter: [" + parser.currentName() + "]");
|
||||
}
|
||||
}
|
||||
if (mask != null) {
|
||||
if (key == null) {
|
||||
key = mask;
|
||||
}
|
||||
return new Range(key, mask);
|
||||
} else {
|
||||
return new Range(key, from, to);
|
||||
}
|
||||
}
|
||||
|
||||
public static class Range implements ToXContent {
|
||||
|
||||
|
@ -1,128 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.range.ip;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.BytesValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder.Range;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
/**
|
||||
* A parser for ip range aggregations.
|
||||
*/
|
||||
public class IpRangeParser extends BytesValuesSourceParser {

    // Field name for CIDR-style ranges, e.g. {"mask": "10.0.0.0/24"}.
    private static final ParseField MASK_FIELD = new ParseField("mask");

    public IpRangeParser() {
        // bytes values source: scripts not supported, no timezone awareness
        super(false, false);
    }

    /**
     * Builds the {@link IpRangeAggregationBuilder} from the options collected by
     * {@link #token}: the parsed ranges and the optional {@code keyed} flag.
     */
    @Override
    protected ValuesSourceAggregationBuilder<ValuesSource.Bytes, ?> createFactory(
            String aggregationName, ValuesSourceType valuesSourceType,
            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
        IpRangeAggregationBuilder range = new IpRangeAggregationBuilder(aggregationName);
        @SuppressWarnings("unchecked")
        Iterable<Range> ranges = (Iterable<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
        if (otherOptions.containsKey(RangeAggregator.RANGES_FIELD)) {
            for (Range r : ranges) {
                range.addRange(r);
            }
        }
        if (otherOptions.containsKey(RangeAggregator.KEYED_FIELD)) {
            range.keyed((Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD));
        }
        return range;
    }

    /**
     * Parses one object of the {@code ranges} array into a {@link Range}.
     * Accepted fields: {@code key}, {@code from}, {@code to}, {@code mask}.
     * A {@code mask} entry wins over {@code from}/{@code to}; when no explicit
     * {@code key} is given for a masked range, the mask is used as the key.
     *
     * @throws ParsingException if the token is not an object or a field is unknown
     */
    private Range parseRange(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
        String key = null;
        String from = null;
        String to = null;
        String mask = null;

        if (parser.currentToken() != Token.START_OBJECT) {
            throw new ParsingException(parser.getTokenLocation(), "[ranges] must contain objects, but hit a " + parser.currentToken());
        }
        while (parser.nextToken() != Token.END_OBJECT) {
            if (parser.currentToken() == Token.FIELD_NAME) {
                // the matching value token is consumed on the next iteration
                continue;
            }
            if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.Range.KEY_FIELD)) {
                key = parser.text();
            } else if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.Range.FROM_FIELD)) {
                from = parser.textOrNull();
            } else if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.Range.TO_FIELD)) {
                to = parser.textOrNull();
            } else if (parseFieldMatcher.match(parser.currentName(), MASK_FIELD)) {
                mask = parser.text();
            } else {
                throw new ParsingException(parser.getTokenLocation(), "Unexpected ip range parameter: [" + parser.currentName() + "]");
            }
        }
        if (mask != null) {
            // CIDR mask takes precedence; fall back to the mask itself as the bucket key
            if (key == null) {
                key = mask;
            }
            return new Range(key, mask);
        } else {
            return new Range(key, from, to);
        }
    }

    /**
     * Consumes the ip_range-specific fields ({@code ranges} and {@code keyed}),
     * stashing their values in {@code otherOptions} for {@link #createFactory}.
     * Returns {@code false} for any other field so the shared values-source
     * parsing can handle it.
     */
    @Override
    protected boolean token(String aggregationName, String currentFieldName,
            Token token,
            XContentParseContext context,
            Map<ParseField, Object> otherOptions) throws IOException {
        XContentParser parser = context.getParser();
        if (context.matchField(currentFieldName, RangeAggregator.RANGES_FIELD)) {
            if (parser.currentToken() != Token.START_ARRAY) {
                throw new ParsingException(parser.getTokenLocation(), "[ranges] must be passed as an array, but got a " + token);
            }
            List<Range> ranges = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                Range range = parseRange(parser, context.getParseFieldMatcher());
                ranges.add(range);
            }
            otherOptions.put(RangeAggregator.RANGES_FIELD, ranges);
            return true;
        } else if (context.matchField(parser.currentName(), RangeAggregator.KEYED_FIELD)) {
            otherOptions.put(RangeAggregator.KEYED_FIELD, parser.booleanValue());
            return true;
        }
        return false;
    }

}
|
@ -21,8 +21,11 @@ package org.elasticsearch.search.aggregations.bucket.sampler;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -30,6 +33,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -41,6 +45,19 @@ public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilde
|
||||
|
||||
public static final int MAX_DOCS_PER_VALUE_DEFAULT = 1;
|
||||
|
||||
private static final ObjectParser<DiversifiedAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(DiversifiedAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareAnyFields(PARSER, true, false);
|
||||
PARSER.declareInt(DiversifiedAggregationBuilder::shardSize, SamplerAggregator.SHARD_SIZE_FIELD);
|
||||
PARSER.declareInt(DiversifiedAggregationBuilder::maxDocsPerValue, SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD);
|
||||
PARSER.declareString(DiversifiedAggregationBuilder::executionHint, SamplerAggregator.EXECUTION_HINT_FIELD);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new DiversifiedAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private int shardSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE;
|
||||
private int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT;
|
||||
private String executionHint = null;
|
||||
|
@ -1,79 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.sampler;
|
||||
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class DiversifiedSamplerParser extends AnyValuesSourceParser {
|
||||
public DiversifiedSamplerParser() {
|
||||
super(true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DiversifiedAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
DiversifiedAggregationBuilder factory = new DiversifiedAggregationBuilder(aggregationName);
|
||||
Integer shardSize = (Integer) otherOptions.get(SamplerAggregator.SHARD_SIZE_FIELD);
|
||||
if (shardSize != null) {
|
||||
factory.shardSize(shardSize);
|
||||
}
|
||||
Integer maxDocsPerValue = (Integer) otherOptions.get(SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD);
|
||||
if (maxDocsPerValue != null) {
|
||||
factory.maxDocsPerValue(maxDocsPerValue);
|
||||
}
|
||||
String executionHint = (String) otherOptions.get(SamplerAggregator.EXECUTION_HINT_FIELD);
|
||||
if (executionHint != null) {
|
||||
factory.executionHint(executionHint);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
XContentParser parser = context.getParser();
|
||||
if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
if (context.matchField(currentFieldName, SamplerAggregator.SHARD_SIZE_FIELD)) {
|
||||
int shardSize = parser.intValue();
|
||||
otherOptions.put(SamplerAggregator.SHARD_SIZE_FIELD, shardSize);
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD)) {
|
||||
int maxDocsPerValue = parser.intValue();
|
||||
otherOptions.put(SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD, maxDocsPerValue);
|
||||
return true;
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (context.matchField(currentFieldName, SamplerAggregator.EXECUTION_HINT_FIELD)) {
|
||||
String executionHint = parser.text();
|
||||
otherOptions.put(SamplerAggregator.EXECUTION_HINT_FIELD, executionHint);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
@ -21,14 +21,20 @@ package org.elasticsearch.search.aggregations.bucket.significant;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
|
||||
@ -39,6 +45,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -55,6 +62,48 @@ public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationB
|
||||
3, 0, 10, -1);
|
||||
static final SignificanceHeuristic DEFAULT_SIGNIFICANCE_HEURISTIC = new JLHScore();
|
||||
|
||||
    /**
     * Builds the request parser for {@code significant_terms}. Unlike the other
     * aggregations this cannot be a single static {@link ObjectParser} because the
     * set of accepted significance-heuristic fields depends on which heuristics are
     * registered, so the parser is assembled per registry.
     *
     * @param significanceHeuristicParserRegistry registry of pluggable significance heuristics;
     *        one object field is declared per registered name
     * @return a parser producing a fully configured {@link SignificantTermsAggregationBuilder}
     */
    public static Aggregator.Parser getParser(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry) {
        ObjectParser<SignificantTermsAggregationBuilder, QueryParseContext> parser =
                new ObjectParser<>(SignificantTermsAggregationBuilder.NAME);
        // common values-source fields; scripts supported, formattable = true
        ValuesSourceParserHelper.declareAnyFields(parser, true, true);

        parser.declareInt(SignificantTermsAggregationBuilder::shardSize, TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME);

        parser.declareLong(SignificantTermsAggregationBuilder::minDocCount, TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME);

        parser.declareLong(SignificantTermsAggregationBuilder::shardMinDocCount, TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME);

        parser.declareInt(SignificantTermsAggregationBuilder::size, TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME);

        parser.declareString(SignificantTermsAggregationBuilder::executionHint, TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME);

        // background_filter parses to Optional<QueryBuilder>; only set when present
        parser.declareObject((b, v) -> { if (v.isPresent()) b.backgroundFilter(v.get()); },
                (p, context) -> context.parseInnerQueryBuilder(),
                SignificantTermsAggregationBuilder.BACKGROUND_FILTER);

        // include/exclude may each arrive independently; merge with whatever was set before
        parser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())),
                IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING);

        parser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)),
                IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY);

        // one object field per registered heuristic; the lookup is repeated at parse
        // time so deprecated-name handling follows the request's ParseFieldMatcher
        for (String name : significanceHeuristicParserRegistry.getNames()) {
            parser.declareObject(SignificantTermsAggregationBuilder::significanceHeuristic,
                    (p, context) -> {
                        SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry
                                .lookupReturningNullIfNotFound(name, context.getParseFieldMatcher());
                        return significanceHeuristicParser.parse(context);
                    },
                    new ParseField(name));
        }
        return new Aggregator.Parser() {
            @Override
            public AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
                // targetValueType is null here; it is resolved from the parsed field
                return parser.parse(context.parser(), new SignificantTermsAggregationBuilder(aggregationName, null), context);
            }
        };
    }
|
||||
|
||||
private IncludeExclude includeExclude = null;
|
||||
private String executionHint = null;
|
||||
private QueryBuilder filterBuilder = null;
|
||||
|
@ -1,107 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.significant;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.AbstractTermsParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
public class SignificantTermsParser extends AbstractTermsParser {
    // Registry of pluggable significance heuristics, consulted by field name in parseSpecial.
    private final ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry;
    // Needed to build a QueryParseContext for parsing the background_filter query.
    private final IndicesQueriesRegistry queriesRegistry;

    public SignificantTermsParser(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
            IndicesQueriesRegistry queriesRegistry) {
        this.significanceHeuristicParserRegistry = significanceHeuristicParserRegistry;
        this.queriesRegistry = queriesRegistry;
    }

    /**
     * Assembles the {@link SignificantTermsAggregationBuilder} from the options
     * parsed by the shared terms machinery plus the significant_terms-specific
     * entries ({@code background_filter}, significance heuristic) collected by
     * {@link #parseSpecial}. Null options are simply left at their defaults.
     */
    @Override
    protected SignificantTermsAggregationBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
            ValueType targetValueType, BucketCountThresholds bucketCountThresholds,
            SubAggCollectionMode collectMode, String executionHint,
            IncludeExclude incExc, Map<ParseField, Object> otherOptions) {
        SignificantTermsAggregationBuilder factory = new SignificantTermsAggregationBuilder(aggregationName, targetValueType);
        if (bucketCountThresholds != null) {
            factory.bucketCountThresholds(bucketCountThresholds);
        }
        if (executionHint != null) {
            factory.executionHint(executionHint);
        }
        if (incExc != null) {
            factory.includeExclude(incExc);
        }
        QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregationBuilder.BACKGROUND_FILTER);
        if (backgroundFilter != null) {
            factory.backgroundFilter(backgroundFilter);
        }
        SignificanceHeuristic significanceHeuristic =
                (SignificanceHeuristic) otherOptions.get(SignificantTermsAggregationBuilder.HEURISTIC);
        if (significanceHeuristic != null) {
            factory.significanceHeuristic(significanceHeuristic);
        }
        return factory;
    }

    /**
     * Handles the two object-valued fields specific to significant_terms: a
     * registered significance heuristic (matched by field name against the
     * registry) or {@code background_filter}, parsed as an inner query. Returns
     * {@code false} so the base parser handles everything else.
     */
    @Override
    public boolean parseSpecial(String aggregationName, XContentParseContext context, Token token,
            String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException {
        if (token == XContentParser.Token.START_OBJECT) {
            SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry
                    .lookupReturningNullIfNotFound(currentFieldName, context.getParseFieldMatcher());
            if (significanceHeuristicParser != null) {
                SignificanceHeuristic significanceHeuristic = significanceHeuristicParser.parse(context);
                otherOptions.put(SignificantTermsAggregationBuilder.HEURISTIC, significanceHeuristic);
                return true;
            } else if (context.matchField(currentFieldName, SignificantTermsAggregationBuilder.BACKGROUND_FILTER)) {
                QueryParseContext queryParseContext = new QueryParseContext(context.getDefaultScriptLanguage(), queriesRegistry,
                        context.getParser(), context.getParseFieldMatcher());
                Optional<QueryBuilder> filter = queryParseContext.parseInnerQueryBuilder();
                // an absent/empty inner query is tolerated: the field is consumed but no filter is set
                if (filter.isPresent()) {
                    otherOptions.put(SignificantTermsAggregationBuilder.BACKGROUND_FILTER, filter.get());
                }
                return true;
            }
        }
        return false;
    }

    @Override
    protected BucketCountThresholds getDefaultBucketCountThresholds() {
        // defensive copy of the shared default thresholds so callers can mutate freely
        return new TermsAggregator.BucketCountThresholds(SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS);
    }
}
|
@ -26,8 +26,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -113,13 +113,13 @@ public class GND extends NXYSignificanceHeuristic {
|
||||
}
|
||||
|
||||
@Override
|
||||
public SignificanceHeuristic parse(XContentParseContext context) throws IOException, QueryShardException {
|
||||
XContentParser parser = context.getParser();
|
||||
public SignificanceHeuristic parse(QueryParseContext context) throws IOException, QueryShardException {
|
||||
XContentParser parser = context.parser();
|
||||
String givenName = parser.currentName();
|
||||
boolean backgroundIsSuperset = true;
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
while (!token.equals(XContentParser.Token.END_OBJECT)) {
|
||||
if (context.matchField(parser.currentName(), BACKGROUND_IS_SUPERSET)) {
|
||||
if (context.getParseFieldMatcher().match(parser.currentName(), BACKGROUND_IS_SUPERSET)) {
|
||||
parser.nextToken();
|
||||
backgroundIsSuperset = parser.booleanValue();
|
||||
} else {
|
||||
|
@ -26,8 +26,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -104,9 +104,9 @@ public class JLHScore extends SignificanceHeuristic {
|
||||
return builder;
|
||||
}
|
||||
|
||||
public static SignificanceHeuristic parse(XContentParseContext context)
|
||||
public static SignificanceHeuristic parse(QueryParseContext context)
|
||||
throws IOException, QueryShardException {
|
||||
XContentParser parser = context.getParser();
|
||||
XContentParser parser = context.parser();
|
||||
// move to the closing bracket
|
||||
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
|
||||
throw new ElasticsearchParseException(
|
||||
|
@ -27,8 +27,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -152,18 +152,18 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic {
|
||||
public abstract static class NXYParser implements SignificanceHeuristicParser {
|
||||
|
||||
@Override
|
||||
public SignificanceHeuristic parse(XContentParseContext context)
|
||||
public SignificanceHeuristic parse(QueryParseContext context)
|
||||
throws IOException, QueryShardException {
|
||||
XContentParser parser = context.getParser();
|
||||
XContentParser parser = context.parser();
|
||||
String givenName = parser.currentName();
|
||||
boolean includeNegatives = false;
|
||||
boolean backgroundIsSuperset = true;
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
while (!token.equals(XContentParser.Token.END_OBJECT)) {
|
||||
if (context.matchField(parser.currentName(), INCLUDE_NEGATIVES_FIELD)) {
|
||||
if (context.getParseFieldMatcher().match(parser.currentName(), INCLUDE_NEGATIVES_FIELD)) {
|
||||
parser.nextToken();
|
||||
includeNegatives = parser.booleanValue();
|
||||
} else if (context.matchField(parser.currentName(), BACKGROUND_IS_SUPERSET)) {
|
||||
} else if (context.getParseFieldMatcher().match(parser.currentName(), BACKGROUND_IS_SUPERSET)) {
|
||||
parser.nextToken();
|
||||
backgroundIsSuperset = parser.booleanValue();
|
||||
} else {
|
||||
|
@ -26,8 +26,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -56,9 +56,9 @@ public class PercentageScore extends SignificanceHeuristic {
|
||||
return builder;
|
||||
}
|
||||
|
||||
public static SignificanceHeuristic parse(XContentParseContext context)
|
||||
public static SignificanceHeuristic parse(QueryParseContext context)
|
||||
throws IOException, QueryShardException {
|
||||
XContentParser parser = context.getParser();
|
||||
XContentParser parser = context.parser();
|
||||
// move to the closing bracket
|
||||
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
|
||||
throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());
|
||||
|
@ -26,12 +26,12 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.script.ExecutableScript;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -146,9 +146,9 @@ public class ScriptHeuristic extends SignificanceHeuristic {
|
||||
return Objects.equals(script, other.script);
|
||||
}
|
||||
|
||||
public static SignificanceHeuristic parse(XContentParseContext context)
|
||||
public static SignificanceHeuristic parse(QueryParseContext context)
|
||||
throws IOException, QueryShardException {
|
||||
XContentParser parser = context.getParser();
|
||||
XContentParser parser = context.parser();
|
||||
String heuristicName = parser.currentName();
|
||||
Script script = null;
|
||||
XContentParser.Token token;
|
||||
@ -157,7 +157,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
|
||||
if (token.equals(XContentParser.Token.FIELD_NAME)) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else {
|
||||
if (context.matchField(currentFieldName, Script.SCRIPT_PARSE_FIELD)) {
|
||||
if (context.getParseFieldMatcher().match(currentFieldName, Script.SCRIPT_PARSE_FIELD)) {
|
||||
script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
|
||||
} else {
|
||||
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown object [{}]", heuristicName, currentFieldName);
|
||||
|
@ -22,7 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
|
||||
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -31,5 +31,5 @@ import java.io.IOException;
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface SignificanceHeuristicParser {
|
||||
SignificanceHeuristic parse(XContentParseContext context) throws IOException, ParsingException;
|
||||
SignificanceHeuristic parse(QueryParseContext context) throws IOException, ParsingException;
|
||||
}
|
||||
|
@ -1,137 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.bucket.terms;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public abstract class AbstractTermsParser extends AnyValuesSourceParser {
|
||||
|
||||
public static final ParseField EXECUTION_HINT_FIELD_NAME = new ParseField("execution_hint");
|
||||
public static final ParseField SHARD_SIZE_FIELD_NAME = new ParseField("shard_size");
|
||||
public static final ParseField MIN_DOC_COUNT_FIELD_NAME = new ParseField("min_doc_count");
|
||||
public static final ParseField SHARD_MIN_DOC_COUNT_FIELD_NAME = new ParseField("shard_min_doc_count");
|
||||
public static final ParseField REQUIRED_SIZE_FIELD_NAME = new ParseField("size");
|
||||
|
||||
public IncludeExclude.Parser incExcParser = new IncludeExclude.Parser();
|
||||
|
||||
protected AbstractTermsParser() {
|
||||
super(true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final ValuesSourceAggregationBuilder<ValuesSource, ?> createFactory(String aggregationName,
|
||||
ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType,
|
||||
Map<ParseField, Object> otherOptions) {
|
||||
BucketCountThresholds bucketCountThresholds = getDefaultBucketCountThresholds();
|
||||
Integer requiredSize = (Integer) otherOptions.get(REQUIRED_SIZE_FIELD_NAME);
|
||||
if (requiredSize != null && requiredSize != -1) {
|
||||
bucketCountThresholds.setRequiredSize(requiredSize);
|
||||
}
|
||||
Integer shardSize = (Integer) otherOptions.get(SHARD_SIZE_FIELD_NAME);
|
||||
if (shardSize != null && shardSize != -1) {
|
||||
bucketCountThresholds.setShardSize(shardSize);
|
||||
}
|
||||
Long minDocCount = (Long) otherOptions.get(MIN_DOC_COUNT_FIELD_NAME);
|
||||
if (minDocCount != null && minDocCount != -1) {
|
||||
bucketCountThresholds.setMinDocCount(minDocCount);
|
||||
}
|
||||
Long shardMinDocCount = (Long) otherOptions.get(SHARD_MIN_DOC_COUNT_FIELD_NAME);
|
||||
if (shardMinDocCount != null && shardMinDocCount != -1) {
|
||||
bucketCountThresholds.setShardMinDocCount(shardMinDocCount);
|
||||
}
|
||||
SubAggCollectionMode collectMode = (SubAggCollectionMode) otherOptions.get(SubAggCollectionMode.KEY);
|
||||
String executionHint = (String) otherOptions.get(EXECUTION_HINT_FIELD_NAME);
|
||||
IncludeExclude incExc = incExcParser.createIncludeExclude(otherOptions);
|
||||
return doCreateFactory(aggregationName, valuesSourceType, targetValueType, bucketCountThresholds, collectMode, executionHint,
|
||||
incExc,
|
||||
otherOptions);
|
||||
}
|
||||
|
||||
protected abstract ValuesSourceAggregationBuilder<ValuesSource, ?> doCreateFactory(String aggregationName,
|
||||
ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType,
|
||||
BucketCountThresholds bucketCountThresholds,
|
||||
SubAggCollectionMode collectMode,
|
||||
String executionHint,
|
||||
IncludeExclude incExc,
|
||||
Map<ParseField, Object> otherOptions);
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
XContentParser parser = context.getParser();
|
||||
if (incExcParser.token(currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
|
||||
return true;
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (context.matchField(currentFieldName, EXECUTION_HINT_FIELD_NAME)) {
|
||||
otherOptions.put(EXECUTION_HINT_FIELD_NAME, parser.text());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, SubAggCollectionMode.KEY)) {
|
||||
otherOptions.put(SubAggCollectionMode.KEY, SubAggCollectionMode.parse(parser.text(), context.getParseFieldMatcher()));
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, REQUIRED_SIZE_FIELD_NAME)) {
|
||||
otherOptions.put(REQUIRED_SIZE_FIELD_NAME, parser.intValue());
|
||||
return true;
|
||||
} else if (parseSpecial(aggregationName, context, token, currentFieldName, otherOptions)) {
|
||||
return true;
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
if (context.matchField(currentFieldName, REQUIRED_SIZE_FIELD_NAME)) {
|
||||
otherOptions.put(REQUIRED_SIZE_FIELD_NAME, parser.intValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, SHARD_SIZE_FIELD_NAME)) {
|
||||
otherOptions.put(SHARD_SIZE_FIELD_NAME, parser.intValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, MIN_DOC_COUNT_FIELD_NAME)) {
|
||||
otherOptions.put(MIN_DOC_COUNT_FIELD_NAME, parser.longValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, SHARD_MIN_DOC_COUNT_FIELD_NAME)) {
|
||||
otherOptions.put(SHARD_MIN_DOC_COUNT_FIELD_NAME, parser.longValue());
|
||||
return true;
|
||||
} else if (parseSpecial(aggregationName, context, token, currentFieldName, otherOptions)) {
|
||||
return true;
|
||||
}
|
||||
} else if (parseSpecial(aggregationName, context, token, currentFieldName, otherOptions)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public abstract boolean parseSpecial(String aggregationName, XContentParseContext context,
|
||||
Token token, String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException;
|
||||
|
||||
protected abstract TermsAggregator.BucketCountThresholds getDefaultBucketCountThresholds();
|
||||
|
||||
}
|
@ -19,14 +19,20 @@
|
||||
package org.elasticsearch.search.aggregations.bucket.terms;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -35,6 +41,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -56,6 +63,42 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
|
||||
public static final ParseField SHOW_TERM_DOC_COUNT_ERROR = new ParseField("show_term_doc_count_error");
|
||||
public static final ParseField ORDER_FIELD = new ParseField("order");
|
||||
|
||||
private static final ObjectParser<TermsAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(TermsAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareAnyFields(PARSER, true, true);
|
||||
|
||||
PARSER.declareBoolean(TermsAggregationBuilder::showTermDocCountError,
|
||||
TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR);
|
||||
|
||||
PARSER.declareInt(TermsAggregationBuilder::shardSize, SHARD_SIZE_FIELD_NAME);
|
||||
|
||||
PARSER.declareLong(TermsAggregationBuilder::minDocCount, MIN_DOC_COUNT_FIELD_NAME);
|
||||
|
||||
PARSER.declareLong(TermsAggregationBuilder::shardMinDocCount, SHARD_MIN_DOC_COUNT_FIELD_NAME);
|
||||
|
||||
PARSER.declareInt(TermsAggregationBuilder::size, REQUIRED_SIZE_FIELD_NAME);
|
||||
|
||||
PARSER.declareString(TermsAggregationBuilder::executionHint, EXECUTION_HINT_FIELD_NAME);
|
||||
|
||||
PARSER.declareField(TermsAggregationBuilder::collectMode,
|
||||
(p, c) -> SubAggCollectionMode.parse(p.text(), c.getParseFieldMatcher()),
|
||||
SubAggCollectionMode.KEY, ObjectParser.ValueType.STRING);
|
||||
|
||||
PARSER.declareObjectArray(TermsAggregationBuilder::order, TermsAggregationBuilder::parseOrderParam,
|
||||
TermsAggregationBuilder.ORDER_FIELD);
|
||||
|
||||
PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())),
|
||||
IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING);
|
||||
|
||||
PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)),
|
||||
IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new TermsAggregationBuilder(aggregationName, null), context);
|
||||
}
|
||||
|
||||
private Terms.Order order = Terms.Order.compound(Terms.Order.count(false), Terms.Order.term(true));
|
||||
private IncludeExclude includeExclude = null;
|
||||
private String executionHint = null;
|
||||
@ -96,18 +139,6 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
|
||||
out.writeBoolean(showTermDocCountError);
|
||||
}
|
||||
|
||||
public TermsAggregator.BucketCountThresholds bucketCountThresholds() {
|
||||
return bucketCountThresholds;
|
||||
}
|
||||
|
||||
public TermsAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
|
||||
if (bucketCountThresholds == null) {
|
||||
throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]");
|
||||
}
|
||||
this.bucketCountThresholds = bucketCountThresholds;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the size - indicating how many term buckets should be returned
|
||||
* (defaults to 10)
|
||||
@ -298,4 +329,46 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
private static Terms.Order parseOrderParam(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
XContentParser.Token token;
|
||||
Terms.Order orderParam = null;
|
||||
String orderKey = null;
|
||||
boolean orderAsc = false;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
orderKey = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
String dir = parser.text();
|
||||
if ("asc".equalsIgnoreCase(dir)) {
|
||||
orderAsc = true;
|
||||
} else if ("desc".equalsIgnoreCase(dir)) {
|
||||
orderAsc = false;
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"Unknown terms order direction [" + dir + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"Unexpected token " + token + " for [order]");
|
||||
}
|
||||
}
|
||||
if (orderKey == null) {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"Must specify at least one field for [order]");
|
||||
} else {
|
||||
orderParam = resolveOrder(orderKey, orderAsc);
|
||||
}
|
||||
return orderParam;
|
||||
}
|
||||
|
||||
static Terms.Order resolveOrder(String key, boolean asc) {
|
||||
if ("_term".equals(key)) {
|
||||
return Order.term(asc);
|
||||
}
|
||||
if ("_count".equals(key)) {
|
||||
return Order.count(asc);
|
||||
}
|
||||
return Order.aggregation(key, asc);
|
||||
}
|
||||
}
|
||||
|
@ -137,7 +137,9 @@ public abstract class TermsAggregator extends BucketsAggregator {
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME.getPreferredName(), requiredSize);
|
||||
builder.field(TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME.getPreferredName(), shardSize);
|
||||
if (shardSize != -1) {
|
||||
builder.field(TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME.getPreferredName(), shardSize);
|
||||
}
|
||||
builder.field(TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), minDocCount);
|
||||
builder.field(TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), shardMinDocCount);
|
||||
return builder;
|
||||
|
@ -1,172 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.terms;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class TermsParser extends AbstractTermsParser {
|
||||
@Override
|
||||
protected TermsAggregationBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, BucketCountThresholds bucketCountThresholds,
|
||||
SubAggCollectionMode collectMode, String executionHint,
|
||||
IncludeExclude incExc, Map<ParseField, Object> otherOptions) {
|
||||
TermsAggregationBuilder factory = new TermsAggregationBuilder(aggregationName, targetValueType);
|
||||
@SuppressWarnings("unchecked")
|
||||
List<OrderElement> orderElements = (List<OrderElement>) otherOptions.get(TermsAggregationBuilder.ORDER_FIELD);
|
||||
if (orderElements != null) {
|
||||
List<Terms.Order> orders = new ArrayList<>(orderElements.size());
|
||||
for (OrderElement orderElement : orderElements) {
|
||||
orders.add(resolveOrder(orderElement.key(), orderElement.asc()));
|
||||
}
|
||||
factory.order(orders);
|
||||
}
|
||||
if (bucketCountThresholds != null) {
|
||||
factory.bucketCountThresholds(bucketCountThresholds);
|
||||
}
|
||||
if (collectMode != null) {
|
||||
factory.collectMode(collectMode);
|
||||
}
|
||||
if (executionHint != null) {
|
||||
factory.executionHint(executionHint);
|
||||
}
|
||||
if (incExc != null) {
|
||||
factory.includeExclude(incExc);
|
||||
}
|
||||
Boolean showTermDocCountError = (Boolean) otherOptions.get(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR);
|
||||
if (showTermDocCountError != null) {
|
||||
factory.showTermDocCountError(showTermDocCountError);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean parseSpecial(String aggregationName, XContentParseContext context, Token token,
|
||||
String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
XContentParser parser = context.getParser();
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (context.matchField(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) {
|
||||
otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, Collections.singletonList(parseOrderParam(aggregationName, parser)));
|
||||
return true;
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (context.matchField(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) {
|
||||
List<OrderElement> orderElements = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
OrderElement orderParam = parseOrderParam(aggregationName, parser);
|
||||
orderElements.add(orderParam);
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"Order elements must be of type object in [" + aggregationName + "] found token of type [" + token + "].");
|
||||
}
|
||||
}
|
||||
otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, orderElements);
|
||||
return true;
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
|
||||
if (context.matchField(currentFieldName, TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR)) {
|
||||
otherOptions.put(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR, parser.booleanValue());
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private OrderElement parseOrderParam(String aggregationName, XContentParser parser) throws IOException {
|
||||
XContentParser.Token token;
|
||||
OrderElement orderParam = null;
|
||||
String orderKey = null;
|
||||
boolean orderAsc = false;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
orderKey = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
String dir = parser.text();
|
||||
if ("asc".equalsIgnoreCase(dir)) {
|
||||
orderAsc = true;
|
||||
} else if ("desc".equalsIgnoreCase(dir)) {
|
||||
orderAsc = false;
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"Unknown terms order direction [" + dir + "] in terms aggregation [" + aggregationName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"Unexpected token " + token + " for [order] in [" + aggregationName + "].");
|
||||
}
|
||||
}
|
||||
if (orderKey == null) {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"Must specify at least one field for [order] in [" + aggregationName + "].");
|
||||
} else {
|
||||
orderParam = new OrderElement(orderKey, orderAsc);
|
||||
}
|
||||
return orderParam;
|
||||
}
|
||||
|
||||
static class OrderElement {
|
||||
private final String key;
|
||||
private final boolean asc;
|
||||
|
||||
public OrderElement(String key, boolean asc) {
|
||||
this.key = key;
|
||||
this.asc = asc;
|
||||
}
|
||||
|
||||
public String key() {
|
||||
return key;
|
||||
}
|
||||
|
||||
public boolean asc() {
|
||||
return asc;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public TermsAggregator.BucketCountThresholds getDefaultBucketCountThresholds() {
|
||||
return new TermsAggregator.BucketCountThresholds(TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS);
|
||||
}
|
||||
|
||||
static Terms.Order resolveOrder(String key, boolean asc) {
|
||||
if ("_term".equals(key)) {
|
||||
return Order.term(asc);
|
||||
}
|
||||
if ("_count".equals(key)) {
|
||||
return Order.count(asc);
|
||||
}
|
||||
return Order.aggregation(key, asc);
|
||||
}
|
||||
}
|
@ -45,12 +45,12 @@ import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.SortedSet;
|
||||
@ -61,11 +61,85 @@ import java.util.TreeSet;
|
||||
* exclusion has precedence, where the {@code include} is evaluated first and then the {@code exclude}.
|
||||
*/
|
||||
public class IncludeExclude implements Writeable, ToXContent {
|
||||
private static final ParseField INCLUDE_FIELD = new ParseField("include");
|
||||
private static final ParseField EXCLUDE_FIELD = new ParseField("exclude");
|
||||
private static final ParseField PATTERN_FIELD = new ParseField("pattern");
|
||||
private static final ParseField PARTITION_FIELD = new ParseField("partition");
|
||||
private static final ParseField NUM_PARTITIONS_FIELD = new ParseField("num_partitions");
|
||||
public static final ParseField INCLUDE_FIELD = new ParseField("include");
|
||||
public static final ParseField EXCLUDE_FIELD = new ParseField("exclude");
|
||||
public static final ParseField PATTERN_FIELD = new ParseField("pattern");
|
||||
public static final ParseField PARTITION_FIELD = new ParseField("partition");
|
||||
public static final ParseField NUM_PARTITIONS_FIELD = new ParseField("num_partitions");
|
||||
|
||||
// for parsing purposes only
|
||||
// TODO: move all aggs to the same package so that this stuff could be pkg-private
|
||||
public static IncludeExclude merge(IncludeExclude include, IncludeExclude exclude) {
|
||||
if (include == null) {
|
||||
return exclude;
|
||||
}
|
||||
if (exclude == null) {
|
||||
return include;
|
||||
}
|
||||
if (include.isPartitionBased()) {
|
||||
throw new IllegalArgumentException("Cannot specify any excludes when using a partition-based include");
|
||||
}
|
||||
String includeMethod = include.isRegexBased() ? "regex" : "set";
|
||||
String excludeMethod = exclude.isRegexBased() ? "regex" : "set";
|
||||
if (includeMethod.equals(excludeMethod) == false) {
|
||||
throw new IllegalArgumentException("Cannot mix a " + includeMethod + "-based include with a "
|
||||
+ excludeMethod + "-based method");
|
||||
}
|
||||
if (include.isRegexBased()) {
|
||||
return new IncludeExclude(include.include, exclude.exclude);
|
||||
} else {
|
||||
return new IncludeExclude(include.includeValues, exclude.excludeValues);
|
||||
}
|
||||
}
|
||||
|
||||
public static IncludeExclude parseInclude(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
XContentParser.Token token = parser.currentToken();
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
return new IncludeExclude(parser.text(), null);
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
return new IncludeExclude(new TreeSet<>(parseArrayToSet(parser)), null);
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
ParseFieldMatcher parseFieldMatcher = context.getParseFieldMatcher();
|
||||
String currentFieldName = null;
|
||||
Integer partition = null, numPartitions = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
// This "include":{"pattern":"foo.*"} syntax is undocumented since 2.0
|
||||
// Regexes should be "include":"foo.*"
|
||||
if (parseFieldMatcher.match(currentFieldName, PATTERN_FIELD)) {
|
||||
return new IncludeExclude(parser.text(), null);
|
||||
} else if (parseFieldMatcher.match(currentFieldName, NUM_PARTITIONS_FIELD)) {
|
||||
numPartitions = parser.intValue();
|
||||
} else if (parseFieldMatcher.match(currentFieldName, PARTITION_FIELD)) {
|
||||
partition = parser.intValue();
|
||||
} else {
|
||||
throw new ElasticsearchParseException(
|
||||
"Unknown parameter in Include/Exclude clause: " + currentFieldName);
|
||||
}
|
||||
}
|
||||
if (partition == null) {
|
||||
throw new IllegalArgumentException("Missing [" + PARTITION_FIELD.getPreferredName()
|
||||
+ "] parameter for partition-based include");
|
||||
}
|
||||
if (numPartitions == null) {
|
||||
throw new IllegalArgumentException("Missing [" + NUM_PARTITIONS_FIELD.getPreferredName()
|
||||
+ "] parameter for partition-based include");
|
||||
}
|
||||
return new IncludeExclude(partition, numPartitions);
|
||||
} else {
|
||||
throw new IllegalArgumentException("Unrecognized token for an include [" + token + "]");
|
||||
}
|
||||
}
|
||||
|
||||
public static IncludeExclude parseExclude(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
XContentParser.Token token = parser.currentToken();
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
return new IncludeExclude(null, parser.text());
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
return new IncludeExclude(null, new TreeSet<>(parseArrayToSet(parser)));
|
||||
} else {
|
||||
throw new IllegalArgumentException("Unrecognized token for an exclude [" + token + "]");
|
||||
}
|
||||
}
|
||||
|
||||
// The includeValue and excludeValue ByteRefs which are the result of the parsing
|
||||
// process are converted into a LongFilter when used on numeric fields
|
||||
@ -485,157 +559,18 @@ public class IncludeExclude implements Writeable, ToXContent {
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
public static class Parser {
|
||||
|
||||
public boolean token(String currentFieldName, XContentParser.Token token, XContentParser parser,
|
||||
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (parseFieldMatcher.match(currentFieldName, INCLUDE_FIELD)) {
|
||||
otherOptions.put(INCLUDE_FIELD, parser.text());
|
||||
} else if (parseFieldMatcher.match(currentFieldName, EXCLUDE_FIELD)) {
|
||||
otherOptions.put(EXCLUDE_FIELD, parser.text());
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (parseFieldMatcher.match(currentFieldName, INCLUDE_FIELD)) {
|
||||
otherOptions.put(INCLUDE_FIELD, new TreeSet<>(parseArrayToSet(parser)));
|
||||
return true;
|
||||
}
|
||||
if (parseFieldMatcher.match(currentFieldName, EXCLUDE_FIELD)) {
|
||||
otherOptions.put(EXCLUDE_FIELD, new TreeSet<>(parseArrayToSet(parser)));
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseFieldMatcher.match(currentFieldName, INCLUDE_FIELD)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
|
||||
// This "include":{"pattern":"foo.*"} syntax is undocumented since 2.0
|
||||
// Regexes should be "include":"foo.*"
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (parseFieldMatcher.match(currentFieldName, PATTERN_FIELD)) {
|
||||
otherOptions.put(INCLUDE_FIELD, parser.text());
|
||||
} else {
|
||||
throw new ElasticsearchParseException(
|
||||
"Unknown string parameter in Include/Exclude clause: " + currentFieldName);
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
if (parseFieldMatcher.match(currentFieldName, NUM_PARTITIONS_FIELD)) {
|
||||
otherOptions.put(NUM_PARTITIONS_FIELD, parser.intValue());
|
||||
} else if (parseFieldMatcher.match(currentFieldName, PARTITION_FIELD)) {
|
||||
otherOptions.put(INCLUDE_FIELD, parser.intValue());
|
||||
} else {
|
||||
throw new ElasticsearchParseException(
|
||||
"Unknown numeric parameter in Include/Exclude clause: " + currentFieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (parseFieldMatcher.match(currentFieldName, EXCLUDE_FIELD)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (parseFieldMatcher.match(currentFieldName, PATTERN_FIELD)) {
|
||||
otherOptions.put(EXCLUDE_FIELD, parser.text());
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
private static Set<BytesRef> parseArrayToSet(XContentParser parser) throws IOException {
|
||||
final Set<BytesRef> set = new HashSet<>();
|
||||
if (parser.currentToken() != XContentParser.Token.START_ARRAY) {
|
||||
throw new ElasticsearchParseException("Missing start of array in include/exclude clause");
|
||||
}
|
||||
|
||||
private Set<BytesRef> parseArrayToSet(XContentParser parser) throws IOException {
|
||||
final Set<BytesRef> set = new HashSet<>();
|
||||
if (parser.currentToken() != XContentParser.Token.START_ARRAY) {
|
||||
throw new ElasticsearchParseException("Missing start of array in include/exclude clause");
|
||||
}
|
||||
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
|
||||
if (!parser.currentToken().isValue()) {
|
||||
throw new ElasticsearchParseException("Array elements in include/exclude clauses should be string values");
|
||||
}
|
||||
set.add(new BytesRef(parser.text()));
|
||||
}
|
||||
return set;
|
||||
}
|
||||
|
||||
public IncludeExclude createIncludeExclude(Map<ParseField, Object> otherOptions) {
|
||||
Object includeObject = otherOptions.get(INCLUDE_FIELD);
|
||||
String include = null;
|
||||
int partition = -1;
|
||||
int numPartitions = -1;
|
||||
SortedSet<BytesRef> includeValues = null;
|
||||
if (includeObject != null) {
|
||||
if (includeObject instanceof String) {
|
||||
include = (String) includeObject;
|
||||
} else if (includeObject instanceof SortedSet) {
|
||||
includeValues = (SortedSet<BytesRef>) includeObject;
|
||||
} else if (includeObject instanceof Integer) {
|
||||
partition = (Integer) includeObject;
|
||||
Object numPartitionsObject = otherOptions.get(NUM_PARTITIONS_FIELD);
|
||||
if (numPartitionsObject instanceof Integer) {
|
||||
numPartitions = (Integer) numPartitionsObject;
|
||||
if (numPartitions < 2) {
|
||||
throw new IllegalArgumentException(NUM_PARTITIONS_FIELD.getPreferredName() + " must be >1");
|
||||
}
|
||||
if (partition < 0 || partition >= numPartitions) {
|
||||
throw new IllegalArgumentException(
|
||||
PARTITION_FIELD.getPreferredName() + " must be >=0 and <" + numPartitions);
|
||||
}
|
||||
} else {
|
||||
if (numPartitionsObject == null) {
|
||||
throw new IllegalArgumentException(NUM_PARTITIONS_FIELD.getPreferredName() + " parameter is missing");
|
||||
}
|
||||
throw new IllegalArgumentException(NUM_PARTITIONS_FIELD.getPreferredName() + " value must be an integer");
|
||||
}
|
||||
}
|
||||
}
|
||||
Object excludeObject = otherOptions.get(EXCLUDE_FIELD);
|
||||
if (numPartitions >0 ){
|
||||
if(excludeObject!=null){
|
||||
throw new IllegalArgumentException("Partitioned Include cannot be used in combination with excludes");
|
||||
}
|
||||
return new IncludeExclude(partition, numPartitions);
|
||||
}
|
||||
|
||||
|
||||
String exclude = null;
|
||||
SortedSet<BytesRef> excludeValues = null;
|
||||
if (excludeObject != null) {
|
||||
if (excludeObject instanceof String) {
|
||||
exclude = (String) excludeObject;
|
||||
} else if (excludeObject instanceof SortedSet) {
|
||||
excludeValues = (SortedSet<BytesRef>) excludeObject;
|
||||
}
|
||||
}
|
||||
RegExp includePattern = include != null ? new RegExp(include) : null;
|
||||
RegExp excludePattern = exclude != null ? new RegExp(exclude) : null;
|
||||
if (includePattern != null || excludePattern != null) {
|
||||
if (includeValues != null || excludeValues != null) {
|
||||
throw new IllegalArgumentException("Can only use regular expression include/exclude or a set of values, not both");
|
||||
}
|
||||
return new IncludeExclude(includePattern, excludePattern);
|
||||
} else if (includeValues != null || excludeValues != null) {
|
||||
return new IncludeExclude(includeValues, excludeValues);
|
||||
} else {
|
||||
return null;
|
||||
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
|
||||
if (!parser.currentToken().isValue()) {
|
||||
throw new ElasticsearchParseException("Array elements in include/exclude clauses should be string values");
|
||||
}
|
||||
set.add(new BytesRef(parser.text()));
|
||||
}
|
||||
return set;
|
||||
}
|
||||
|
||||
public boolean isRegexBased() {
|
||||
|
@ -21,8 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.avg;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -31,6 +34,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -39,6 +43,16 @@ public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
|
||||
public static final String NAME = "avg";
|
||||
private static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<AvgAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(AvgAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new AvgAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
public AvgAggregationBuilder(String name) {
|
||||
super(name, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
|
||||
}
|
||||
|
@ -1,48 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.avg;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class AvgParser extends NumericValuesSourceParser {
|
||||
|
||||
public AvgParser() {
|
||||
super(true, true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AvgAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
return new AvgAggregationBuilder(aggregationName);
|
||||
}
|
||||
}
|
@ -22,8 +22,11 @@ package org.elasticsearch.search.aggregations.metrics.cardinality;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -31,6 +34,7 @@ import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -42,8 +46,21 @@ public final class CardinalityAggregationBuilder
|
||||
public static final String NAME = "cardinality";
|
||||
private static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ParseField REHASH = new ParseField("rehash").withAllDeprecated("no replacement - values will always be rehashed");
|
||||
public static final ParseField PRECISION_THRESHOLD_FIELD = new ParseField("precision_threshold");
|
||||
|
||||
private static final ObjectParser<CardinalityAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(CardinalityAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareAnyFields(PARSER, true, false);
|
||||
PARSER.declareLong(CardinalityAggregationBuilder::precisionThreshold, CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD);
|
||||
PARSER.declareLong((b, v) -> {/*ignore*/}, REHASH);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new CardinalityAggregationBuilder(aggregationName, null), context);
|
||||
}
|
||||
|
||||
private Long precisionThreshold = null;
|
||||
|
||||
public CardinalityAggregationBuilder(String name, ValueType targetValueType) {
|
||||
|
@ -1,66 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.metrics.cardinality;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
|
||||
public class CardinalityParser extends AnyValuesSourceParser {
|
||||
|
||||
private static final ParseField REHASH = new ParseField("rehash").withAllDeprecated("no replacement - values will always be rehashed");
|
||||
|
||||
public CardinalityParser() {
|
||||
super(true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CardinalityAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
CardinalityAggregationBuilder factory = new CardinalityAggregationBuilder(aggregationName, targetValueType);
|
||||
Long precisionThreshold = (Long) otherOptions.get(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD);
|
||||
if (precisionThreshold != null) {
|
||||
factory.precisionThreshold(precisionThreshold);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
if (token.isValue()) {
|
||||
if (context.matchField(currentFieldName, CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD)) {
|
||||
otherOptions.put(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD, context.getParser().longValue());
|
||||
return true;
|
||||
} else if (context.matchField(currentFieldName, REHASH)) {
|
||||
// ignore
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
@ -21,8 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.geobounds;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -30,6 +33,7 @@ import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -39,6 +43,17 @@ public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<
|
||||
public static final String NAME = "geo_bounds";
|
||||
private static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<GeoBoundsAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(GeoBoundsAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareGeoFields(PARSER, false, false);
|
||||
PARSER.declareBoolean(GeoBoundsAggregationBuilder::wrapLongitude, GeoBoundsAggregator.WRAP_LONGITUDE_FIELD);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new GeoBoundsAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private boolean wrapLongitude = true;
|
||||
|
||||
public GeoBoundsAggregationBuilder(String name) {
|
||||
|
@ -1,61 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.metrics.geobounds;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.GeoPointValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class GeoBoundsParser extends GeoPointValuesSourceParser {
|
||||
|
||||
public GeoBoundsParser() {
|
||||
super(false, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GeoBoundsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
GeoBoundsAggregationBuilder factory = new GeoBoundsAggregationBuilder(aggregationName);
|
||||
Boolean wrapLongitude = (Boolean) otherOptions.get(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD);
|
||||
if (wrapLongitude != null) {
|
||||
factory.wrapLongitude(wrapLongitude);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
if (token == XContentParser.Token.VALUE_BOOLEAN) {
|
||||
if (context.matchField(currentFieldName, GeoBoundsAggregator.WRAP_LONGITUDE_FIELD)) {
|
||||
otherOptions.put(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD, context.getParser().booleanValue());
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
@ -21,8 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.geocentroid;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -30,6 +33,7 @@ import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -39,6 +43,16 @@ public class GeoCentroidAggregationBuilder
|
||||
public static final String NAME = "geo_centroid";
|
||||
public static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<GeoCentroidAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(GeoCentroidAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareGeoFields(PARSER, true, false);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new GeoCentroidAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
public GeoCentroidAggregationBuilder(String name) {
|
||||
super(name, TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
|
||||
}
|
||||
|
@ -1,52 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.metrics.geocentroid;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.GeoPointValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Parser class for {@link org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregator}
|
||||
*/
|
||||
public class GeoCentroidParser extends GeoPointValuesSourceParser {
|
||||
|
||||
public GeoCentroidParser() {
|
||||
super(true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GeoCentroidAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
return new GeoCentroidAggregationBuilder(aggregationName);
|
||||
}
|
||||
}
|
@ -21,8 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.max;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -31,6 +34,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -39,6 +43,16 @@ public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
|
||||
public static final String NAME = "max";
|
||||
public static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<MaxAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(MaxAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new MaxAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
public MaxAggregationBuilder(String name) {
|
||||
super(name, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
|
||||
}
|
||||
|
@ -1,48 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.max;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class MaxParser extends NumericValuesSourceParser {
|
||||
|
||||
public MaxParser() {
|
||||
super(true, true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MaxAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
return new MaxAggregationBuilder(aggregationName);
|
||||
}
|
||||
}
|
@ -21,16 +21,21 @@ package org.elasticsearch.search.aggregations.metrics.min;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -39,6 +44,16 @@ public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
|
||||
public static final String NAME = "min";
|
||||
private static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<MinAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(AvgAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new MinAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
public MinAggregationBuilder(String name) {
|
||||
super(name, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
|
||||
}
|
||||
|
@ -1,48 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.min;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class MinParser extends NumericValuesSourceParser {
|
||||
|
||||
public MinParser() {
|
||||
super(true, true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MinAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
return new MinAggregationBuilder(aggregationName);
|
||||
}
|
||||
}
|
@ -1,137 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.metrics.percentiles;
|
||||
|
||||
import com.carrotsearch.hppc.DoubleArrayList;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public abstract class AbstractPercentilesParser extends NumericValuesSourceParser {
|
||||
|
||||
public static final ParseField KEYED_FIELD = new ParseField("keyed");
|
||||
public static final ParseField METHOD_FIELD = new ParseField("method");
|
||||
public static final ParseField COMPRESSION_FIELD = new ParseField("compression");
|
||||
public static final ParseField NUMBER_SIGNIFICANT_DIGITS_FIELD = new ParseField("number_of_significant_value_digits");
|
||||
|
||||
public AbstractPercentilesParser(boolean formattable) {
|
||||
super(true, formattable, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
XContentParser parser = context.getParser();
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (context.matchField(currentFieldName, keysField())) {
|
||||
DoubleArrayList values = new DoubleArrayList(10);
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
double value = parser.doubleValue();
|
||||
values.add(value);
|
||||
}
|
||||
double[] keys = values.toArray();
|
||||
otherOptions.put(keysField(), keys);
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
|
||||
if (context.matchField(currentFieldName, KEYED_FIELD)) {
|
||||
boolean keyed = parser.booleanValue();
|
||||
otherOptions.put(KEYED_FIELD, keyed);
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
PercentilesMethod method = PercentilesMethod.resolveFromName(currentFieldName);
|
||||
if (method == null) {
|
||||
return false;
|
||||
} else {
|
||||
otherOptions.put(METHOD_FIELD, method);
|
||||
switch (method) {
|
||||
case TDIGEST:
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
if (context.matchField(currentFieldName, COMPRESSION_FIELD)) {
|
||||
double compression = parser.doubleValue();
|
||||
otherOptions.put(COMPRESSION_FIELD, compression);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case HDR:
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
if (context.matchField(currentFieldName, NUMBER_SIGNIFICANT_DIGITS_FIELD)) {
|
||||
int numberOfSignificantValueDigits = parser.intValue();
|
||||
otherOptions.put(NUMBER_SIGNIFICANT_DIGITS_FIELD, numberOfSignificantValueDigits);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ValuesSourceAggregationBuilder<Numeric, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
PercentilesMethod method = (PercentilesMethod) otherOptions.getOrDefault(METHOD_FIELD, PercentilesMethod.TDIGEST);
|
||||
|
||||
double[] cdfValues = (double[]) otherOptions.get(keysField());
|
||||
Double compression = (Double) otherOptions.get(COMPRESSION_FIELD);
|
||||
Integer numberOfSignificantValueDigits = (Integer) otherOptions.get(NUMBER_SIGNIFICANT_DIGITS_FIELD);
|
||||
Boolean keyed = (Boolean) otherOptions.get(KEYED_FIELD);
|
||||
return buildFactory(aggregationName, cdfValues, method, compression, numberOfSignificantValueDigits, keyed);
|
||||
}
|
||||
|
||||
protected abstract ValuesSourceAggregationBuilder<Numeric, ?> buildFactory(String aggregationName, double[] cdfValues,
|
||||
PercentilesMethod method,
|
||||
Double compression,
|
||||
Integer numberOfSignificantValueDigits, Boolean keyed);
|
||||
|
||||
protected abstract ParseField keysField();
|
||||
|
||||
}
|
@ -19,9 +19,13 @@
|
||||
|
||||
package org.elasticsearch.search.aggregations.metrics.percentiles;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
@ -34,6 +38,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder.LeafOnly;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -44,6 +49,59 @@ public class PercentileRanksAggregationBuilder extends LeafOnly<ValuesSource.Num
|
||||
public static final String NAME = PercentileRanks.TYPE_NAME;
|
||||
public static final Type TYPE = new Type(NAME);
|
||||
|
||||
public static final ParseField VALUES_FIELD = new ParseField("values");
|
||||
|
||||
private static class TDigestOptions {
|
||||
Double compression;
|
||||
}
|
||||
|
||||
private static final ObjectParser<TDigestOptions, QueryParseContext> TDIGEST_OPTIONS_PARSER =
|
||||
new ObjectParser<>(PercentilesMethod.TDIGEST.getParseField().getPreferredName(), TDigestOptions::new);
|
||||
static {
|
||||
TDIGEST_OPTIONS_PARSER.declareDouble((opts, compression) -> opts.compression = compression, new ParseField("compression"));
|
||||
}
|
||||
|
||||
private static class HDROptions {
|
||||
Integer numberOfSigDigits;
|
||||
}
|
||||
|
||||
private static final ObjectParser<HDROptions, QueryParseContext> HDR_OPTIONS_PARSER =
|
||||
new ObjectParser<>(PercentilesMethod.HDR.getParseField().getPreferredName(), HDROptions::new);
|
||||
static {
|
||||
HDR_OPTIONS_PARSER.declareInt((opts, numberOfSigDigits) -> opts.numberOfSigDigits = numberOfSigDigits,
|
||||
new ParseField("number_of_significant_value_digits"));
|
||||
}
|
||||
|
||||
private static final ObjectParser<PercentileRanksAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(PercentileRanksAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, false, false);
|
||||
|
||||
PARSER.declareDoubleArray(
|
||||
(b, v) -> b.values(v.stream().mapToDouble(Double::doubleValue).toArray()),
|
||||
VALUES_FIELD);
|
||||
|
||||
PARSER.declareBoolean(PercentileRanksAggregationBuilder::keyed, PercentilesAggregationBuilder.KEYED_FIELD);
|
||||
|
||||
PARSER.declareField((b, v) -> {
|
||||
b.method(PercentilesMethod.TDIGEST);
|
||||
if (v.compression != null) {
|
||||
b.compression(v.compression);
|
||||
}
|
||||
}, TDIGEST_OPTIONS_PARSER::parse, PercentilesMethod.TDIGEST.getParseField(), ObjectParser.ValueType.OBJECT);
|
||||
|
||||
PARSER.declareField((b, v) -> {
|
||||
b.method(PercentilesMethod.HDR);
|
||||
if (v.numberOfSigDigits != null) {
|
||||
b.numberOfSignificantValueDigits(v.numberOfSigDigits);
|
||||
}
|
||||
}, HDR_OPTIONS_PARSER::parse, PercentilesMethod.HDR.getParseField(), ObjectParser.ValueType.OBJECT);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new PercentileRanksAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private double[] values;
|
||||
private PercentilesMethod method = PercentilesMethod.TDIGEST;
|
||||
private int numberOfSignificantValueDigits = 3;
|
||||
@ -174,19 +232,19 @@ public class PercentileRanksAggregationBuilder extends LeafOnly<ValuesSource.Num
|
||||
return new HDRPercentileRanksAggregatorFactory(name, type, config, values, numberOfSignificantValueDigits, keyed, context,
|
||||
parent, subFactoriesBuilder, metaData);
|
||||
default:
|
||||
throw new IllegalStateException("Illegal method [" + method.getName() + "]");
|
||||
throw new IllegalStateException("Illegal method [" + method + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.array(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values);
|
||||
builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
|
||||
builder.startObject(method.getName());
|
||||
builder.array(VALUES_FIELD.getPreferredName(), values);
|
||||
builder.field(PercentilesAggregationBuilder.KEYED_FIELD.getPreferredName(), keyed);
|
||||
builder.startObject(method.toString());
|
||||
if (method == PercentilesMethod.TDIGEST) {
|
||||
builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression);
|
||||
builder.field(PercentilesAggregationBuilder.COMPRESSION_FIELD.getPreferredName(), compression);
|
||||
} else {
|
||||
builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits);
|
||||
builder.field(PercentilesAggregationBuilder.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
@ -207,7 +265,7 @@ public class PercentileRanksAggregationBuilder extends LeafOnly<ValuesSource.Num
|
||||
equalSettings = Objects.equals(compression, other.compression);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalStateException("Illegal method [" + method.getName() + "]");
|
||||
throw new IllegalStateException("Illegal method [" + method + "]");
|
||||
}
|
||||
return equalSettings
|
||||
&& Objects.deepEquals(values, other.values)
|
||||
@ -223,7 +281,7 @@ public class PercentileRanksAggregationBuilder extends LeafOnly<ValuesSource.Num
|
||||
case TDIGEST:
|
||||
return Objects.hash(Arrays.hashCode(values), keyed, compression, method);
|
||||
default:
|
||||
throw new IllegalStateException("Illegal method [" + method.getName() + "]");
|
||||
throw new IllegalStateException("Illegal method [" + method + "]");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,60 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.percentiles;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
|
||||
public class PercentileRanksParser extends AbstractPercentilesParser {
|
||||
|
||||
public static final ParseField VALUES_FIELD = new ParseField("values");
|
||||
|
||||
public PercentileRanksParser() {
|
||||
super(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ParseField keysField() {
|
||||
return VALUES_FIELD;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ValuesSourceAggregationBuilder<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
|
||||
Double compression, Integer numberOfSignificantValueDigits,
|
||||
Boolean keyed) {
|
||||
PercentileRanksAggregationBuilder factory = new PercentileRanksAggregationBuilder(aggregationName);
|
||||
if (keys != null) {
|
||||
factory.values(keys);
|
||||
}
|
||||
if (method != null) {
|
||||
factory.method(method);
|
||||
}
|
||||
if (compression != null) {
|
||||
factory.compression(compression);
|
||||
}
|
||||
if (numberOfSignificantValueDigits != null) {
|
||||
factory.numberOfSignificantValueDigits(numberOfSignificantValueDigits);
|
||||
}
|
||||
if (keyed != null) {
|
||||
factory.keyed(keyed);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
}
|
@ -19,10 +19,14 @@
|
||||
|
||||
package org.elasticsearch.search.aggregations.metrics.percentiles;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentilesAggregatorFactory;
|
||||
@ -34,6 +38,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder.LeafOnly;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -44,7 +49,66 @@ public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric
|
||||
public static final String NAME = Percentiles.TYPE_NAME;
|
||||
public static final Type TYPE = new Type(NAME);
|
||||
|
||||
private double[] percents = PercentilesParser.DEFAULT_PERCENTS;
|
||||
public static final double[] DEFAULT_PERCENTS = new double[] { 1, 5, 25, 50, 75, 95, 99 };
|
||||
public static final ParseField PERCENTS_FIELD = new ParseField("percents");
|
||||
public static final ParseField KEYED_FIELD = new ParseField("keyed");
|
||||
public static final ParseField METHOD_FIELD = new ParseField("method");
|
||||
public static final ParseField COMPRESSION_FIELD = new ParseField("compression");
|
||||
public static final ParseField NUMBER_SIGNIFICANT_DIGITS_FIELD = new ParseField("number_of_significant_value_digits");
|
||||
|
||||
private static class TDigestOptions {
|
||||
Double compression;
|
||||
}
|
||||
|
||||
private static final ObjectParser<TDigestOptions, QueryParseContext> TDIGEST_OPTIONS_PARSER =
|
||||
new ObjectParser<>(PercentilesMethod.TDIGEST.getParseField().getPreferredName(), TDigestOptions::new);
|
||||
static {
|
||||
TDIGEST_OPTIONS_PARSER.declareDouble((opts, compression) -> opts.compression = compression, COMPRESSION_FIELD);
|
||||
}
|
||||
|
||||
private static class HDROptions {
|
||||
Integer numberOfSigDigits;
|
||||
}
|
||||
|
||||
private static final ObjectParser<HDROptions, QueryParseContext> HDR_OPTIONS_PARSER =
|
||||
new ObjectParser<>(PercentilesMethod.HDR.getParseField().getPreferredName(), HDROptions::new);
|
||||
static {
|
||||
HDR_OPTIONS_PARSER.declareInt(
|
||||
(opts, numberOfSigDigits) -> opts.numberOfSigDigits = numberOfSigDigits,
|
||||
NUMBER_SIGNIFICANT_DIGITS_FIELD);
|
||||
}
|
||||
|
||||
private static final ObjectParser<PercentilesAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(PercentilesAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
|
||||
|
||||
PARSER.declareDoubleArray(
|
||||
(b, v) -> b.percentiles(v.stream().mapToDouble(Double::doubleValue).toArray()),
|
||||
PERCENTS_FIELD);
|
||||
|
||||
PARSER.declareBoolean(PercentilesAggregationBuilder::keyed, KEYED_FIELD);
|
||||
|
||||
PARSER.declareField((b, v) -> {
|
||||
b.method(PercentilesMethod.TDIGEST);
|
||||
if (v.compression != null) {
|
||||
b.compression(v.compression);
|
||||
}
|
||||
}, TDIGEST_OPTIONS_PARSER::parse, PercentilesMethod.TDIGEST.getParseField(), ObjectParser.ValueType.OBJECT);
|
||||
|
||||
PARSER.declareField((b, v) -> {
|
||||
b.method(PercentilesMethod.HDR);
|
||||
if (v.numberOfSigDigits != null) {
|
||||
b.numberOfSignificantValueDigits(v.numberOfSigDigits);
|
||||
}
|
||||
}, HDR_OPTIONS_PARSER::parse, PercentilesMethod.HDR.getParseField(), ObjectParser.ValueType.OBJECT);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new PercentilesAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private double[] percents = DEFAULT_PERCENTS;
|
||||
private PercentilesMethod method = PercentilesMethod.TDIGEST;
|
||||
private int numberOfSignificantValueDigits = 3;
|
||||
private double compression = 100.0;
|
||||
@ -174,19 +238,19 @@ public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric
|
||||
return new HDRPercentilesAggregatorFactory(name, type, config, percents, numberOfSignificantValueDigits, keyed, context, parent,
|
||||
subFactoriesBuilder, metaData);
|
||||
default:
|
||||
throw new IllegalStateException("Illegal method [" + method.getName() + "]");
|
||||
throw new IllegalStateException("Illegal method [" + method + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.array(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percents);
|
||||
builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
|
||||
builder.startObject(method.getName());
|
||||
builder.array(PERCENTS_FIELD.getPreferredName(), percents);
|
||||
builder.field(KEYED_FIELD.getPreferredName(), keyed);
|
||||
builder.startObject(method.toString());
|
||||
if (method == PercentilesMethod.TDIGEST) {
|
||||
builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression);
|
||||
builder.field(COMPRESSION_FIELD.getPreferredName(), compression);
|
||||
} else {
|
||||
builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits);
|
||||
builder.field(NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
@ -207,7 +271,7 @@ public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric
|
||||
equalSettings = Objects.equals(compression, other.compression);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalStateException("Illegal method [" + method.getName() + "]");
|
||||
throw new IllegalStateException("Illegal method [" + method.toString() + "]");
|
||||
}
|
||||
return equalSettings
|
||||
&& Objects.deepEquals(percents, other.percents)
|
||||
@ -223,7 +287,7 @@ public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric
|
||||
case TDIGEST:
|
||||
return Objects.hash(Arrays.hashCode(percents), keyed, compression, method);
|
||||
default:
|
||||
throw new IllegalStateException("Illegal method [" + method.getName() + "]");
|
||||
throw new IllegalStateException("Illegal method [" + method.toString() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -19,6 +19,7 @@
|
||||
|
||||
package org.elasticsearch.search.aggregations.metrics.percentiles;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
@ -32,23 +33,23 @@ public enum PercentilesMethod implements Writeable {
|
||||
/**
|
||||
* The TDigest method for calculating percentiles
|
||||
*/
|
||||
TDIGEST("tdigest"),
|
||||
TDIGEST("tdigest", "TDigest", "TDIGEST"),
|
||||
/**
|
||||
* The HDRHistogram method of calculating percentiles
|
||||
*/
|
||||
HDR("hdr");
|
||||
HDR("hdr", "HDR");
|
||||
|
||||
private String name;
|
||||
private final ParseField parseField;
|
||||
|
||||
private PercentilesMethod(String name) {
|
||||
this.name = name;
|
||||
private PercentilesMethod(String name, String... deprecatedNames) {
|
||||
this.parseField = new ParseField(name, deprecatedNames);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the name of the method
|
||||
*/
|
||||
public String getName() {
|
||||
return name;
|
||||
public ParseField getParseField() {
|
||||
return parseField;
|
||||
}
|
||||
|
||||
public static PercentilesMethod readFromStream(StreamInput in) throws IOException {
|
||||
@ -64,16 +65,8 @@ public enum PercentilesMethod implements Writeable {
|
||||
out.writeVInt(ordinal());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link PercentilesMethod} for this method name. returns
|
||||
* <code>null</code> if no {@link PercentilesMethod} exists for the name.
|
||||
*/
|
||||
public static PercentilesMethod resolveFromName(String name) {
|
||||
for (PercentilesMethod method : values()) {
|
||||
if (method.name.equalsIgnoreCase(name)) {
|
||||
return method;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
@Override
|
||||
public String toString() {
|
||||
return parseField.getPreferredName();
|
||||
}
|
||||
}
|
@ -1,62 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.percentiles;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
|
||||
public class PercentilesParser extends AbstractPercentilesParser {
|
||||
|
||||
public static final ParseField PERCENTS_FIELD = new ParseField("percents");
|
||||
|
||||
public PercentilesParser() {
|
||||
super(true);
|
||||
}
|
||||
|
||||
public static final double[] DEFAULT_PERCENTS = new double[] { 1, 5, 25, 50, 75, 95, 99 };
|
||||
|
||||
@Override
|
||||
protected ParseField keysField() {
|
||||
return PERCENTS_FIELD;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ValuesSourceAggregationBuilder<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
|
||||
Double compression, Integer numberOfSignificantValueDigits,
|
||||
Boolean keyed) {
|
||||
PercentilesAggregationBuilder factory = new PercentilesAggregationBuilder(aggregationName);
|
||||
if (keys != null) {
|
||||
factory.percentiles(keys);
|
||||
}
|
||||
if (method != null) {
|
||||
factory.method(method);
|
||||
}
|
||||
if (compression != null) {
|
||||
factory.compression(compression);
|
||||
}
|
||||
if (numberOfSignificantValueDigits != null) {
|
||||
factory.numberOfSignificantValueDigits(numberOfSignificantValueDigits);
|
||||
}
|
||||
if (keyed != null) {
|
||||
factory.keyed(keyed);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
}
|
@ -21,8 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.stats;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -31,6 +34,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -39,6 +43,16 @@ public class StatsAggregationBuilder extends ValuesSourceAggregationBuilder.Leaf
|
||||
public static final String NAME = "stats";
|
||||
private static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<StatsAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(StatsAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new StatsAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
public StatsAggregationBuilder(String name) {
|
||||
super(name, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
|
||||
}
|
||||
|
@ -1,48 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.stats;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class StatsParser extends NumericValuesSourceParser {
|
||||
|
||||
public StatsParser() {
|
||||
super(true, true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected StatsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
return new StatsAggregationBuilder(aggregationName);
|
||||
}
|
||||
}
|
@ -21,8 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.stats.extended;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -31,6 +34,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -41,6 +45,17 @@ public class ExtendedStatsAggregationBuilder
|
||||
public static final String NAME = "extended_stats";
|
||||
public static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<ExtendedStatsAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(ExtendedStatsAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
|
||||
PARSER.declareDouble(ExtendedStatsAggregationBuilder::sigma, ExtendedStatsAggregator.SIGMA_FIELD);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new ExtendedStatsAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
private double sigma = 2.0;
|
||||
|
||||
public ExtendedStatsAggregationBuilder(String name) {
|
||||
|
@ -1,59 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.stats.extended;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class ExtendedStatsParser extends NumericValuesSourceParser {
|
||||
|
||||
public ExtendedStatsParser() {
|
||||
super(true, true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
if (context.matchField(currentFieldName, ExtendedStatsAggregator.SIGMA_FIELD)) {
|
||||
if (token.isValue()) {
|
||||
otherOptions.put(ExtendedStatsAggregator.SIGMA_FIELD, context.getParser().doubleValue());
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ExtendedStatsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
ExtendedStatsAggregationBuilder factory = new ExtendedStatsAggregationBuilder(aggregationName);
|
||||
Double sigma = (Double) otherOptions.get(ExtendedStatsAggregator.SIGMA_FIELD);
|
||||
if (sigma != null) {
|
||||
factory.sigma(sigma);
|
||||
}
|
||||
return factory;
|
||||
}
|
||||
}
|
@ -21,8 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.sum;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
@ -31,6 +34,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -39,6 +43,16 @@ public class SumAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
|
||||
public static final String NAME = "sum";
|
||||
private static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<SumAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(SumAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new SumAggregationBuilder(aggregationName), context);
|
||||
}
|
||||
|
||||
public SumAggregationBuilder(String name) {
|
||||
super(name, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
|
||||
}
|
||||
|
@ -1,48 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.sum;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class SumParser extends NumericValuesSourceParser {
|
||||
|
||||
public SumParser() {
|
||||
super(true, true, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SumAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
return new SumAggregationBuilder(aggregationName);
|
||||
}
|
||||
}
|
@ -21,7 +21,10 @@ package org.elasticsearch.search.aggregations.metrics.valuecount;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
@ -30,6 +33,7 @@ import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -38,6 +42,16 @@ public class ValueCountAggregationBuilder extends ValuesSourceAggregationBuilder
|
||||
public static final String NAME = "value_count";
|
||||
public static final Type TYPE = new Type(NAME);
|
||||
|
||||
private static final ObjectParser<ValueCountAggregationBuilder, QueryParseContext> PARSER;
|
||||
static {
|
||||
PARSER = new ObjectParser<>(ValueCountAggregationBuilder.NAME);
|
||||
ValuesSourceParserHelper.declareAnyFields(PARSER, true, true);
|
||||
}
|
||||
|
||||
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new ValueCountAggregationBuilder(aggregationName, null), context);
|
||||
}
|
||||
|
||||
public ValueCountAggregationBuilder(String name, ValueType targetValueType) {
|
||||
super(name, TYPE, ValuesSourceType.ANY, targetValueType);
|
||||
}
|
||||
|
@ -1,50 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.valuecount;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class ValueCountParser extends AnyValuesSourceParser {
|
||||
|
||||
public ValueCountParser() {
|
||||
super(true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
|
||||
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ValuesSourceAggregationBuilder<ValuesSource, ValueCountAggregationBuilder> createFactory(
|
||||
String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map<ParseField, Object> otherOptions) {
|
||||
return new ValueCountAggregationBuilder(aggregationName, targetValueType);
|
||||
}
|
||||
}
|
@ -1,216 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.support;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Shared parsing skeleton for values-source based aggregations. Handles the
 * options common to all of them (field, script, value_type, format, missing,
 * and optionally time_zone) and delegates everything else to subclasses via
 * {@link #token(String, String, XContentParser.Token, XContentParseContext, Map)}.
 */
public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
        implements Aggregator.Parser {
    static final ParseField TIME_ZONE = new ParseField("time_zone");

    /** Base for parsers that accept any values-source type (no target value type). */
    public abstract static class AnyValuesSourceParser extends AbstractValuesSourceParser<ValuesSource> {

        protected AnyValuesSourceParser(boolean scriptable, boolean formattable) {
            super(scriptable, formattable, false, ValuesSourceType.ANY, null);
        }
    }

    /** Base for parsers over numeric values; optionally time-zone aware. */
    public abstract static class NumericValuesSourceParser extends AbstractValuesSourceParser<ValuesSource.Numeric> {

        protected NumericValuesSourceParser(boolean scriptable, boolean formattable, boolean timezoneAware) {
            super(scriptable, formattable, timezoneAware, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
        }
    }

    /** Base for parsers over bytes/string values. */
    public abstract static class BytesValuesSourceParser extends AbstractValuesSourceParser<ValuesSource.Bytes> {

        protected BytesValuesSourceParser(boolean scriptable, boolean formattable) {
            super(scriptable, formattable, false, ValuesSourceType.BYTES, ValueType.STRING);
        }
    }

    /** Base for parsers over geo-point values. */
    public abstract static class GeoPointValuesSourceParser extends AbstractValuesSourceParser<ValuesSource.GeoPoint> {

        protected GeoPointValuesSourceParser(boolean scriptable, boolean formattable) {
            super(scriptable, formattable, false, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
        }
    }

    // Flags controlling which common options this parser accepts; fixed at
    // construction time by the subclass.
    private boolean scriptable = true;
    private boolean formattable = false;
    private boolean timezoneAware = false;
    private ValuesSourceType valuesSourceType = null;
    private ValueType targetValueType = null;

    private AbstractValuesSourceParser(boolean scriptable, boolean formattable, boolean timezoneAware, ValuesSourceType valuesSourceType,
            ValueType targetValueType) {
        this.timezoneAware = timezoneAware;
        this.valuesSourceType = valuesSourceType;
        this.targetValueType = targetValueType;
        this.scriptable = scriptable;
        this.formattable = formattable;
    }

    @Override
    public final ValuesSourceAggregationBuilder<VS, ?> parse(String aggregationName, QueryParseContext context)
            throws IOException {

        XContentParser parser = context.parser();
        String field = null;
        Script script = null;
        ValueType valueType = null;
        String format = null;
        Object missing = null;
        DateTimeZone timezone = null;
        // Options recognized by the subclass's token() hook, keyed by field.
        Map<ParseField, Object> otherOptions = new HashMap<>();
        XContentParseContext parserContext =
                new XContentParseContext(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());

        XContentParser.Token token;
        String currentFieldName = null;
        // Walk the aggregation's JSON object one token at a time. Dispatch
        // order matters: common options are claimed first, and only tokens
        // nobody recognizes become parse errors.
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if ("missing".equals(currentFieldName) && token.isValue()) {
                missing = parser.objectText();
            } else if (timezoneAware && context.getParseFieldMatcher().match(currentFieldName, TIME_ZONE)) {
                // time_zone accepts either an ID string or a numeric hour offset.
                if (token == XContentParser.Token.VALUE_STRING) {
                    timezone = DateTimeZone.forID(parser.text());
                } else if (token == XContentParser.Token.VALUE_NUMBER) {
                    timezone = DateTimeZone.forOffsetHours(parser.intValue());
                } else {
                    throw new ParsingException(parser.getTokenLocation(),
                            "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
                }
            } else if (token == XContentParser.Token.VALUE_STRING) {
                if ("field".equals(currentFieldName)) {
                    field = parser.text();
                } else if (formattable && "format".equals(currentFieldName)) {
                    format = parser.text();
                } else if (scriptable) {
                    if ("value_type".equals(currentFieldName) || "valueType".equals(currentFieldName)) {
                        valueType = ValueType.resolveForScript(parser.text());
                        // Reject an explicit value_type that conflicts with the
                        // type this aggregation is declared to work on.
                        if (targetValueType != null && valueType.isNotA(targetValueType)) {
                            throw new ParsingException(parser.getTokenLocation(),
                                    "Aggregation [" + aggregationName + "] was configured with an incompatible value type ["
                                            + valueType + "]. It can only work on value of type ["
                                            + targetValueType + "]");
                        }
                    } else if (!token(aggregationName, currentFieldName, token, parserContext, otherOptions)) {
                        throw new ParsingException(parser.getTokenLocation(),
                                "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
                    }
                } else if (!token(aggregationName, currentFieldName, token, parserContext, otherOptions)) {
                    throw new ParsingException(parser.getTokenLocation(),
                            "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
                }
            } else if (scriptable && token == XContentParser.Token.START_OBJECT) {
                if (context.getParseFieldMatcher().match(currentFieldName, Script.SCRIPT_PARSE_FIELD)) {
                    script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
                } else if (!token(aggregationName, currentFieldName, token, parserContext, otherOptions)) {
                    throw new ParsingException(parser.getTokenLocation(),
                            "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
                }
            } else if (!token(aggregationName, currentFieldName, token, parserContext, otherOptions)) {
                throw new ParsingException(parser.getTokenLocation(),
                        "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
            }
        }

        // Build the concrete factory, then apply only the common options that
        // actually appeared in the request.
        ValuesSourceAggregationBuilder<VS, ?> factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType,
                otherOptions);
        if (field != null) {
            factory.field(field);
        }
        if (script != null) {
            factory.script(script);
        }
        if (valueType != null) {
            factory.valueType(valueType);
        }
        if (format != null) {
            factory.format(format);
        }
        if (missing != null) {
            factory.missing(missing);
        }
        if (timezone != null) {
            factory.timeZone(timezone);
        }
        return factory;
    }

    /**
     * Creates a {@link ValuesSourceAggregationBuilder} from the information
     * gathered by the subclass. Options parsed in
     * {@link AbstractValuesSourceParser} itself will be added to the factory
     * after it has been returned by this method.
     *
     * @param aggregationName
     *            the name of the aggregation
     * @param valuesSourceType
     *            the type of the {@link ValuesSource}
     * @param targetValueType
     *            the target type of the final value output by the aggregation
     * @param otherOptions
     *            a {@link Map} containing the extra options parsed by the
     *            {@link #token(String, String, XContentParser.Token, XContentParseContext, Map)}
     *            method
     * @return the created factory
     */
    protected abstract ValuesSourceAggregationBuilder<VS, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
            ValueType targetValueType, Map<ParseField, Object> otherOptions);

    /**
     * Allows subclasses of {@link AbstractValuesSourceParser} to parse extra
     * parameters and store them in a {@link Map} which will later be passed to
     * {@link #createFactory(String, ValuesSourceType, ValueType, Map)}.
     *
     * @param aggregationName
     *            the name of the aggregation
     * @param currentFieldName
     *            the name of the current field being parsed
     * @param token
     *            the current token for the parser
     * @param context
     *            the query context
     * @param otherOptions
     *            a {@link Map} of options to be populated by successive calls
     *            to this method which will then be passed to the
     *            {@link #createFactory(String, ValuesSourceType, ValueType, Map)}
     *            method
     * @return <code>true</code> if the current token was correctly parsed,
     *         <code>false</code> otherwise
     * @throws IOException
     *             if an error occurs whilst parsing
     */
    protected abstract boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
            XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException;
}
|
@ -1,97 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.support;
|
||||
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Helper that parses a single geo-point option for an aggregation. A point
 * may be written as a string (handled by {@code GeoPoint.resetFromString}),
 * as an array of the form {@code [lon, lat]}, or as an object with
 * {@code lat}/{@code lon} fields. The parsed point is stored into the
 * supplied options map under {@link #field}.
 */
public class GeoPointParser {

    // Aggregation type, used only in error messages.
    private final InternalAggregation.Type aggType;
    // The option name this parser is responsible for.
    private final ParseField field;

    public GeoPointParser(InternalAggregation.Type aggType, ParseField field) {
        this.aggType = aggType;
        this.field = field;
    }

    /**
     * Attempts to parse the current token as this parser's geo-point option.
     *
     * @return {@code true} if the field matched and a point was stored into
     *         {@code otherOptions}, {@code false} if the field name did not
     *         match or the token shape was not one of the supported forms
     * @throws ParsingException if the field matched but the array/object
     *         payload is malformed
     */
    public boolean token(String aggName, String currentFieldName, XContentParser.Token token, XContentParser parser,
            ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
        if (!parseFieldMatcher.match(currentFieldName, field)) {
            return false;
        }
        if (token == XContentParser.Token.VALUE_STRING) {
            // String form, e.g. "lat,lon" or a geohash — delegated to GeoPoint.
            GeoPoint point = new GeoPoint();
            point.resetFromString(parser.text());
            otherOptions.put(field, point);
            return true;
        }
        if (token == XContentParser.Token.START_ARRAY) {
            double lat = Double.NaN;
            double lon = Double.NaN;
            // Array form is [lon, lat]: the first element fills lon, the
            // second fills lat; any third element is an error.
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                if (Double.isNaN(lon)) {
                    lon = parser.doubleValue();
                } else if (Double.isNaN(lat)) {
                    lat = parser.doubleValue();
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "malformed [" + currentFieldName + "] geo point array in ["
                            + aggName + "] " + aggType + " aggregation. a geo point array must be of the form [lon, lat]");
                }
            }
            GeoPoint point = new GeoPoint(lat, lon);
            otherOptions.put(field, point);
            return true;
        }
        if (token == XContentParser.Token.START_OBJECT) {
            double lat = Double.NaN;
            double lon = Double.NaN;
            // Object form: read "lat" and "lon" numeric fields; unknown
            // fields are silently skipped.
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token == XContentParser.Token.VALUE_NUMBER) {
                    if ("lat".equals(currentFieldName)) {
                        lat = parser.doubleValue();
                    } else if ("lon".equals(currentFieldName)) {
                        lon = parser.doubleValue();
                    }
                }
            }
            // Both coordinates are required in the object form.
            if (Double.isNaN(lat) || Double.isNaN(lon)) {
                throw new ParsingException(parser.getTokenLocation(),
                        "malformed [" + currentFieldName + "] geo point object. either [lat] or [lon] (or both) are " + "missing in ["
                                + aggName + "] " + aggType + " aggregation");
            }
            GeoPoint point = new GeoPoint(lat, lon);
            otherOptions.put(field, point);
            return true;
        }
        // Field matched but token shape is unsupported; let the caller decide.
        return false;
    }

}
|
@ -0,0 +1,102 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.support;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
public final class ValuesSourceParserHelper {
|
||||
static final ParseField TIME_ZONE = new ParseField("time_zone");
|
||||
|
||||
private ValuesSourceParserHelper() {} // utility class, no instantiation
|
||||
|
||||
public static void declareAnyFields(
|
||||
ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource, ?>, QueryParseContext> objectParser,
|
||||
boolean scriptable, boolean formattable) {
|
||||
declareFields(objectParser, scriptable, formattable, false, ValuesSourceType.ANY, null);
|
||||
}
|
||||
|
||||
public static void declareNumericFields(
|
||||
ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, ?>, QueryParseContext> objectParser,
|
||||
boolean scriptable, boolean formattable, boolean timezoneAware) {
|
||||
declareFields(objectParser, scriptable, formattable, timezoneAware, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
|
||||
}
|
||||
|
||||
public static void declareBytesFields(
|
||||
ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.Bytes, ?>, QueryParseContext> objectParser,
|
||||
boolean scriptable, boolean formattable) {
|
||||
declareFields(objectParser, scriptable, formattable, false, ValuesSourceType.BYTES, ValueType.STRING);
|
||||
}
|
||||
|
||||
public static void declareGeoFields(
|
||||
ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, ?>, QueryParseContext> objectParser,
|
||||
boolean scriptable, boolean formattable) {
|
||||
declareFields(objectParser, scriptable, formattable, false, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
|
||||
}
|
||||
|
||||
private static <VS extends ValuesSource> void declareFields(
|
||||
ObjectParser<? extends ValuesSourceAggregationBuilder<VS, ?>, QueryParseContext> objectParser,
|
||||
boolean scriptable, boolean formattable, boolean timezoneAware, ValuesSourceType valuesSourceType, ValueType targetValueType) {
|
||||
|
||||
|
||||
objectParser.declareField(ValuesSourceAggregationBuilder::field, XContentParser::text,
|
||||
new ParseField("field"), ObjectParser.ValueType.STRING);
|
||||
|
||||
objectParser.declareField(ValuesSourceAggregationBuilder::missing, XContentParser::objectText,
|
||||
new ParseField("missing"), ObjectParser.ValueType.VALUE);
|
||||
|
||||
if (formattable) {
|
||||
objectParser.declareField(ValuesSourceAggregationBuilder::format, XContentParser::text,
|
||||
new ParseField("format"), ObjectParser.ValueType.STRING);
|
||||
}
|
||||
|
||||
if (scriptable) {
|
||||
objectParser.declareField(ValuesSourceAggregationBuilder::script, org.elasticsearch.script.Script::parse,
|
||||
Script.SCRIPT_PARSE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING);
|
||||
|
||||
objectParser.declareField(ValuesSourceAggregationBuilder::valueType, p -> {
|
||||
ValueType valueType = ValueType.resolveForScript(p.text());
|
||||
if (targetValueType != null && valueType.isNotA(targetValueType)) {
|
||||
throw new ParsingException(p.getTokenLocation(),
|
||||
"Aggregation [" + objectParser.getName() + "] was configured with an incompatible value type ["
|
||||
+ valueType + "]. It can only work on value of type ["
|
||||
+ targetValueType + "]");
|
||||
}
|
||||
return valueType;
|
||||
}, new ParseField("value_type", "valueType"), ObjectParser.ValueType.STRING);
|
||||
}
|
||||
|
||||
if (timezoneAware) {
|
||||
objectParser.declareField(ValuesSourceAggregationBuilder::timeZone, p -> {
|
||||
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
|
||||
return DateTimeZone.forID(p.text());
|
||||
} else {
|
||||
return DateTimeZone.forOffsetHours(p.intValue());
|
||||
}
|
||||
}, TIME_ZONE, ObjectParser.ValueType.LONG);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -1,65 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.support;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
/**
|
||||
* A minimal context for parsing xcontent into aggregation builders.
|
||||
* Only a minimal set of dependencies and settings are available.
|
||||
*/
|
||||
public final class XContentParseContext {
|
||||
|
||||
private final XContentParser parser;
|
||||
|
||||
private final ParseFieldMatcher parseFieldMatcher;
|
||||
|
||||
private final String defaultScriptLanguage;
|
||||
|
||||
public XContentParseContext(XContentParser parser, ParseFieldMatcher parseFieldMatcher, String defaultScriptLanguage) {
|
||||
this.parser = parser;
|
||||
this.parseFieldMatcher = parseFieldMatcher;
|
||||
this.defaultScriptLanguage = defaultScriptLanguage;
|
||||
}
|
||||
|
||||
public XContentParser getParser() {
|
||||
return parser;
|
||||
}
|
||||
|
||||
public ParseFieldMatcher getParseFieldMatcher() {
|
||||
return parseFieldMatcher;
|
||||
}
|
||||
|
||||
public String getDefaultScriptLanguage() {
|
||||
return defaultScriptLanguage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the parse field we're looking for matches with the found field name.
|
||||
*
|
||||
* Helper that delegates to {@link ParseFieldMatcher#match(String, ParseField)}.
|
||||
*/
|
||||
public boolean matchField(String fieldName, ParseField parseField) {
|
||||
return parseFieldMatcher.match(fieldName, parseField);
|
||||
}
|
||||
|
||||
}
|
@ -25,7 +25,6 @@ import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParseFieldMatcherSupplier;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.AbstractObjectParser.ContextParser;
|
||||
import org.elasticsearch.common.xcontent.AbstractObjectParser.NoContextParser;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.hamcrest.Matcher;
|
||||
|
@ -22,7 +22,6 @@ import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParseFieldMatcherSupplier;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.AbstractObjectParser.ContextParser;
|
||||
import org.elasticsearch.common.xcontent.AbstractObjectParser.NoContextParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
|
||||
|
@ -42,7 +42,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSq
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsParser;
|
||||
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder;
|
||||
@ -149,7 +148,8 @@ public class SearchModuleTests extends ModuleTestCase {
|
||||
|
||||
SearchPlugin registersDupeAggregation = new SearchPlugin() {
|
||||
public List<AggregationSpec> getAggregations() {
|
||||
return singletonList(new AggregationSpec(TermsAggregationBuilder.NAME, TermsAggregationBuilder::new, new TermsParser()));
|
||||
return singletonList(new AggregationSpec(TermsAggregationBuilder.NAME, TermsAggregationBuilder::new,
|
||||
TermsAggregationBuilder::parse));
|
||||
}
|
||||
};
|
||||
expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, false,
|
||||
|
@ -24,7 +24,7 @@ import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.Range;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder.Range;
|
||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||
|
||||
public class GeoDistanceRangeTests extends BaseAggregationTestCase<GeoDistanceAggregationBuilder> {
|
||||
|
@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.plugins.ScriptPlugin;
|
||||
@ -48,7 +49,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
|
||||
import org.elasticsearch.search.aggregations.support.XContentParseContext;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods;
|
||||
|
||||
@ -237,9 +237,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
|
||||
return subsetFreq / subsetSize > supersetFreq / supersetSize ? 2.0 : 1.0;
|
||||
}
|
||||
|
||||
public static SignificanceHeuristic parse(XContentParseContext context)
|
||||
public static SignificanceHeuristic parse(QueryParseContext context)
|
||||
throws IOException, QueryShardException {
|
||||
context.getParser().nextToken();
|
||||
context.parser().nextToken();
|
||||
return new SimpleHeuristic();
|
||||
}
|
||||
}
|
||||
|
@ -88,7 +88,7 @@ public class SignificantTermsTests extends BaseAggregationTestCase<SignificantTe
|
||||
case 2:
|
||||
case 3:
|
||||
case 4:
|
||||
minDocCount = randomInt();
|
||||
minDocCount = randomIntBetween(0, Integer.MAX_VALUE);
|
||||
break;
|
||||
}
|
||||
factory.bucketCountThresholds().setMinDocCount(minDocCount);
|
||||
@ -102,7 +102,7 @@ public class SignificantTermsTests extends BaseAggregationTestCase<SignificantTe
|
||||
case 2:
|
||||
case 3:
|
||||
case 4:
|
||||
shardMinDocCount = randomInt();
|
||||
shardMinDocCount = randomIntBetween(0, Integer.MAX_VALUE);
|
||||
break;
|
||||
default:
|
||||
fail();
|
||||
|
@ -69,10 +69,10 @@ public class TermsTests extends BaseAggregationTestCase<TermsAggregationBuilder>
|
||||
factory.missing("MISSING");
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
factory.bucketCountThresholds().setRequiredSize(randomIntBetween(1, Integer.MAX_VALUE));
|
||||
factory.size(randomIntBetween(1, Integer.MAX_VALUE));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
factory.bucketCountThresholds().setShardSize(randomIntBetween(1, Integer.MAX_VALUE));
|
||||
factory.shardSize(randomIntBetween(1, Integer.MAX_VALUE));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
int minDocCount = randomInt(4);
|
||||
@ -83,12 +83,12 @@ public class TermsTests extends BaseAggregationTestCase<TermsAggregationBuilder>
|
||||
case 2:
|
||||
case 3:
|
||||
case 4:
|
||||
minDocCount = randomInt();
|
||||
minDocCount = randomIntBetween(0, Integer.MAX_VALUE);
|
||||
break;
|
||||
default:
|
||||
fail();
|
||||
}
|
||||
factory.bucketCountThresholds().setMinDocCount(minDocCount);
|
||||
factory.minDocCount(minDocCount);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
int shardMinDocCount = randomInt(4);
|
||||
@ -99,12 +99,12 @@ public class TermsTests extends BaseAggregationTestCase<TermsAggregationBuilder>
|
||||
case 2:
|
||||
case 3:
|
||||
case 4:
|
||||
shardMinDocCount = randomInt();
|
||||
shardMinDocCount = randomIntBetween(0, Integer.MAX_VALUE);
|
||||
break;
|
||||
default:
|
||||
fail();
|
||||
}
|
||||
factory.bucketCountThresholds().setShardMinDocCount(shardMinDocCount);
|
||||
factory.shardMinDocCount(shardMinDocCount);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
factory.collectMode(randomFrom(SubAggCollectionMode.values()));
|
||||
|
@ -26,6 +26,8 @@ import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
||||
public class GeoHashGridParserTests extends ESTestCase {
|
||||
private static final IndicesQueriesRegistry mockRegistry = new IndicesQueriesRegistry();
|
||||
|
||||
@ -37,9 +39,8 @@ public class GeoHashGridParserTests extends ESTestCase {
|
||||
stParser, ParseFieldMatcher.STRICT);
|
||||
XContentParser.Token token = stParser.nextToken();
|
||||
assertSame(XContentParser.Token.START_OBJECT, token);
|
||||
GeoHashGridParser parser = new GeoHashGridParser();
|
||||
// can create a factory
|
||||
assertNotNull(parser.parse("geohash_grid", parseContext));
|
||||
assertNotNull(GeoGridAggregationBuilder.parse("geohash_grid", parseContext));
|
||||
}
|
||||
|
||||
public void testParseValidFromStrings() throws Exception {
|
||||
@ -49,9 +50,8 @@ public class GeoHashGridParserTests extends ESTestCase {
|
||||
QueryParseContext parseContext = new QueryParseContext(mockRegistry, stParser, ParseFieldMatcher.STRICT);
|
||||
XContentParser.Token token = stParser.nextToken();
|
||||
assertSame(XContentParser.Token.START_OBJECT, token);
|
||||
GeoHashGridParser parser = new GeoHashGridParser();
|
||||
// can create a factory
|
||||
assertNotNull(parser.parse("geohash_grid", parseContext));
|
||||
assertNotNull(GeoGridAggregationBuilder.parse("geohash_grid", parseContext));
|
||||
}
|
||||
|
||||
public void testParseErrorOnNonIntPrecision() throws Exception {
|
||||
@ -59,12 +59,12 @@ public class GeoHashGridParserTests extends ESTestCase {
|
||||
QueryParseContext parseContext = new QueryParseContext(mockRegistry, stParser, ParseFieldMatcher.STRICT);
|
||||
XContentParser.Token token = stParser.nextToken();
|
||||
assertSame(XContentParser.Token.START_OBJECT, token);
|
||||
GeoHashGridParser parser = new GeoHashGridParser();
|
||||
try {
|
||||
parser.parse("geohash_grid", parseContext);
|
||||
GeoGridAggregationBuilder.parse("geohash_grid", parseContext);
|
||||
fail();
|
||||
} catch (NumberFormatException ex) {
|
||||
assertEquals("For input string: \"2.0\"", ex.getMessage());
|
||||
} catch (ParsingException ex) {
|
||||
assertThat(ex.getCause(), instanceOf(NumberFormatException.class));
|
||||
assertEquals("For input string: \"2.0\"", ex.getCause().getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@ -73,12 +73,11 @@ public class GeoHashGridParserTests extends ESTestCase {
|
||||
QueryParseContext parseContext = new QueryParseContext(mockRegistry, stParser, ParseFieldMatcher.STRICT);
|
||||
XContentParser.Token token = stParser.nextToken();
|
||||
assertSame(XContentParser.Token.START_OBJECT, token);
|
||||
GeoHashGridParser parser = new GeoHashGridParser();
|
||||
try {
|
||||
parser.parse("geohash_grid", parseContext);
|
||||
GeoGridAggregationBuilder.parse("geohash_grid", parseContext);
|
||||
fail();
|
||||
} catch (ParsingException ex) {
|
||||
assertEquals("Unexpected token VALUE_BOOLEAN [precision] in [geohash_grid].", ex.getMessage());
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertEquals("[geohash_grid] precision doesn't support values of type: VALUE_BOOLEAN", ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@ -87,12 +86,12 @@ public class GeoHashGridParserTests extends ESTestCase {
|
||||
QueryParseContext parseContext = new QueryParseContext(mockRegistry, stParser, ParseFieldMatcher.STRICT);
|
||||
XContentParser.Token token = stParser.nextToken();
|
||||
assertSame(XContentParser.Token.START_OBJECT, token);
|
||||
GeoHashGridParser parser = new GeoHashGridParser();
|
||||
try {
|
||||
parser.parse("geohash_grid", parseContext);
|
||||
GeoGridAggregationBuilder.parse("geohash_grid", parseContext);
|
||||
fail();
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertEquals("Invalid geohash aggregation precision of 13. Must be between 1 and 12.", ex.getMessage());
|
||||
} catch (ParsingException ex) {
|
||||
assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class));
|
||||
assertEquals("Invalid geohash aggregation precision of 13. Must be between 1 and 12.", ex.getCause().getMessage());
|
||||
}
|
||||
}
|
||||
}
|
@ -19,9 +19,9 @@
|
||||
package org.elasticsearch.search.aggregations.bucket.significant;
|
||||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
@ -68,6 +68,7 @@ import static java.util.Collections.emptyMap;
|
||||
import static java.util.Collections.singletonList;
|
||||
import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms;
|
||||
import static org.elasticsearch.test.VersionUtils.randomVersion;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
@ -272,10 +273,10 @@ public class SignificanceHeuristicTests extends ESTestCase {
|
||||
"{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}");
|
||||
QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT);
|
||||
stParser.nextToken();
|
||||
new SignificantTermsParser(significanceHeuristicParserRegistry, registry).parse("testagg", parseContext);
|
||||
SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry).parse("testagg", parseContext);
|
||||
fail();
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertTrue(e.getMessage().contains(expectedError));
|
||||
} catch (ParsingException e) {
|
||||
assertThat(e.getCause().getMessage(), containsString(expectedError));
|
||||
}
|
||||
}
|
||||
|
||||
@ -295,8 +296,9 @@ public class SignificanceHeuristicTests extends ESTestCase {
|
||||
IndicesQueriesRegistry registry = new IndicesQueriesRegistry();
|
||||
QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT);
|
||||
stParser.nextToken();
|
||||
SignificantTermsAggregationBuilder aggregatorFactory = (SignificantTermsAggregationBuilder) new SignificantTermsParser(
|
||||
significanceHeuristicParserRegistry, registry).parse("testagg", parseContext);
|
||||
SignificantTermsAggregationBuilder aggregatorFactory =
|
||||
(SignificantTermsAggregationBuilder) SignificantTermsAggregationBuilder.getParser(
|
||||
significanceHeuristicParserRegistry).parse("testagg", parseContext);
|
||||
stParser.nextToken();
|
||||
assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L));
|
||||
assertThat(stParser.currentToken(), equalTo(null));
|
||||
|
@ -188,7 +188,7 @@ final class RemoteResponseParsers {
|
||||
return new Response(timedOut, failures, totalHits, hits, scroll);
|
||||
});
|
||||
static {
|
||||
RESPONSE_PARSER.declareObject(optionalConstructorArg(), ThrowableBuilder.PARSER, new ParseField("error"));
|
||||
RESPONSE_PARSER.declareObject(optionalConstructorArg(), ThrowableBuilder.PARSER::apply, new ParseField("error"));
|
||||
RESPONSE_PARSER.declareBoolean(optionalConstructorArg(), new ParseField("timed_out"));
|
||||
RESPONSE_PARSER.declareString(optionalConstructorArg(), new ParseField("_scroll_id"));
|
||||
RESPONSE_PARSER.declareObject(optionalConstructorArg(), HITS_PARSER, new ParseField("hits"));
|
||||
@ -205,7 +205,7 @@ final class RemoteResponseParsers {
|
||||
PARSER = parser.andThen(ThrowableBuilder::build);
|
||||
parser.declareString(ThrowableBuilder::setType, new ParseField("type"));
|
||||
parser.declareString(ThrowableBuilder::setReason, new ParseField("reason"));
|
||||
parser.declareObject(ThrowableBuilder::setCausedBy, PARSER, new ParseField("caused_by"));
|
||||
parser.declareObject(ThrowableBuilder::setCausedBy, PARSER::apply, new ParseField("caused_by"));
|
||||
|
||||
// So we can give a nice error for parsing exceptions
|
||||
parser.declareInt(ThrowableBuilder::setLine, new ParseField("line"));
|
||||
|
Loading…
x
Reference in New Issue
Block a user