Aggregations cleanup

- Removed an abstraction layer that handled the values source (consolidated the values source with the field data source)
- Improved handling of the value parser/formatter in the range & histogram aggregations
- The bucket's key is now shown by default in the range aggregation
This commit is contained in:
uboness 2014-03-31 06:51:32 +02:00
parent 2cd6772329
commit d6636fc50c
79 changed files with 1368 additions and 1484 deletions

View File

@ -27,7 +27,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import java.io.IOException;
import java.util.Arrays;
@ -44,11 +44,11 @@ public class GeoHashGridAggregator extends BucketsAggregator {
private final int requiredSize;
private final int shardSize;
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private final LongHash bucketOrds;
private LongValues values;
public GeoHashGridAggregator(String name, AggregatorFactories factories, NumericValuesSource valuesSource,
public GeoHashGridAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource,
int requiredSize, int shardSize, AggregationContext aggregationContext, Aggregator parent) {
super(name, BucketAggregationMode.PER_BUCKET, factories, INITIAL_CAPACITY, aggregationContext, parent);
this.valuesSource = valuesSource;

View File

@ -30,8 +30,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.BucketUtils;
import org.elasticsearch.search.aggregations.support.*;
import org.elasticsearch.search.aggregations.support.geopoints.GeoPointValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -99,7 +97,7 @@ public class GeoHashGridParser implements Aggregator.Parser {
shardSize = requiredSize;
}
ValuesSourceConfig<GeoPointValuesSource> config = new ValuesSourceConfig<>(GeoPointValuesSource.class);
ValuesSourceConfig<ValuesSource.GeoPoint> config = new ValuesSourceConfig<>(ValuesSource.GeoPoint.class);
if (field == null) {
return new GeoGridFactory(aggregationName, config, precision, requiredSize, shardSize);
}
@ -116,13 +114,13 @@ public class GeoHashGridParser implements Aggregator.Parser {
}
private static class GeoGridFactory extends ValueSourceAggregatorFactory<GeoPointValuesSource> {
private static class GeoGridFactory extends ValuesSourceAggregatorFactory<ValuesSource.GeoPoint> {
private int precision;
private int requiredSize;
private int shardSize;
public GeoGridFactory(String name, ValuesSourceConfig<GeoPointValuesSource> valueSourceConfig,
public GeoGridFactory(String name, ValuesSourceConfig<ValuesSource.GeoPoint> valueSourceConfig,
int precision, int requiredSize, int shardSize) {
super(name, InternalGeoHashGrid.TYPE.name(), valueSourceConfig);
this.precision = precision;
@ -141,26 +139,24 @@ public class GeoHashGridParser implements Aggregator.Parser {
}
@Override
protected Aggregator create(final GeoPointValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(final ValuesSource.GeoPoint valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
final CellValues cellIdValues = new CellValues(valuesSource, precision);
FieldDataSource.Numeric cellIdSource = new CellIdSource(cellIdValues, valuesSource.metaData());
ValuesSource.Numeric cellIdSource = new CellIdSource(cellIdValues, valuesSource.metaData());
if (cellIdSource.metaData().multiValued()) {
// we need to wrap to ensure uniqueness
cellIdSource = new FieldDataSource.Numeric.SortedAndUnique(cellIdSource);
cellIdSource = new ValuesSource.Numeric.SortedAndUnique(cellIdSource);
}
final NumericValuesSource geohashIdSource = new NumericValuesSource(cellIdSource, null, null);
return new GeoHashGridAggregator(name, factories, geohashIdSource, requiredSize,
shardSize, aggregationContext, parent);
return new GeoHashGridAggregator(name, factories, cellIdSource, requiredSize, shardSize, aggregationContext, parent);
}
private static class CellValues extends LongValues {
private GeoPointValuesSource geoPointValues;
private ValuesSource.GeoPoint geoPointValues;
private GeoPointValues geoValues;
private int precision;
protected CellValues(GeoPointValuesSource geoPointValues, int precision) {
protected CellValues(ValuesSource.GeoPoint geoPointValues, int precision) {
super(true);
this.geoPointValues = geoPointValues;
this.precision = precision;
@ -168,7 +164,7 @@ public class GeoHashGridParser implements Aggregator.Parser {
@Override
public int setDocument(int docId) {
geoValues = geoPointValues.values();
geoValues = geoPointValues.geoPointValues();
return geoValues.setDocument(docId);
}
@ -180,7 +176,7 @@ public class GeoHashGridParser implements Aggregator.Parser {
}
private static class CellIdSource extends FieldDataSource.Numeric {
private static class CellIdSource extends ValuesSource.Numeric {
private final LongValues values;
private MetaData metaData;

View File

@ -33,11 +33,11 @@ import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueParser;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.DateTimeZone;
@ -82,7 +82,7 @@ public class DateHistogramParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<NumericValuesSource> config = new ValuesSourceConfig<>(NumericValuesSource.class);
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
String field = null;
String script = null;
@ -230,30 +230,25 @@ public class DateHistogramParser implements Aggregator.Parser {
.preOffset(preOffset).postOffset(postOffset)
.build();
if (format != null) {
config.formatter(new ValueFormatter.DateTime(format));
}
ValueFormatter valueFormatter = format != null ? new ValueFormatter.DateTime(format) : null;
if (field == null) {
if (searchScript != null) {
ValueParser valueParser = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER, DateFieldMapper.Defaults.TIME_UNIT));
config.parser(valueParser);
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
return new HistogramAggregator.Factory(aggregationName, config, valueFormatter, valueParser, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
}
// falling back on the get field data context
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
return new HistogramAggregator.Factory(aggregationName, config, valueFormatter, null, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
if (format == null) {
config.formatter(new ValueFormatter.DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER));
}
config.parser(new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER, DateFieldMapper.Defaults.TIME_UNIT)));
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
valueFormatter = format == null ? new ValueFormatter.DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER) : null;
ValueParser valueParser = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER, DateFieldMapper.Defaults.TIME_UNIT));
return new HistogramAggregator.Factory(aggregationName, config, valueFormatter, valueParser, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
}
if (!(mapper instanceof DateFieldMapper)) {
@ -263,10 +258,10 @@ public class DateHistogramParser implements Aggregator.Parser {
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
if (format == null) {
config.formatter(new ValueFormatter.DateTime(((DateFieldMapper) mapper).dateTimeFormatter()));
valueFormatter = new ValueFormatter.DateTime(((DateFieldMapper) mapper).dateTimeFormatter());
}
config.parser(new ValueParser.DateMath(new DateMathParser(((DateFieldMapper) mapper).dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT)));
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
ValueParser valueParser = new ValueParser.DateMath(new DateMathParser(((DateFieldMapper) mapper).dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT));
return new HistogramAggregator.Factory(aggregationName, config, valueFormatter, valueParser, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
}
private static InternalOrder resolveOrder(String key, boolean asc) {

View File

@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.support.numeric.ValueParser;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;

View File

@ -30,10 +30,11 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import java.io.IOException;
import java.util.ArrayList;
@ -42,7 +43,9 @@ import java.util.List;
public class HistogramAggregator extends BucketsAggregator {
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private final ValueFormatter formatter;
private final ValueParser parser;
private final Rounding rounding;
private final InternalOrder order;
private final boolean keyed;
@ -55,7 +58,8 @@ public class HistogramAggregator extends BucketsAggregator {
public HistogramAggregator(String name, AggregatorFactories factories, Rounding rounding, InternalOrder order,
boolean keyed, long minDocCount, @Nullable ExtendedBounds extendedBounds,
@Nullable NumericValuesSource valuesSource, long initialCapacity, InternalHistogram.Factory<?> histogramFactory,
@Nullable ValuesSource.Numeric valuesSource, ValueFormatter formatter, ValueParser parser,
long initialCapacity, InternalHistogram.Factory<?> histogramFactory,
AggregationContext aggregationContext, Aggregator parent) {
super(name, BucketAggregationMode.PER_BUCKET, factories, initialCapacity, aggregationContext, parent);
@ -65,6 +69,8 @@ public class HistogramAggregator extends BucketsAggregator {
this.minDocCount = minDocCount;
this.extendedBounds = extendedBounds;
this.valuesSource = valuesSource;
this.formatter = formatter;
this.parser = parser;
this.histogramFactory = histogramFactory;
bucketOrds = new LongHash(initialCapacity, aggregationContext.bigArrays());
@ -111,20 +117,18 @@ public class HistogramAggregator extends BucketsAggregator {
if (ord < 0) {
continue; // slot is not allocated
}
buckets.add(histogramFactory.createBucket(rounding.valueForKey(bucketOrds.key(i)), bucketDocCount(ord), bucketAggregations(ord), valuesSource.formatter()));
buckets.add(histogramFactory.createBucket(rounding.valueForKey(bucketOrds.key(i)), bucketDocCount(ord), bucketAggregations(ord), formatter));
}
CollectionUtil.introSort(buckets, order.comparator());
// value source will be null for unmapped fields
ValueFormatter formatter = valuesSource != null ? valuesSource.formatter() : null;
InternalHistogram.EmptyBucketInfo emptyBucketInfo = minDocCount == 0 ? new InternalHistogram.EmptyBucketInfo(rounding, buildEmptySubAggregations(), extendedBounds) : null;
return histogramFactory.create(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed);
}
@Override
public InternalAggregation buildEmptyAggregation() {
ValueFormatter formatter = valuesSource != null ? valuesSource.formatter() : null;
InternalHistogram.EmptyBucketInfo emptyBucketInfo = minDocCount == 0 ? new InternalHistogram.EmptyBucketInfo(rounding, buildEmptySubAggregations(), extendedBounds) : null;
return histogramFactory.create(name, Collections.emptyList(), order, minDocCount, emptyBucketInfo, formatter, keyed);
}
@ -134,7 +138,7 @@ public class HistogramAggregator extends BucketsAggregator {
Releasables.release(bucketOrds);
}
public static class Factory extends ValueSourceAggregatorFactory<NumericValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric> {
private final Rounding rounding;
private final InternalOrder order;
@ -143,11 +147,11 @@ public class HistogramAggregator extends BucketsAggregator {
private final ExtendedBounds extendedBounds;
private final InternalHistogram.Factory<?> histogramFactory;
public Factory(String name, ValuesSourceConfig<NumericValuesSource> valueSourceConfig,
public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> config, ValueFormatter formatter, ValueParser parser,
Rounding rounding, InternalOrder order, boolean keyed, long minDocCount,
ExtendedBounds extendedBounds, InternalHistogram.Factory<?> histogramFactory) {
super(name, histogramFactory.type(), valueSourceConfig);
super(name, histogramFactory.type(), config, formatter, parser);
this.rounding = rounding;
this.order = order;
this.keyed = keyed;
@ -158,11 +162,11 @@ public class HistogramAggregator extends BucketsAggregator {
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) {
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, null, null, 0, histogramFactory, aggregationContext, parent);
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, null, null, null, null, 0, histogramFactory, aggregationContext, parent);
}
@Override
protected Aggregator create(NumericValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
// todo if we'll keep track of min/max values in IndexFieldData, we could use the max here to come up with a better estimation for the buckets count
// we need to round the bounds given by the user and we have to do it for every aggregator we crate
@ -171,10 +175,10 @@ public class HistogramAggregator extends BucketsAggregator {
ExtendedBounds roundedBounds = null;
if (extendedBounds != null) {
// we need to process & validate here using the parser
extendedBounds.processAndValidate(name, aggregationContext.searchContext(), valuesSource != null ? valuesSource.parser() : null);
extendedBounds.processAndValidate(name, aggregationContext.searchContext(), parser);
roundedBounds = extendedBounds.round(rounding);
}
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, roundedBounds, valuesSource, 50, histogramFactory, aggregationContext, parent);
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, roundedBounds, valuesSource, formatter, parser, 50, histogramFactory, aggregationContext, parent);
}
}

View File

@ -26,10 +26,11 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -50,7 +51,7 @@ public class HistogramParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<NumericValuesSource> config = new ValuesSourceConfig<>(NumericValuesSource.class);
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
String field = null;
String script = null;
@ -76,8 +77,6 @@ public class HistogramParser implements Aggregator.Parser {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else if ("format".equals(currentFieldName)) {
format = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
}
@ -157,23 +156,22 @@ public class HistogramParser implements Aggregator.Parser {
}
if (field == null) {
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds, InternalHistogram.FACTORY);
return new HistogramAggregator.Factory(aggregationName, config, null, null, rounding, order, keyed, minDocCount, extendedBounds, InternalHistogram.FACTORY);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds, InternalHistogram.FACTORY);
return new HistogramAggregator.Factory(aggregationName, config, null, null, rounding, order, keyed, minDocCount, extendedBounds, InternalHistogram.FACTORY);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
if (format != null) {
config.formatter(new ValueFormatter.Number.Pattern(format));
}
ValueFormatter valueFormatter = format == null ? ValueFormatter.RAW : new ValueFormatter.Number.Pattern(format);
ValueParser valueParser = format == null ? ValueParser.RAW : new ValueParser.Number.Pattern(format);
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds, InternalHistogram.FACTORY);
return new HistogramAggregator.Factory(aggregationName, config, valueFormatter, valueParser, rounding, order, keyed, minDocCount, extendedBounds, InternalHistogram.FACTORY);
}

View File

@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -62,7 +62,7 @@ public class InternalDateHistogram extends InternalHistogram<InternalDateHistogr
@Override
public String getKey() {
return formatter != null ? formatter.format(key) : DateFieldMapper.Defaults.DATE_TIME_FORMATTER.printer().print(key);
return formatter != null ? formatter.format(key) : ValueFormatter.DateTime.DEFAULT.format(key);
}
@Override

View File

@ -34,8 +34,8 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.ArrayList;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import java.io.IOException;
@ -69,7 +69,7 @@ public class MissingAggregator extends SingleBucketAggregator {
return new InternalMissing(name, 0, buildEmptySubAggregations());
}
public static class Factory extends ValueSourceAggregatorFactory {
public static class Factory extends ValuesSourceAggregatorFactory {
public Factory(String name, ValuesSourceConfig valueSourceConfig) {
super(name, InternalMissing.TYPE.name(), valueSourceConfig);

View File

@ -23,8 +23,8 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;

View File

@ -29,8 +29,8 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.*;
@ -64,16 +64,9 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalAggre
private long docCount;
InternalAggregations aggregations;
private String key;
private boolean explicitKey;
public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
if (key != null) {
this.key = key;
explicitKey = true;
} else {
this.key = key(from, to, formatter);
explicitKey = false;
}
this.key = key != null ? key : key(from, to, formatter);
this.from = from;
this.to = to;
this.docCount = docCount;
@ -135,9 +128,7 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalAggre
builder.startObject(key);
} else {
builder.startObject();
if (explicitKey) {
builder.field(CommonFields.KEY, key);
}
builder.field(CommonFields.KEY, key);
}
if (!Double.isInfinite(from)) {
builder.field(CommonFields.FROM, from);

View File

@ -25,11 +25,11 @@ import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueParser;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import java.io.IOException;
import java.util.ArrayList;
@ -75,7 +75,8 @@ public class RangeAggregator extends BucketsAggregator {
}
}
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private final ValueFormatter formatter;
private final Range[] ranges;
private final boolean keyed;
private final InternalRange.Factory rangeFactory;
@ -85,7 +86,9 @@ public class RangeAggregator extends BucketsAggregator {
public RangeAggregator(String name,
AggregatorFactories factories,
NumericValuesSource valuesSource,
ValuesSource.Numeric valuesSource,
ValueFormatter formatter,
ValueParser parser,
InternalRange.Factory rangeFactory,
List<Range> ranges,
boolean keyed,
@ -95,11 +98,12 @@ public class RangeAggregator extends BucketsAggregator {
super(name, BucketAggregationMode.MULTI_BUCKETS, factories, ranges.size() * (parent == null ? 1 : parent.estimatedBucketCount()), aggregationContext, parent);
assert valuesSource != null;
this.valuesSource = valuesSource;
this.formatter = formatter;
this.keyed = keyed;
this.rangeFactory = rangeFactory;
this.ranges = ranges.toArray(new Range[ranges.size()]);
for (int i = 0; i < this.ranges.length; i++) {
this.ranges[i].process(valuesSource.parser(), context);
this.ranges[i].process(parser, context);
}
sortRanges(this.ranges);
@ -190,11 +194,10 @@ public class RangeAggregator extends BucketsAggregator {
Range range = ranges[i];
final long bucketOrd = subBucketOrdinal(owningBucketOrdinal, i);
org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket = rangeFactory.createBucket(
range.key, range.from, range.to, bucketDocCount(bucketOrd),bucketAggregations(bucketOrd), valuesSource.formatter());
range.key, range.from, range.to, bucketDocCount(bucketOrd),bucketAggregations(bucketOrd), formatter);
buckets.add(bucket);
}
// value source can be null in the case of unmapped fields
ValueFormatter formatter = valuesSource != null ? valuesSource.formatter() : null;
return rangeFactory.create(name, buckets, formatter, keyed, false);
}
@ -205,11 +208,10 @@ public class RangeAggregator extends BucketsAggregator {
for (int i = 0; i < ranges.length; i++) {
Range range = ranges[i];
org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket = rangeFactory.createBucket(
range.key, range.from, range.to, 0, subAggs, valuesSource.formatter());
range.key, range.from, range.to, 0, subAggs, formatter);
buckets.add(bucket);
}
// value source can be null in the case of unmapped fields
ValueFormatter formatter = valuesSource != null ? valuesSource.formatter() : null;
return rangeFactory.create(name, buckets, formatter, keyed, false);
}
@ -274,14 +276,14 @@ public class RangeAggregator extends BucketsAggregator {
}
}
public static class Factory extends ValueSourceAggregatorFactory<NumericValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric> {
private final InternalRange.Factory rangeFactory;
private final List<Range> ranges;
private final boolean keyed;
public Factory(String name, ValuesSourceConfig<NumericValuesSource> valueSourceConfig, InternalRange.Factory rangeFactory, List<Range> ranges, boolean keyed) {
super(name, rangeFactory.type(), valueSourceConfig);
public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valueSourceConfig, ValueFormatter formatter, ValueParser parser, InternalRange.Factory rangeFactory, List<Range> ranges, boolean keyed) {
super(name, rangeFactory.type(), valueSourceConfig, formatter, parser);
this.rangeFactory = rangeFactory;
this.ranges = ranges;
this.keyed = keyed;
@ -289,12 +291,12 @@ public class RangeAggregator extends BucketsAggregator {
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) {
return new Unmapped(name, ranges, keyed, valuesSourceConfig.formatter(), valuesSourceConfig.parser(), aggregationContext, parent, rangeFactory);
return new Unmapped(name, ranges, keyed, formatter, parser, aggregationContext, parent, rangeFactory);
}
@Override
protected Aggregator create(NumericValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new RangeAggregator(name, factories, valuesSource, rangeFactory, ranges, keyed, aggregationContext, parent);
protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new RangeAggregator(name, factories, valuesSource, formatter, parser, rangeFactory, ranges, keyed, aggregationContext, parent);
}
}

View File

@ -18,16 +18,21 @@
*/
package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
/**
*
*/
public class RangeBuilder extends AbstractRangeBuilder<RangeBuilder> {
private String format;
public RangeBuilder(String name) {
super(name, InternalRange.TYPE.name());
}
public RangeBuilder addRange(String key, double from, double to) {
ranges.add(new Range(key, from, to));
return this;
@ -55,4 +60,19 @@ public class RangeBuilder extends AbstractRangeBuilder<RangeBuilder> {
return addUnboundedFrom(null, from);
}
public RangeBuilder format(String format) {
this.format = format;
return this;
}
@Override
protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException {
super.doInternalXContent(builder, params);
if (format != null) {
builder.field("format", format);
}
return builder;
}
}

View File

@ -25,8 +25,10 @@ import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -47,7 +49,7 @@ public class RangeParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<NumericValuesSource> config = new ValuesSourceConfig<>(NumericValuesSource.class);
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
String field = null;
List<RangeAggregator.Range> ranges = null;
@ -56,6 +58,7 @@ public class RangeParser implements Aggregator.Parser {
Map<String, Object> scriptParams = null;
boolean keyed = false;
boolean assumeSorted = false;
String format = null;
XContentParser.Token token;
String currentFieldName = null;
@ -69,6 +72,8 @@ public class RangeParser implements Aggregator.Parser {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else if ("format".equals(currentFieldName)) {
format = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
@ -139,18 +144,20 @@ public class RangeParser implements Aggregator.Parser {
}
if (field == null) {
return new RangeAggregator.Factory(aggregationName, config, InternalRange.FACTORY, ranges, keyed);
return new RangeAggregator.Factory(aggregationName, config, null, null, InternalRange.FACTORY, ranges, keyed);
}
ValueFormatter valueFormatter = format == null ? ValueFormatter.RAW : new ValueFormatter.Number.Pattern(format);
ValueParser valueParser = format == null ? ValueParser.RAW : new ValueParser.Number.Pattern(format);
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new RangeAggregator.Factory(aggregationName, config, InternalRange.FACTORY, ranges, keyed);
return new RangeAggregator.Factory(aggregationName, config, valueFormatter, valueParser, InternalRange.FACTORY, ranges, keyed);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
return new RangeAggregator.Factory(aggregationName, config, InternalRange.FACTORY, ranges, keyed);
return new RangeAggregator.Factory(aggregationName, config, valueFormatter, valueParser, InternalRange.FACTORY, ranges, keyed);
}
}

View File

@ -27,11 +27,11 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueParser;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -52,7 +52,7 @@ public class DateRangeParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<NumericValuesSource> config = new ValuesSourceConfig<>(NumericValuesSource.class);
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
String field = null;
List<RangeAggregator.Range> ranges = null;
@ -148,22 +148,17 @@ public class DateRangeParser implements Aggregator.Parser {
config.ensureSorted(true);
}
if (format != null) {
config.formatter(new ValueFormatter.DateTime(format));
} else {
config.formatter(ValueFormatter.DateTime.DEFAULT);
}
config.parser(ValueParser.DateMath.DEFAULT);
ValueFormatter valueFormatter = format != null ? new ValueFormatter.DateTime(format) : ValueFormatter.DateTime.DEFAULT;
ValueParser valueParser = ValueParser.DateMath.DEFAULT;
if (field == null) {
return new RangeAggregator.Factory(aggregationName, config, InternalDateRange.FACTORY, ranges, keyed);
return new RangeAggregator.Factory(aggregationName, config, valueFormatter, valueParser, InternalDateRange.FACTORY, ranges, keyed);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new RangeAggregator.Factory(aggregationName, config, InternalDateRange.FACTORY, ranges, keyed);
return new RangeAggregator.Factory(aggregationName, config, valueFormatter, valueParser, InternalDateRange.FACTORY, ranges, keyed);
}
if (!(mapper instanceof DateFieldMapper)) {
@ -173,9 +168,9 @@ public class DateRangeParser implements Aggregator.Parser {
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
if (format == null) {
config.formatter(new ValueFormatter.DateTime(((DateFieldMapper) mapper).dateTimeFormatter()));
valueFormatter = new ValueFormatter.DateTime(((DateFieldMapper) mapper).dateTimeFormatter());
}
config.parser(new ValueParser.DateMath(((DateFieldMapper) mapper).dateMathParser()));
return new RangeAggregator.Factory(aggregationName, config, InternalDateRange.FACTORY, ranges, keyed);
valueParser = new ValueParser.DateMath(((DateFieldMapper) mapper).dateMathParser());
return new RangeAggregator.Factory(aggregationName, config, valueFormatter, valueParser, InternalDateRange.FACTORY, ranges, keyed);
}
}

View File

@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

View File

@ -31,8 +31,7 @@ import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Unmapped;
import org.elasticsearch.search.aggregations.support.*;
import org.elasticsearch.search.aggregations.support.geopoints.GeoPointValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -178,7 +177,7 @@ public class GeoDistanceParser implements Aggregator.Parser {
throw new SearchParseException(context, "Missing [origin] in geo_distance aggregator [" + aggregationName + "]");
}
ValuesSourceConfig<GeoPointValuesSource> config = new ValuesSourceConfig<>(GeoPointValuesSource.class);
ValuesSourceConfig<ValuesSource.GeoPoint> config = new ValuesSourceConfig<>(ValuesSource.GeoPoint.class);
if (field == null) {
return new GeoDistanceFactory(aggregationName, config, InternalGeoDistance.FACTORY, origin, unit, distanceType, ranges, keyed);
@ -195,7 +194,7 @@ public class GeoDistanceParser implements Aggregator.Parser {
return new GeoDistanceFactory(aggregationName, config, InternalGeoDistance.FACTORY, origin, unit, distanceType, ranges, keyed);
}
private static class GeoDistanceFactory extends ValueSourceAggregatorFactory<GeoPointValuesSource> {
private static class GeoDistanceFactory extends ValuesSourceAggregatorFactory<ValuesSource.GeoPoint> {
private final GeoPoint origin;
private final DistanceUnit unit;
@ -204,7 +203,7 @@ public class GeoDistanceParser implements Aggregator.Parser {
private final List<RangeAggregator.Range> ranges;
private final boolean keyed;
public GeoDistanceFactory(String name, ValuesSourceConfig<GeoPointValuesSource> valueSourceConfig,
public GeoDistanceFactory(String name, ValuesSourceConfig<ValuesSource.GeoPoint> valueSourceConfig,
InternalRange.Factory rangeFactory, GeoPoint origin, DistanceUnit unit, GeoDistance distanceType,
List<RangeAggregator.Range> ranges, boolean keyed) {
super(name, rangeFactory.type(), valueSourceConfig);
@ -218,30 +217,29 @@ public class GeoDistanceParser implements Aggregator.Parser {
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) {
return new Unmapped(name, ranges, keyed, valuesSourceConfig.formatter(), valuesSourceConfig.parser(), aggregationContext, parent, rangeFactory);
return new Unmapped(name, ranges, keyed, null, null, aggregationContext, parent, rangeFactory);
}
@Override
protected Aggregator create(final GeoPointValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(final ValuesSource.GeoPoint valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
final DistanceValues distanceValues = new DistanceValues(valuesSource, distanceType, origin, unit);
FieldDataSource.Numeric distanceSource = new DistanceSource(distanceValues, valuesSource.metaData());
ValuesSource.Numeric distanceSource = new DistanceSource(distanceValues, valuesSource.metaData());
if (distanceSource.metaData().multiValued()) {
// we need to ensure uniqueness
distanceSource = new FieldDataSource.Numeric.SortedAndUnique(distanceSource);
distanceSource = new ValuesSource.Numeric.SortedAndUnique(distanceSource);
}
final NumericValuesSource numericSource = new NumericValuesSource(distanceSource, null, null);
return new RangeAggregator(name, factories, numericSource, rangeFactory, ranges, keyed, aggregationContext, parent);
return new RangeAggregator(name, factories, distanceSource, null, null, rangeFactory, ranges, keyed, aggregationContext, parent);
}
private static class DistanceValues extends DoubleValues {
private final GeoPointValuesSource geoPointValues;
private final ValuesSource.GeoPoint geoPointValues;
private GeoPointValues geoValues;
private final GeoDistance distanceType;
private final GeoPoint origin;
private final DistanceUnit unit;
protected DistanceValues(GeoPointValuesSource geoPointValues, GeoDistance distanceType, GeoPoint origin, DistanceUnit unit) {
protected DistanceValues(ValuesSource.GeoPoint geoPointValues, GeoDistance distanceType, GeoPoint origin, DistanceUnit unit) {
super(true);
this.geoPointValues = geoPointValues;
this.distanceType = distanceType;
@ -251,7 +249,7 @@ public class GeoDistanceParser implements Aggregator.Parser {
@Override
public int setDocument(int docId) {
geoValues = geoPointValues.values();
geoValues = geoPointValues.geoPointValues();
return geoValues.setDocument(docId);
}
@ -263,7 +261,7 @@ public class GeoDistanceParser implements Aggregator.Parser {
}
private static class DistanceSource extends FieldDataSource.Numeric {
private static class DistanceSource extends ValuesSource.Numeric {
private final DoubleValues values;
private final MetaData metaData;

View File

@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;

View File

@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;

View File

@ -27,11 +27,11 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueParser;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -52,7 +52,7 @@ public class IpRangeParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<NumericValuesSource> config = new ValuesSourceConfig<>(NumericValuesSource.class);
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
String field = null;
List<RangeAggregator.Range> ranges = null;
@ -150,17 +150,14 @@ public class IpRangeParser implements Aggregator.Parser {
config.ensureSorted(true);
}
config.formatter(ValueFormatter.IPv4);
config.parser(ValueParser.IPv4);
if (field == null) {
return new RangeAggregator.Factory(aggregationName, config, InternalIPv4Range.FACTORY, ranges, keyed);
return new RangeAggregator.Factory(aggregationName, config, ValueFormatter.IPv4, ValueParser.IPv4, InternalIPv4Range.FACTORY, ranges, keyed);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new RangeAggregator.Factory(aggregationName, config, InternalIPv4Range.FACTORY, ranges, keyed);
return new RangeAggregator.Factory(aggregationName, config, ValueFormatter.IPv4, ValueParser.IPv4, InternalIPv4Range.FACTORY, ranges, keyed);
}
if (!(mapper instanceof IpFieldMapper)) {
@ -169,7 +166,7 @@ public class IpRangeParser implements Aggregator.Parser {
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
return new RangeAggregator.Factory(aggregationName, config, InternalIPv4Range.FACTORY, ranges, keyed);
return new RangeAggregator.Factory(aggregationName, config, ValueFormatter.IPv4, ValueParser.IPv4, InternalIPv4Range.FACTORY, ranges, keyed);
}
private static void parseMaskRange(String cidr, RangeAggregator.Range range, String aggregationName, SearchContext ctx) {

View File

@ -18,7 +18,6 @@
*/
package org.elasticsearch.search.aggregations.bucket.significant;
import com.google.common.primitives.Longs;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.StringText;
@ -26,8 +25,8 @@ import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.ArrayList;

View File

@ -24,7 +24,8 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.terms.LongTermsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import java.io.IOException;
@ -36,11 +37,11 @@ import java.util.Collections;
*/
public class SignificantLongTermsAggregator extends LongTermsAggregator {
public SignificantLongTermsAggregator(String name, AggregatorFactories factories, NumericValuesSource valuesSource,
public SignificantLongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, ValueFormatter formatter,
long estimatedBucketCount, int requiredSize, int shardSize, long minDocCount,
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory) {
super(name, factories, valuesSource, estimatedBucketCount, null, requiredSize, shardSize, minDocCount, aggregationContext, parent);
super(name, factories, valuesSource, formatter, estimatedBucketCount, null, requiredSize, shardSize, minDocCount, aggregationContext, parent);
this.termsAggFactory = termsAggFactory;
}
@ -96,7 +97,7 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator {
bucket.aggregations = bucketAggregations(bucket.bucketOrd);
list[i] = bucket;
}
return new SignificantLongTerms(subsetSize, supersetSize, name, valuesSource.formatter(), requiredSize, minDocCount,
return new SignificantLongTerms(subsetSize, supersetSize, name, formatter, requiredSize, minDocCount,
Arrays.asList(list));
}
@ -106,7 +107,7 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator {
ContextIndexSearcher searcher = context.searchContext().searcher();
IndexReader topReader = searcher.getIndexReader();
int supersetSize = topReader.numDocs();
return new SignificantLongTerms(0, supersetSize, name, valuesSource.formatter(), requiredSize, minDocCount, Collections.<InternalSignificantTerms.Bucket>emptyList());
return new SignificantLongTerms(0, supersetSize, name, formatter, requiredSize, minDocCount, Collections.<InternalSignificantTerms.Bucket>emptyList());
}
@Override

View File

@ -32,7 +32,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.StringTermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import java.io.IOException;
@ -128,12 +127,12 @@ public class SignificantStringTermsAggregator extends StringTermsAggregator {
*/
public static class WithOrdinals extends SignificantStringTermsAggregator {
private final BytesValuesSource.WithOrdinals valuesSource;
private final ValuesSource.Bytes.WithOrdinals valuesSource;
private BytesValues.WithOrdinals bytesValues;
private Ordinals.Docs ordinals;
private LongArray ordinalToBucket;
public WithOrdinals(String name, AggregatorFactories factories, BytesValuesSource.WithOrdinals valuesSource,
public WithOrdinals(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
long esitmatedBucketCount, int requiredSize, int shardSize, long minDocCount, AggregationContext aggregationContext,
Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory) {
super(name, factories, valuesSource, esitmatedBucketCount, requiredSize, shardSize, minDocCount, null, aggregationContext, parent, termsAggFactory);

View File

@ -35,11 +35,11 @@ import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.Aggregator.BucketAggregationMode;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
@ -48,7 +48,7 @@ import java.io.IOException;
/**
*
*/
public class SignificantTermsAggregatorFactory extends ValueSourceAggregatorFactory implements Releasable {
public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFactory implements Releasable {
public static final String EXECUTION_HINT_VALUE_MAP = "map";
public static final String EXECUTION_HINT_VALUE_ORDINALS = "ordinals";
@ -67,17 +67,17 @@ public class SignificantTermsAggregatorFactory extends ValueSourceAggregatorFact
private TermsEnum termsEnum;
private int numberOfAggregatorsCreated = 0;
public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, int requiredSize,
int shardSize, long minDocCount, IncludeExclude includeExclude, String executionHint) {
public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, ValueFormatter formatter, ValueParser parser,
int requiredSize, int shardSize, long minDocCount, IncludeExclude includeExclude, String executionHint) {
super(name, SignificantStringTerms.TYPE.name(), valueSourceConfig);
super(name, SignificantStringTerms.TYPE.name(), valueSourceConfig, formatter, parser);
this.requiredSize = requiredSize;
this.shardSize = shardSize;
this.minDocCount = minDocCount;
this.includeExclude = includeExclude;
this.executionHint = executionHint;
if (!valueSourceConfig.unmapped()) {
this.indexedFieldName = valuesSourceConfig.fieldContext().field();
this.indexedFieldName = config.fieldContext().field();
mapper = SearchContext.current().smartNameFieldMapper(indexedFieldName);
}
bigArrays = SearchContext.current().bigArrays();
@ -144,18 +144,18 @@ public class SignificantTermsAggregatorFactory extends ValueSourceAggregatorFact
// And that all values are not necessarily visited by the matches.
estimatedBucketCount = Math.min(estimatedBucketCount, 512);
if (valuesSource instanceof BytesValuesSource) {
if (valuesSource instanceof ValuesSource.Bytes) {
if (executionHint != null && !executionHint.equals(EXECUTION_HINT_VALUE_MAP) && !executionHint.equals(EXECUTION_HINT_VALUE_ORDINALS)) {
throw new ElasticsearchIllegalArgumentException("execution_hint can only be '" + EXECUTION_HINT_VALUE_MAP + "' or '" + EXECUTION_HINT_VALUE_ORDINALS + "', not " + executionHint);
}
String execution = executionHint;
if (!(valuesSource instanceof BytesValuesSource.WithOrdinals)) {
if (!(valuesSource instanceof ValuesSource.Bytes.WithOrdinals)) {
execution = EXECUTION_HINT_VALUE_MAP;
} else if (includeExclude != null) {
execution = EXECUTION_HINT_VALUE_MAP;
}
if (execution == null) {
if ((valuesSource instanceof BytesValuesSource.WithOrdinals)
if ((valuesSource instanceof ValuesSource.Bytes.WithOrdinals)
&& !hasParentBucketAggregator(parent)) {
execution = EXECUTION_HINT_VALUE_ORDINALS;
} else {
@ -166,7 +166,7 @@ public class SignificantTermsAggregatorFactory extends ValueSourceAggregatorFact
if (execution.equals(EXECUTION_HINT_VALUE_ORDINALS)) {
assert includeExclude == null;
return new SignificantStringTermsAggregator.WithOrdinals(name, factories, (BytesValuesSource.WithOrdinals) valuesSource, estimatedBucketCount, requiredSize, shardSize, minDocCount, aggregationContext, parent, this);
return new SignificantStringTermsAggregator.WithOrdinals(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, estimatedBucketCount, requiredSize, shardSize, minDocCount, aggregationContext, parent, this);
}
return new SignificantStringTermsAggregator(name, factories, valuesSource, estimatedBucketCount, requiredSize, shardSize, minDocCount, includeExclude, aggregationContext, parent, this);
}
@ -176,15 +176,15 @@ public class SignificantTermsAggregatorFactory extends ValueSourceAggregatorFact
"settings as it can only be applied to string values");
}
if (valuesSource instanceof NumericValuesSource) {
if (valuesSource instanceof ValuesSource.Numeric) {
if (((NumericValuesSource) valuesSource).isFloatingPoint()) {
if (((ValuesSource.Numeric) valuesSource).isFloatingPoint()) {
throw new UnsupportedOperationException("No support for examining floating point numerics");
}
return new SignificantLongTermsAggregator(name, factories, (NumericValuesSource) valuesSource, estimatedBucketCount, requiredSize, shardSize, minDocCount, aggregationContext, parent, this);
return new SignificantLongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, formatter, estimatedBucketCount, requiredSize, shardSize, minDocCount, aggregationContext, parent, this);
}
throw new AggregationExecutionException("sigfnificant_terms aggregation cannot be applied to field [" + valuesSourceConfig.fieldContext().field() +
throw new AggregationExecutionException("sigfnificant_terms aggregation cannot be applied to field [" + config.fieldContext().field() +
"]. It can only be applied to numeric or string fields.");
}

View File

@ -30,12 +30,11 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.BucketUtils;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueParser;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -167,36 +166,36 @@ public class SignificantTermsParser implements Aggregator.Parser {
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
ValuesSourceConfig<?> config = new ValuesSourceConfig<>(BytesValuesSource.class);
ValuesSourceConfig<?> config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
config.unmapped(true);
return new SignificantTermsAggregatorFactory(aggregationName, config, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
return new SignificantTermsAggregatorFactory(aggregationName, config, null, null, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
ValuesSourceConfig<?> config;
ValueFormatter valueFormatter = null;
ValueParser valueParser = null;
if (mapper instanceof DateFieldMapper) {
DateFieldMapper dateMapper = (DateFieldMapper) mapper;
ValueFormatter formatter = format == null ?
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
valueFormatter = format == null ?
new ValueFormatter.DateTime(dateMapper.dateTimeFormatter()) :
new ValueFormatter.DateTime(format);
config = new ValuesSourceConfig<>(NumericValuesSource.class)
.formatter(formatter)
.parser(new ValueParser.DateMath(dateMapper.dateMathParser()));
valueParser = new ValueParser.DateMath(dateMapper.dateMathParser());
} else if (mapper instanceof IpFieldMapper) {
config = new ValuesSourceConfig<>(NumericValuesSource.class)
.formatter(ValueFormatter.IPv4)
.parser(ValueParser.IPv4);
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
valueFormatter = ValueFormatter.IPv4;
valueParser = ValueParser.IPv4;
} else if (indexFieldData instanceof IndexNumericFieldData) {
config = new ValuesSourceConfig<>(NumericValuesSource.class);
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
if (format != null) {
config.formatter(new ValueFormatter.Number.Pattern(format));
valueFormatter = new ValueFormatter.Number.Pattern(format);
}
} else {
config = new ValuesSourceConfig<>(BytesValuesSource.class);
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
// TODO: it will make sense to set false instead here if the aggregator factory uses
// ordinals instead of hash tables
config.needsHashes(true);
@ -206,7 +205,7 @@ public class SignificantTermsParser implements Aggregator.Parser {
// We need values to be unique to be able to run terms aggs efficiently
config.ensureUnique(true);
return new SignificantTermsAggregatorFactory(aggregationName, config, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
return new SignificantTermsAggregatorFactory(aggregationName, config, valueFormatter, valueParser, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
}
}

View File

@ -28,8 +28,8 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.ArrayList;

View File

@ -20,14 +20,15 @@ package org.elasticsearch.search.aggregations.bucket.terms;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.Arrays;
@ -42,14 +43,16 @@ public class DoubleTermsAggregator extends BucketsAggregator {
private final int requiredSize;
private final int shardSize;
private final long minDocCount;
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private final ValueFormatter formatter;
private final LongHash bucketOrds;
private DoubleValues values;
public DoubleTermsAggregator(String name, AggregatorFactories factories, NumericValuesSource valuesSource, long estimatedBucketCount,
public DoubleTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, ValueFormatter formatter, long estimatedBucketCount,
InternalOrder order, int requiredSize, int shardSize, long minDocCount, AggregationContext aggregationContext, Aggregator parent) {
super(name, BucketAggregationMode.PER_BUCKET, factories, estimatedBucketCount, aggregationContext, parent);
this.valuesSource = valuesSource;
this.formatter = formatter;
this.order = InternalOrder.validate(order, this);
this.requiredSize = requiredSize;
this.shardSize = shardSize;
@ -127,12 +130,12 @@ public class DoubleTermsAggregator extends BucketsAggregator {
bucket.aggregations = bucketAggregations(bucket.bucketOrd);
list[i] = bucket;
}
return new DoubleTerms(name, order, valuesSource.formatter(), requiredSize, minDocCount, Arrays.asList(list));
return new DoubleTerms(name, order, formatter, requiredSize, minDocCount, Arrays.asList(list));
}
@Override
public DoubleTerms buildEmptyAggregation() {
return new DoubleTerms(name, order, valuesSource.formatter(), requiredSize, minDocCount, Collections.<InternalTerms.Bucket>emptyList());
return new DoubleTerms(name, order, formatter, requiredSize, minDocCount, Collections.<InternalTerms.Bucket>emptyList());
}
@Override

View File

@ -28,8 +28,8 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.ArrayList;

View File

@ -20,15 +20,16 @@ package org.elasticsearch.search.aggregations.bucket.terms;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.Arrays;
@ -43,14 +44,16 @@ public class LongTermsAggregator extends BucketsAggregator {
protected final int requiredSize;
protected final int shardSize;
protected final long minDocCount;
protected final NumericValuesSource valuesSource;
protected final ValuesSource.Numeric valuesSource;
protected final ValueFormatter formatter;
protected final LongHash bucketOrds;
private LongValues values;
public LongTermsAggregator(String name, AggregatorFactories factories, NumericValuesSource valuesSource, long estimatedBucketCount,
public LongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, ValueFormatter formatter, long estimatedBucketCount,
InternalOrder order, int requiredSize, int shardSize, long minDocCount, AggregationContext aggregationContext, Aggregator parent) {
super(name, BucketAggregationMode.PER_BUCKET, factories, estimatedBucketCount, aggregationContext, parent);
this.valuesSource = valuesSource;
this.formatter = formatter;
this.order = InternalOrder.validate(order, this);
this.requiredSize = requiredSize;
this.shardSize = shardSize;
@ -127,12 +130,12 @@ public class LongTermsAggregator extends BucketsAggregator {
bucket.aggregations = bucketAggregations(bucket.bucketOrd);
list[i] = bucket;
}
return new LongTerms(name, order, valuesSource.formatter(), requiredSize, minDocCount, Arrays.asList(list));
return new LongTerms(name, order, formatter, requiredSize, minDocCount, Arrays.asList(list));
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new LongTerms(name, order, valuesSource.formatter(), requiredSize, minDocCount, Collections.<InternalTerms.Bucket>emptyList());
return new LongTerms(name, order, formatter, requiredSize, minDocCount, Collections.<InternalTerms.Bucket>emptyList());
}
@Override

View File

@ -27,6 +27,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.collect.Iterators2;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
@ -34,12 +35,10 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import java.io.IOException;
import java.util.*;
@ -259,12 +258,12 @@ public class StringTermsAggregator extends BucketsAggregator {
*/
public static class WithOrdinals extends StringTermsAggregator {
private final BytesValuesSource.WithOrdinals valuesSource;
private final ValuesSource.Bytes.WithOrdinals valuesSource;
private BytesValues.WithOrdinals bytesValues;
private Ordinals.Docs ordinals;
private LongArray ordinalToBucket;
public WithOrdinals(String name, AggregatorFactories factories, BytesValuesSource.WithOrdinals valuesSource, long esitmatedBucketCount,
public WithOrdinals(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource, long esitmatedBucketCount,
InternalOrder order, int requiredSize, int shardSize, long minDocCount, AggregationContext aggregationContext, Aggregator parent) {
super(name, factories, valuesSource, esitmatedBucketCount, order, requiredSize, shardSize, minDocCount, null, aggregationContext, parent);
this.valuesSource = valuesSource;

View File

@ -26,16 +26,16 @@ import org.elasticsearch.search.aggregations.Aggregator.BucketAggregationMode;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
/**
*
*/
public class TermsAggregatorFactory extends ValueSourceAggregatorFactory {
public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory {
public enum ExecutionMode {
MAP(new ParseField("map")) {
@ -57,7 +57,7 @@ public class TermsAggregatorFactory extends ValueSourceAggregatorFactory {
if (includeExclude != null) {
throw new ElasticsearchIllegalArgumentException("The `" + this + "` execution mode cannot filter terms.");
}
return new StringTermsAggregator.WithOrdinals(name, factories, (BytesValuesSource.WithOrdinals) valuesSource, estimatedBucketCount, order, requiredSize, shardSize, minDocCount, aggregationContext, parent);
return new StringTermsAggregator.WithOrdinals(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, estimatedBucketCount, order, requiredSize, shardSize, minDocCount, aggregationContext, parent);
}
};
@ -94,8 +94,10 @@ public class TermsAggregatorFactory extends ValueSourceAggregatorFactory {
private final IncludeExclude includeExclude;
private final String executionHint;
public TermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, InternalOrder order, int requiredSize, int shardSize, long minDocCount, IncludeExclude includeExclude, String executionHint) {
super(name, StringTerms.TYPE.name(), valueSourceConfig);
public TermsAggregatorFactory(String name, ValuesSourceConfig config, ValueFormatter formatter, ValueParser parser,
InternalOrder order, int requiredSize, int shardSize, long minDocCount, IncludeExclude includeExclude, String executionHint) {
super(name, StringTerms.TYPE.name(), config, formatter, parser);
this.order = order;
this.requiredSize = requiredSize;
this.shardSize = shardSize;
@ -166,14 +168,14 @@ public class TermsAggregatorFactory extends ValueSourceAggregatorFactory {
// And that all values are not necessarily visited by the matches.
estimatedBucketCount = Math.min(estimatedBucketCount, 512);
if (valuesSource instanceof BytesValuesSource) {
if (valuesSource instanceof ValuesSource.Bytes) {
ExecutionMode execution = null;
if (executionHint != null) {
execution = ExecutionMode.fromString(executionHint);
}
// In some cases, using ordinals is just not supported: override it
if (!(valuesSource instanceof BytesValuesSource.WithOrdinals)) {
if (!(valuesSource instanceof ValuesSource.Bytes.WithOrdinals)) {
execution = ExecutionMode.MAP;
} else if (includeExclude != null) {
execution = ExecutionMode.MAP;
@ -181,7 +183,7 @@ public class TermsAggregatorFactory extends ValueSourceAggregatorFactory {
if (execution == null) {
// Let's try to use a good default
if ((valuesSource instanceof BytesValuesSource.WithOrdinals)
if ((valuesSource instanceof ValuesSource.Bytes.WithOrdinals)
&& shouldUseOrdinals(parent, valuesSource, aggregationContext)) {
execution = ExecutionMode.ORDINALS;
} else {
@ -198,14 +200,14 @@ public class TermsAggregatorFactory extends ValueSourceAggregatorFactory {
"settings as it can only be applied to string values");
}
if (valuesSource instanceof NumericValuesSource) {
if (((NumericValuesSource) valuesSource).isFloatingPoint()) {
return new DoubleTermsAggregator(name, factories, (NumericValuesSource) valuesSource, estimatedBucketCount, order, requiredSize, shardSize, minDocCount, aggregationContext, parent);
if (valuesSource instanceof ValuesSource.Numeric) {
if (((ValuesSource.Numeric) valuesSource).isFloatingPoint()) {
return new DoubleTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, formatter, estimatedBucketCount, order, requiredSize, shardSize, minDocCount, aggregationContext, parent);
}
return new LongTermsAggregator(name, factories, (NumericValuesSource) valuesSource, estimatedBucketCount, order, requiredSize, shardSize, minDocCount, aggregationContext, parent);
return new LongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, formatter, estimatedBucketCount, order, requiredSize, shardSize, minDocCount, aggregationContext, parent);
}
throw new AggregationExecutionException("terms aggregation cannot be applied to field [" + valuesSourceConfig.fieldContext().field() +
throw new AggregationExecutionException("terms aggregation cannot be applied to field [" + config.fieldContext().field() +
"]. It can only be applied to numeric or string fields.");
}

View File

@ -30,13 +30,11 @@ import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueParser;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -207,51 +205,68 @@ public class TermsParser implements Aggregator.Parser {
Class<? extends ValuesSource> valueSourceType = script == null ?
ValuesSource.class : // unknown, will inherit whatever is in the context
valueType != null ? valueType.scriptValueType.getValuesSourceType() : // the user explicitly defined a value type
BytesValuesSource.class; // defaulting to bytes
ValuesSource.Bytes.class; // defaulting to bytes
ValuesSourceConfig<?> config = new ValuesSourceConfig(valueSourceType);
ValueFormatter valueFormatter = null;
ValueParser valueParser = null;
if (valueType != null) {
config.scriptValueType(valueType.scriptValueType);
if (valueType != Terms.ValueType.STRING && format != null) {
valueFormatter = new ValueFormatter.Number.Pattern(format);
valueParser = new ValueParser.Number.Pattern(format);
}
}
config.script(searchScript);
if (!assumeUnique) {
config.ensureUnique(true);
}
return new TermsAggregatorFactory(aggregationName, config, order, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
return new TermsAggregatorFactory(aggregationName, config, valueFormatter, valueParser, order, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
ValuesSourceConfig<?> config = new ValuesSourceConfig<>(BytesValuesSource.class);
ValuesSourceConfig<?> config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
ValueFormatter valueFormatter = null;
ValueParser valueParser = null;
config.unmapped(true);
return new TermsAggregatorFactory(aggregationName, config, order, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
if (valueType != null) {
config.scriptValueType(valueType.scriptValueType);
if (valueType != Terms.ValueType.STRING && format != null) {
valueFormatter = new ValueFormatter.Number.Pattern(format);
valueParser = new ValueParser.Number.Pattern(format);
}
}
return new TermsAggregatorFactory(aggregationName, config, valueFormatter, valueParser, order, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
ValuesSourceConfig<?> config;
ValueFormatter valueFormatter = null;
ValueParser valueParser = null;
if (mapper instanceof DateFieldMapper) {
DateFieldMapper dateMapper = (DateFieldMapper) mapper;
ValueFormatter formatter = format == null ?
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
valueFormatter = format == null ?
new ValueFormatter.DateTime(dateMapper.dateTimeFormatter()) :
new ValueFormatter.DateTime(format);
config = new ValuesSourceConfig<>(NumericValuesSource.class)
.formatter(formatter)
.parser(new ValueParser.DateMath(dateMapper.dateMathParser()));
valueParser = new ValueParser.DateMath(dateMapper.dateMathParser());
} else if (mapper instanceof IpFieldMapper) {
config = new ValuesSourceConfig<>(NumericValuesSource.class)
.formatter(ValueFormatter.IPv4)
.parser(ValueParser.IPv4);
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
valueFormatter = ValueFormatter.IPv4;
valueParser = ValueParser.IPv4;
} else if (indexFieldData instanceof IndexNumericFieldData) {
config = new ValuesSourceConfig<>(NumericValuesSource.class);
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
if (format != null) {
config.formatter(new ValueFormatter.Number.Pattern(format));
valueFormatter = new ValueFormatter.Number.Pattern(format);
valueParser = new ValueParser.Number.Pattern(format);
}
} else {
config = new ValuesSourceConfig<>(BytesValuesSource.class);
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
// TODO: it will make sense to set false instead here if the aggregator factory uses
// ordinals instead of hash tables
config.needsHashes(true);
@ -266,7 +281,7 @@ public class TermsParser implements Aggregator.Parser {
config.ensureUnique(true);
}
return new TermsAggregatorFactory(aggregationName, config, order, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
return new TermsAggregatorFactory(aggregationName, config, valueFormatter, valueParser, order, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
}
static InternalOrder resolveOrder(String key, boolean asc) {

View File

@ -19,7 +19,7 @@
package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
/**
*

View File

@ -24,9 +24,9 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -44,7 +44,7 @@ public abstract class ValuesSourceMetricsAggregatorParser<S extends MetricsAggre
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<NumericValuesSource> config = new ValuesSourceConfig<>(NumericValuesSource.class);
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
String field = null;
String script = null;
@ -107,5 +107,5 @@ public abstract class ValuesSourceMetricsAggregatorParser<S extends MetricsAggre
return createFactory(aggregationName, config);
}
protected abstract AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<NumericValuesSource> config);
protected abstract AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config);
}

View File

@ -27,9 +27,9 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import java.io.IOException;
@ -38,13 +38,13 @@ import java.io.IOException;
*/
public class AvgAggregator extends MetricsAggregator.SingleValue {
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private DoubleValues values;
private LongArray counts;
private DoubleArray sums;
public AvgAggregator(String name, long estimatedBucketsCount, NumericValuesSource valuesSource, AggregationContext context, Aggregator parent) {
public AvgAggregator(String name, long estimatedBucketsCount, ValuesSource.Numeric valuesSource, AggregationContext context, Aggregator parent) {
super(name, estimatedBucketsCount, context, parent);
this.valuesSource = valuesSource;
if (valuesSource != null) {
@ -96,9 +96,9 @@ public class AvgAggregator extends MetricsAggregator.SingleValue {
return new InternalAvg(name, 0.0, 0l);
}
public static class Factory extends ValueSourceAggregatorFactory.LeafOnly<NumericValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
public Factory(String name, String type, ValuesSourceConfig<NumericValuesSource> valuesSourceConfig) {
public Factory(String name, String type, ValuesSourceConfig<ValuesSource.Numeric> valuesSourceConfig) {
super(name, type, valuesSourceConfig);
}
@ -108,7 +108,7 @@ public class AvgAggregator extends MetricsAggregator.SingleValue {
}
@Override
protected Aggregator create(NumericValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new AvgAggregator(name, expectedBucketsCount, valuesSource, aggregationContext, parent);
}
}

View File

@ -20,8 +20,8 @@ package org.elasticsearch.search.aggregations.metrics.avg;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
/**
*
@ -34,7 +34,7 @@ public class AvgParser extends ValuesSourceMetricsAggregatorParser<InternalAvg>
}
@Override
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<NumericValuesSource> config) {
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config) {
return new AvgAggregator.Factory(aggregationName, type(), config);
}

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregation;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;

View File

@ -40,7 +40,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import java.io.IOException;
@ -80,12 +79,12 @@ public class CardinalityAggregator extends MetricsAggregator.SingleValue {
// requested not to hash the values (perhaps they already hashed the values themselves before indexing the doc)
// so we can just work with the original value source as is
if (!rehash) {
LongValues hashValues = ((NumericValuesSource) valuesSource).longValues();
LongValues hashValues = ((ValuesSource.Numeric) valuesSource).longValues();
return new DirectCollector(counts, hashValues);
}
if (valuesSource instanceof NumericValuesSource) {
NumericValuesSource source = (NumericValuesSource) valuesSource;
if (valuesSource instanceof ValuesSource.Numeric) {
ValuesSource.Numeric source = (ValuesSource.Numeric) valuesSource;
LongValues hashValues = source.isFloatingPoint() ? MurmurHash3Values.wrap(source.doubleValues()) : MurmurHash3Values.wrap(source.longValues());
return new DirectCollector(counts, hashValues);
}

View File

@ -23,18 +23,17 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.Aggregator.BucketAggregationMode;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
final class CardinalityAggregatorFactory extends ValueSourceAggregatorFactory<ValuesSource> {
final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource> {
private final long precisionThreshold;
private final boolean rehash;
CardinalityAggregatorFactory(String name, ValuesSourceConfig valuesSourceConfig, long precisionThreshold, boolean rehash) {
super(name, InternalCardinality.TYPE.name(), valuesSourceConfig);
CardinalityAggregatorFactory(String name, ValuesSourceConfig config, long precisionThreshold, boolean rehash) {
super(name, InternalCardinality.TYPE.name(), config);
this.precisionThreshold = precisionThreshold;
this.rehash = rehash;
}
@ -50,7 +49,7 @@ final class CardinalityAggregatorFactory extends ValueSourceAggregatorFactory<Va
@Override
protected Aggregator create(ValuesSource valuesSource, long expectedBucketsCount, AggregationContext context, Aggregator parent) {
if (!(valuesSource instanceof NumericValuesSource) && !rehash) {
if (!(valuesSource instanceof ValuesSource.Numeric) && !rehash) {
throw new AggregationExecutionException("Turning off rehashing for cardinality aggregation [" + name + "] on non-numeric values in not allowed");
}
return new CardinalityAggregator(name, parent == null ? 1 : parent.estimatedBucketCount(), valuesSource, rehash, precision(parent), context, parent);

View File

@ -28,10 +28,9 @@ import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -97,7 +96,7 @@ public class CardinalityParser implements Aggregator.Parser {
ValuesSourceConfig<?> config = null;
if (script != null) {
config = new ValuesSourceConfig<>(BytesValuesSource.class);
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
@ -105,9 +104,9 @@ public class CardinalityParser implements Aggregator.Parser {
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (config == null) {
if (mapper instanceof NumberFieldMapper) {
config = new ValuesSourceConfig<>(NumericValuesSource.class);
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
} else {
config = new ValuesSourceConfig<>(BytesValuesSource.class);
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
if (mapper == null) {
config.unmapped(true);
}
@ -121,7 +120,7 @@ public class CardinalityParser implements Aggregator.Parser {
config.fieldContext(new FieldContext(field, indexFieldData));
}
} else if (config == null) {
config = new ValuesSourceConfig<>(BytesValuesSource.class);
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
}
if (rehash == null) {

View File

@ -26,7 +26,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregation;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregation;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;

View File

@ -26,9 +26,9 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import java.io.IOException;
@ -37,12 +37,12 @@ import java.io.IOException;
*/
public class MaxAggregator extends MetricsAggregator.SingleValue {
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private DoubleValues values;
private DoubleArray maxes;
public MaxAggregator(String name, long estimatedBucketsCount, NumericValuesSource valuesSource, AggregationContext context, Aggregator parent) {
public MaxAggregator(String name, long estimatedBucketsCount, ValuesSource.Numeric valuesSource, AggregationContext context, Aggregator parent) {
super(name, estimatedBucketsCount, context, parent);
this.valuesSource = valuesSource;
if (valuesSource != null) {
@ -97,9 +97,9 @@ public class MaxAggregator extends MetricsAggregator.SingleValue {
return new InternalMax(name, Double.NEGATIVE_INFINITY);
}
public static class Factory extends ValueSourceAggregatorFactory.LeafOnly<NumericValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
public Factory(String name, ValuesSourceConfig<NumericValuesSource> valuesSourceConfig) {
public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valuesSourceConfig) {
super(name, InternalMax.TYPE.name(), valuesSourceConfig);
}
@ -109,7 +109,7 @@ public class MaxAggregator extends MetricsAggregator.SingleValue {
}
@Override
protected Aggregator create(NumericValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new MaxAggregator(name, expectedBucketsCount, valuesSource, aggregationContext, parent);
}
}

View File

@ -20,8 +20,8 @@ package org.elasticsearch.search.aggregations.metrics.max;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
/**
*
@ -34,7 +34,7 @@ public class MaxParser extends ValuesSourceMetricsAggregatorParser<InternalMax>
}
@Override
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<NumericValuesSource> config) {
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config) {
return new MaxAggregator.Factory(aggregationName, config);
}

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregation;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;

View File

@ -26,9 +26,9 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import java.io.IOException;
@ -37,12 +37,12 @@ import java.io.IOException;
*/
public class MinAggregator extends MetricsAggregator.SingleValue {
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private DoubleValues values;
private DoubleArray mins;
public MinAggregator(String name, long estimatedBucketsCount, NumericValuesSource valuesSource, AggregationContext context, Aggregator parent) {
public MinAggregator(String name, long estimatedBucketsCount, ValuesSource.Numeric valuesSource, AggregationContext context, Aggregator parent) {
super(name, estimatedBucketsCount, context, parent);
this.valuesSource = valuesSource;
if (valuesSource != null) {
@ -96,9 +96,9 @@ public class MinAggregator extends MetricsAggregator.SingleValue {
return new InternalMin(name, Double.POSITIVE_INFINITY);
}
public static class Factory extends ValueSourceAggregatorFactory.LeafOnly<NumericValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
public Factory(String name, ValuesSourceConfig<NumericValuesSource> valuesSourceConfig) {
public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valuesSourceConfig) {
super(name, InternalMin.TYPE.name(), valuesSourceConfig);
}
@ -108,7 +108,7 @@ public class MinAggregator extends MetricsAggregator.SingleValue {
}
@Override
protected Aggregator create(NumericValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new MinAggregator(name, expectedBucketsCount, valuesSource, aggregationContext, parent);
}
}

View File

@ -20,8 +20,8 @@ package org.elasticsearch.search.aggregations.metrics.min;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
/**
*
@ -39,7 +39,7 @@ public class MinParser extends ValuesSourceMetricsAggregatorParser<InternalMin>
}
@Override
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<NumericValuesSource> config) {
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config) {
return new MinAggregator.Factory(aggregationName, config);
}
}

View File

@ -25,7 +25,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregation;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.Iterator;

View File

@ -24,9 +24,9 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import java.io.IOException;
@ -35,14 +35,14 @@ import java.io.IOException;
*/
public class PercentilesAggregator extends MetricsAggregator.MultiValue {
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private DoubleValues values;
private final PercentilesEstimator estimator;
private final boolean keyed;
public PercentilesAggregator(String name, long estimatedBucketsCount, NumericValuesSource valuesSource, AggregationContext context,
public PercentilesAggregator(String name, long estimatedBucketsCount, ValuesSource.Numeric valuesSource, AggregationContext context,
Aggregator parent, PercentilesEstimator estimator, boolean keyed) {
super(name, estimatedBucketsCount, context, parent);
this.valuesSource = valuesSource;
@ -96,13 +96,13 @@ public class PercentilesAggregator extends MetricsAggregator.MultiValue {
estimator.release();
}
public static class Factory extends ValueSourceAggregatorFactory.LeafOnly<NumericValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
private final PercentilesEstimator.Factory estimatorFactory;
private final double[] percents;
private final boolean keyed;
public Factory(String name, ValuesSourceConfig<NumericValuesSource> valuesSourceConfig,
public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valuesSourceConfig,
double[] percents, PercentilesEstimator.Factory estimatorFactory, boolean keyed) {
super(name, InternalPercentiles.TYPE.name(), valuesSourceConfig);
this.estimatorFactory = estimatorFactory;
@ -116,7 +116,7 @@ public class PercentilesAggregator extends MetricsAggregator.MultiValue {
}
@Override
protected Aggregator create(NumericValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
PercentilesEstimator estimator = estimatorFactory.create(percents, expectedBucketsCount, aggregationContext);
return new PercentilesAggregator(name, expectedBucketsCount, valuesSource, aggregationContext, parent, estimator, keyed);
}

View File

@ -26,9 +26,9 @@ import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigest;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -55,7 +55,7 @@ public class PercentilesParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<NumericValuesSource> config = new ValuesSourceConfig<>(NumericValuesSource.class);
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
String field = null;
String script = null;

View File

@ -25,7 +25,7 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregation;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;

View File

@ -29,9 +29,9 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import java.io.IOException;
@ -40,7 +40,7 @@ import java.io.IOException;
*/
public class StatsAggegator extends MetricsAggregator.MultiValue {
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private DoubleValues values;
private LongArray counts;
@ -48,7 +48,7 @@ public class StatsAggegator extends MetricsAggregator.MultiValue {
private DoubleArray mins;
private DoubleArray maxes;
public StatsAggegator(String name, long estimatedBucketsCount, NumericValuesSource valuesSource, AggregationContext context, Aggregator parent) {
public StatsAggegator(String name, long estimatedBucketsCount, ValuesSource.Numeric valuesSource, AggregationContext context, Aggregator parent) {
super(name, estimatedBucketsCount, context, parent);
this.valuesSource = valuesSource;
if (valuesSource != null) {
@ -138,9 +138,9 @@ public class StatsAggegator extends MetricsAggregator.MultiValue {
return new InternalStats(name, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
}
public static class Factory extends ValueSourceAggregatorFactory.LeafOnly<NumericValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
public Factory(String name, ValuesSourceConfig<NumericValuesSource> valuesSourceConfig) {
public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valuesSourceConfig) {
super(name, InternalStats.TYPE.name(), valuesSourceConfig);
}
@ -150,7 +150,7 @@ public class StatsAggegator extends MetricsAggregator.MultiValue {
}
@Override
protected Aggregator create(NumericValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new StatsAggegator(name, expectedBucketsCount, valuesSource, aggregationContext, parent);
}
}

View File

@ -20,8 +20,8 @@ package org.elasticsearch.search.aggregations.metrics.stats;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
/**
*
@ -34,7 +34,7 @@ public class StatsParser extends ValuesSourceMetricsAggregatorParser<InternalSta
}
@Override
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<NumericValuesSource> config) {
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config) {
return new StatsAggegator.Factory(aggregationName, config);
}
}

View File

@ -29,9 +29,9 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import java.io.IOException;
@ -40,7 +40,7 @@ import java.io.IOException;
*/
public class ExtendedStatsAggregator extends MetricsAggregator.MultiValue {
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private DoubleValues values;
private LongArray counts;
@ -49,7 +49,7 @@ public class ExtendedStatsAggregator extends MetricsAggregator.MultiValue {
private DoubleArray maxes;
private DoubleArray sumOfSqrs;
public ExtendedStatsAggregator(String name, long estimatedBucketsCount, NumericValuesSource valuesSource, AggregationContext context, Aggregator parent) {
public ExtendedStatsAggregator(String name, long estimatedBucketsCount, ValuesSource.Numeric valuesSource, AggregationContext context, Aggregator parent) {
super(name, estimatedBucketsCount, context, parent);
this.valuesSource = valuesSource;
if (valuesSource != null) {
@ -159,9 +159,9 @@ public class ExtendedStatsAggregator extends MetricsAggregator.MultiValue {
Releasables.release(counts, maxes, mins, sumOfSqrs, sums);
}
public static class Factory extends ValueSourceAggregatorFactory.LeafOnly<NumericValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
public Factory(String name, ValuesSourceConfig<NumericValuesSource> valuesSourceConfig) {
public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valuesSourceConfig) {
super(name, InternalExtendedStats.TYPE.name(), valuesSourceConfig);
}
@ -171,7 +171,7 @@ public class ExtendedStatsAggregator extends MetricsAggregator.MultiValue {
}
@Override
protected Aggregator create(NumericValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new ExtendedStatsAggregator(name, expectedBucketsCount, valuesSource, aggregationContext, parent);
}
}

View File

@ -20,8 +20,8 @@ package org.elasticsearch.search.aggregations.metrics.stats.extended;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
/**
*
@ -34,7 +34,7 @@ public class ExtendedStatsParser extends ValuesSourceMetricsAggregatorParser<Int
}
@Override
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<NumericValuesSource> config) {
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config) {
return new ExtendedStatsAggregator.Factory(aggregationName, config);
}
}

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregation;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatterStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;

View File

@ -26,9 +26,9 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import java.io.IOException;
@ -37,12 +37,12 @@ import java.io.IOException;
*/
public class SumAggregator extends MetricsAggregator.SingleValue {
private final NumericValuesSource valuesSource;
private final ValuesSource.Numeric valuesSource;
private DoubleValues values;
private DoubleArray sums;
public SumAggregator(String name, long estimatedBucketsCount, NumericValuesSource valuesSource, AggregationContext context, Aggregator parent) {
public SumAggregator(String name, long estimatedBucketsCount, ValuesSource.Numeric valuesSource, AggregationContext context, Aggregator parent) {
super(name, estimatedBucketsCount, context, parent);
this.valuesSource = valuesSource;
if (valuesSource != null) {
@ -91,9 +91,9 @@ public class SumAggregator extends MetricsAggregator.SingleValue {
return new InternalSum(name, 0.0);
}
public static class Factory extends ValueSourceAggregatorFactory.LeafOnly<NumericValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
public Factory(String name, ValuesSourceConfig<NumericValuesSource> valuesSourceConfig) {
public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valuesSourceConfig) {
super(name, InternalSum.TYPE.name(), valuesSourceConfig);
}
@ -103,7 +103,7 @@ public class SumAggregator extends MetricsAggregator.SingleValue {
}
@Override
protected Aggregator create(NumericValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new SumAggregator(name, expectedBucketsCount, valuesSource, aggregationContext, parent);
}
}

View File

@ -20,8 +20,8 @@ package org.elasticsearch.search.aggregations.metrics.sum;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
/**
*
@ -34,7 +34,7 @@ public class SumParser extends ValuesSourceMetricsAggregatorParser<InternalSum>
}
@Override
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<NumericValuesSource> config) {
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config) {
return new SumAggregator.Factory(aggregationName, config);
}
}

View File

@ -26,9 +26,9 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import java.io.IOException;
@ -40,13 +40,13 @@ import java.io.IOException;
*/
public class ValueCountAggregator extends MetricsAggregator.SingleValue {
private final BytesValuesSource valuesSource;
private final ValuesSource.Bytes valuesSource;
private BytesValues values;
// a count per bucket
LongArray counts;
public ValueCountAggregator(String name, long expectedBucketsCount, BytesValuesSource valuesSource, AggregationContext aggregationContext, Aggregator parent) {
public ValueCountAggregator(String name, long expectedBucketsCount, ValuesSource.Bytes valuesSource, AggregationContext aggregationContext, Aggregator parent) {
super(name, 0, aggregationContext, parent);
this.valuesSource = valuesSource;
if (valuesSource != null) {
@ -96,9 +96,9 @@ public class ValueCountAggregator extends MetricsAggregator.SingleValue {
Releasables.release(counts);
}
public static class Factory extends ValueSourceAggregatorFactory.LeafOnly<BytesValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Bytes> {
public Factory(String name, ValuesSourceConfig<BytesValuesSource> valuesSourceBuilder) {
public Factory(String name, ValuesSourceConfig<ValuesSource.Bytes> valuesSourceBuilder) {
super(name, InternalValueCount.TYPE.name(), valuesSourceBuilder);
}
@ -108,7 +108,7 @@ public class ValueCountAggregator extends MetricsAggregator.SingleValue {
}
@Override
protected Aggregator create(BytesValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
protected Aggregator create(ValuesSource.Bytes valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new ValueCountAggregator(name, expectedBucketsCount, valuesSource, aggregationContext, parent);
}

View File

@ -24,9 +24,9 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -45,7 +45,7 @@ public class ValueCountParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<BytesValuesSource> config = new ValuesSourceConfig<>(BytesValuesSource.class);
ValuesSourceConfig<ValuesSource.Bytes> config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
String field = null;
String script = null;

View File

@ -31,9 +31,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import org.elasticsearch.search.aggregations.support.geopoints.GeoPointValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
import org.elasticsearch.search.internal.SearchContext;
import java.util.ArrayList;
@ -48,7 +45,7 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
private final SearchContext searchContext;
private ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource>[] perDepthFieldDataSources = new ObjectObjectOpenHashMap[4];
private ObjectObjectOpenHashMap<ConfigCacheKey, ValuesSource>[] perDepthFieldDataSources = new ObjectObjectOpenHashMap[4];
private List<ReaderContextAware> readerAwares = new ArrayList<>();
private List<ScorerAware> scorerAwares = new ArrayList<>();
@ -104,47 +101,47 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
if (perDepthFieldDataSources[depth] == null) {
perDepthFieldDataSources[depth] = new ObjectObjectOpenHashMap<>();
}
final ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource> fieldDataSources = perDepthFieldDataSources[depth];
final ObjectObjectOpenHashMap<ConfigCacheKey, ValuesSource> fieldDataSources = perDepthFieldDataSources[depth];
if (config.fieldContext == null) {
if (NumericValuesSource.class.isAssignableFrom(config.valueSourceType)) {
if (ValuesSource.Numeric.class.isAssignableFrom(config.valueSourceType)) {
return (VS) numericScript(config);
}
if (BytesValuesSource.class.isAssignableFrom(config.valueSourceType)) {
if (ValuesSource.Bytes.class.isAssignableFrom(config.valueSourceType)) {
return (VS) bytesScript(config);
}
throw new AggregationExecutionException("value source of type [" + config.valueSourceType.getSimpleName() + "] is not supported by scripts");
}
if (NumericValuesSource.class.isAssignableFrom(config.valueSourceType)) {
if (ValuesSource.Numeric.class.isAssignableFrom(config.valueSourceType)) {
return (VS) numericField(fieldDataSources, config);
}
if (GeoPointValuesSource.class.isAssignableFrom(config.valueSourceType)) {
if (ValuesSource.GeoPoint.class.isAssignableFrom(config.valueSourceType)) {
return (VS) geoPointField(fieldDataSources, config);
}
// falling back to bytes values
return (VS) bytesField(fieldDataSources, config);
}
private NumericValuesSource numericScript(ValuesSourceConfig<?> config) {
private ValuesSource.Numeric numericScript(ValuesSourceConfig<?> config) {
setScorerIfNeeded(config.script);
setReaderIfNeeded(config.script);
scorerAwares.add(config.script);
readerAwares.add(config.script);
FieldDataSource.Numeric source = new FieldDataSource.Numeric.Script(config.script, config.scriptValueType);
ValuesSource.Numeric source = new ValuesSource.Numeric.Script(config.script, config.scriptValueType);
if (config.ensureUnique || config.ensureSorted) {
source = new FieldDataSource.Numeric.SortedAndUnique(source);
source = new ValuesSource.Numeric.SortedAndUnique(source);
readerAwares.add((ReaderContextAware) source);
}
return new NumericValuesSource(source, config.formatter(), config.parser());
return source;
}
private NumericValuesSource numericField(ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
private ValuesSource.Numeric numericField(ObjectObjectOpenHashMap<ConfigCacheKey, ValuesSource> fieldDataSources, ValuesSourceConfig<?> config) {
final ConfigCacheKey cacheKey = new ConfigCacheKey(config);
FieldDataSource.Numeric dataSource = (FieldDataSource.Numeric) fieldDataSources.get(cacheKey);
ValuesSource.Numeric dataSource = (ValuesSource.Numeric) fieldDataSources.get(cacheKey);
if (dataSource == null) {
FieldDataSource.MetaData metaData = FieldDataSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
dataSource = new FieldDataSource.Numeric.FieldData((IndexNumericFieldData<?>) config.fieldContext.indexFieldData(), metaData);
ValuesSource.MetaData metaData = ValuesSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
dataSource = new ValuesSource.Numeric.FieldData((IndexNumericFieldData<?>) config.fieldContext.indexFieldData(), metaData);
setReaderIfNeeded((ReaderContextAware) dataSource);
readerAwares.add((ReaderContextAware) dataSource);
fieldDataSources.put(cacheKey, dataSource);
@ -154,29 +151,29 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
setReaderIfNeeded(config.script);
scorerAwares.add(config.script);
readerAwares.add(config.script);
dataSource = new FieldDataSource.Numeric.WithScript(dataSource, config.script);
dataSource = new ValuesSource.Numeric.WithScript(dataSource, config.script);
if (config.ensureUnique || config.ensureSorted) {
dataSource = new FieldDataSource.Numeric.SortedAndUnique(dataSource);
dataSource = new ValuesSource.Numeric.SortedAndUnique(dataSource);
readerAwares.add((ReaderContextAware) dataSource);
}
}
if (config.needsHashes) {
dataSource.setNeedsHashes(true);
}
return new NumericValuesSource(dataSource, config.formatter(), config.parser());
return dataSource;
}
private ValuesSource bytesField(ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
private ValuesSource bytesField(ObjectObjectOpenHashMap<ConfigCacheKey, ValuesSource> fieldDataSources, ValuesSourceConfig<?> config) {
final ConfigCacheKey cacheKey = new ConfigCacheKey(config);
FieldDataSource dataSource = fieldDataSources.get(cacheKey);
ValuesSource dataSource = fieldDataSources.get(cacheKey);
if (dataSource == null) {
final IndexFieldData<?> indexFieldData = config.fieldContext.indexFieldData();
FieldDataSource.MetaData metaData = FieldDataSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
ValuesSource.MetaData metaData = ValuesSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
if (indexFieldData instanceof IndexFieldData.WithOrdinals) {
dataSource = new FieldDataSource.Bytes.WithOrdinals.FieldData((IndexFieldData.WithOrdinals) indexFieldData, metaData);
dataSource = new ValuesSource.Bytes.WithOrdinals.FieldData((IndexFieldData.WithOrdinals) indexFieldData, metaData);
} else {
dataSource = new FieldDataSource.Bytes.FieldData(indexFieldData, metaData);
dataSource = new ValuesSource.Bytes.FieldData(indexFieldData, metaData);
}
setReaderIfNeeded((ReaderContextAware) dataSource);
readerAwares.add((ReaderContextAware) dataSource);
@ -187,45 +184,41 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
setReaderIfNeeded(config.script);
scorerAwares.add(config.script);
readerAwares.add(config.script);
dataSource = new FieldDataSource.WithScript(dataSource, config.script);
dataSource = new ValuesSource.WithScript(dataSource, config.script);
}
// Even in case we wrap field data, we might still need to wrap for sorting, because the wrapped field data might be
// eg. a numeric field data that doesn't sort according to the byte order. However field data values are unique so no
// need to wrap for uniqueness
if ((config.ensureUnique && !dataSource.metaData().uniqueness().unique()) || config.ensureSorted) {
dataSource = new FieldDataSource.Bytes.SortedAndUnique(dataSource);
dataSource = new ValuesSource.Bytes.SortedAndUnique(dataSource);
readerAwares.add((ReaderContextAware) dataSource);
}
if (config.needsHashes) { // the data source needs hash if at least one consumer needs hashes
dataSource.setNeedsHashes(true);
}
if (dataSource instanceof FieldDataSource.Bytes.WithOrdinals) {
return new BytesValuesSource.WithOrdinals((FieldDataSource.Bytes.WithOrdinals) dataSource);
} else {
return new BytesValuesSource(dataSource);
}
return dataSource;
}
private BytesValuesSource bytesScript(ValuesSourceConfig<?> config) {
private ValuesSource.Bytes bytesScript(ValuesSourceConfig<?> config) {
setScorerIfNeeded(config.script);
setReaderIfNeeded(config.script);
scorerAwares.add(config.script);
readerAwares.add(config.script);
FieldDataSource.Bytes source = new FieldDataSource.Bytes.Script(config.script);
ValuesSource.Bytes source = new ValuesSource.Bytes.Script(config.script);
if (config.ensureUnique || config.ensureSorted) {
source = new FieldDataSource.Bytes.SortedAndUnique(source);
source = new ValuesSource.Bytes.SortedAndUnique(source);
readerAwares.add((ReaderContextAware) source);
}
return new BytesValuesSource(source);
return source;
}
private GeoPointValuesSource geoPointField(ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
private ValuesSource.GeoPoint geoPointField(ObjectObjectOpenHashMap<ConfigCacheKey, ValuesSource> fieldDataSources, ValuesSourceConfig<?> config) {
final ConfigCacheKey cacheKey = new ConfigCacheKey(config);
FieldDataSource.GeoPoint dataSource = (FieldDataSource.GeoPoint) fieldDataSources.get(cacheKey);
ValuesSource.GeoPoint dataSource = (ValuesSource.GeoPoint) fieldDataSources.get(cacheKey);
if (dataSource == null) {
FieldDataSource.MetaData metaData = FieldDataSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
dataSource = new FieldDataSource.GeoPoint((IndexGeoPointFieldData<?>) config.fieldContext.indexFieldData(), metaData);
ValuesSource.MetaData metaData = ValuesSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
dataSource = new ValuesSource.GeoPoint((IndexGeoPointFieldData<?>) config.fieldContext.indexFieldData(), metaData);
setReaderIfNeeded(dataSource);
readerAwares.add(dataSource);
fieldDataSources.put(cacheKey, dataSource);
@ -233,7 +226,7 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
if (config.needsHashes) {
dataSource.setNeedsHashes(true);
}
return new GeoPointValuesSource(dataSource);
return dataSource;
}
public void registerReaderContextAware(ReaderContextAware readerContextAware) {

View File

@ -1,812 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.elasticsearch.common.lucene.ReaderContextAware;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.AtomicFieldData.Order;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.support.FieldDataSource.Bytes.SortedAndUnique.SortedUniqueBytesValues;
import org.elasticsearch.search.aggregations.support.bytes.ScriptBytesValues;
import org.elasticsearch.search.aggregations.support.numeric.ScriptDoubleValues;
import org.elasticsearch.search.aggregations.support.numeric.ScriptLongValues;
import org.elasticsearch.search.internal.SearchContext;
public abstract class FieldDataSource {
public static class MetaData {
public static final MetaData UNKNOWN = new MetaData();
public enum Uniqueness {
UNIQUE,
NOT_UNIQUE,
UNKNOWN;
public boolean unique() {
return this == UNIQUE;
}
}
private long maxAtomicUniqueValuesCount = -1;
private boolean multiValued = true;
private Uniqueness uniqueness = Uniqueness.UNKNOWN;
private MetaData() {}
private MetaData(MetaData other) {
this.maxAtomicUniqueValuesCount = other.maxAtomicUniqueValuesCount;
this.multiValued = other.multiValued;
this.uniqueness = other.uniqueness;
}
private MetaData(long maxAtomicUniqueValuesCount, boolean multiValued, Uniqueness uniqueness) {
this.maxAtomicUniqueValuesCount = maxAtomicUniqueValuesCount;
this.multiValued = multiValued;
this.uniqueness = uniqueness;
}
public long maxAtomicUniqueValuesCount() {
return maxAtomicUniqueValuesCount;
}
public boolean multiValued() {
return multiValued;
}
public Uniqueness uniqueness() {
return uniqueness;
}
public static MetaData load(IndexFieldData indexFieldData, SearchContext context) {
MetaData metaData = new MetaData();
metaData.uniqueness = Uniqueness.UNIQUE;
for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
AtomicFieldData fieldData = indexFieldData.load(readerContext);
metaData.multiValued |= fieldData.isMultiValued();
metaData.maxAtomicUniqueValuesCount = Math.max(metaData.maxAtomicUniqueValuesCount, fieldData.getNumberUniqueValues());
}
return metaData;
}
public static Builder builder() {
return new Builder();
}
public static Builder builder(MetaData other) {
return new Builder(other);
}
public static class Builder {
private final MetaData metaData;
private Builder() {
metaData = new MetaData();
}
private Builder(MetaData metaData) {
this.metaData = new MetaData(metaData);
}
public Builder maxAtomicUniqueValuesCount(long maxAtomicUniqueValuesCount) {
metaData.maxAtomicUniqueValuesCount = maxAtomicUniqueValuesCount;
return this;
}
public Builder multiValued(boolean multiValued) {
metaData.multiValued = multiValued;
return this;
}
public Builder uniqueness(Uniqueness uniqueness) {
metaData.uniqueness = uniqueness;
return this;
}
public MetaData build() {
return metaData;
}
}
}
/**
* Get the current {@link BytesValues}.
*/
public abstract BytesValues bytesValues();
/**
* Ask the underlying data source to provide pre-computed hashes, optional operation.
*/
public void setNeedsHashes(boolean needsHashes) {}
public abstract MetaData metaData();
public static abstract class Bytes extends FieldDataSource {
public static abstract class WithOrdinals extends Bytes {
public abstract BytesValues.WithOrdinals bytesValues();
public static class FieldData extends WithOrdinals implements ReaderContextAware {
protected boolean needsHashes;
protected final IndexFieldData.WithOrdinals<?> indexFieldData;
protected final MetaData metaData;
protected AtomicFieldData.WithOrdinals<?> atomicFieldData;
private BytesValues.WithOrdinals bytesValues;
public FieldData(IndexFieldData.WithOrdinals<?> indexFieldData, MetaData metaData) {
this.indexFieldData = indexFieldData;
this.metaData = metaData;
needsHashes = false;
}
@Override
public MetaData metaData() {
return metaData;
}
public final void setNeedsHashes(boolean needsHashes) {
this.needsHashes = needsHashes;
}
@Override
public void setNextReader(AtomicReaderContext reader) {
atomicFieldData = indexFieldData.load(reader);
if (bytesValues != null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
}
@Override
public BytesValues.WithOrdinals bytesValues() {
if (bytesValues == null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
return bytesValues;
}
}
}
public static class FieldData extends Bytes implements ReaderContextAware {
protected boolean needsHashes;
protected final IndexFieldData<?> indexFieldData;
protected final MetaData metaData;
protected AtomicFieldData<?> atomicFieldData;
private BytesValues bytesValues;
public FieldData(IndexFieldData<?> indexFieldData, MetaData metaData) {
this.indexFieldData = indexFieldData;
this.metaData = metaData;
needsHashes = false;
}
@Override
public MetaData metaData() {
return metaData;
}
public final void setNeedsHashes(boolean needsHashes) {
this.needsHashes = needsHashes;
}
@Override
public void setNextReader(AtomicReaderContext reader) {
atomicFieldData = indexFieldData.load(reader);
if (bytesValues != null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
}
@Override
public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
if (bytesValues == null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
return bytesValues;
}
}
public static class Script extends Bytes {
private final ScriptBytesValues values;
public Script(SearchScript script) {
values = new ScriptBytesValues(script);
}
@Override
public MetaData metaData() {
return MetaData.UNKNOWN;
}
@Override
public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
return values;
}
}
public static class SortedAndUnique extends Bytes implements ReaderContextAware {
private final FieldDataSource delegate;
private final MetaData metaData;
private BytesValues bytesValues;
public SortedAndUnique(FieldDataSource delegate) {
this.delegate = delegate;
this.metaData = MetaData.builder(delegate.metaData()).uniqueness(MetaData.Uniqueness.UNIQUE).build();
}
@Override
public MetaData metaData() {
return metaData;
}
@Override
public void setNextReader(AtomicReaderContext reader) {
bytesValues = null; // order may change per-segment -> reset
}
@Override
public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
if (bytesValues == null) {
bytesValues = delegate.bytesValues();
if (bytesValues.isMultiValued() &&
(!delegate.metaData().uniqueness.unique() || bytesValues.getOrder() != Order.BYTES)) {
bytesValues = new SortedUniqueBytesValues(bytesValues);
}
}
return bytesValues;
}
static class SortedUniqueBytesValues extends BytesValues {
final BytesValues delegate;
int[] sortedIds;
final BytesRefHash bytes;
int numUniqueValues;
int pos = Integer.MAX_VALUE;
public SortedUniqueBytesValues(BytesValues delegate) {
super(delegate.isMultiValued());
this.delegate = delegate;
bytes = new BytesRefHash();
}
@Override
public int setDocument(int docId) {
final int numValues = delegate.setDocument(docId);
if (numValues == 0) {
sortedIds = null;
return 0;
}
bytes.clear();
bytes.reinit();
for (int i = 0; i < numValues; ++i) {
final BytesRef next = delegate.nextValue();
final int hash = delegate.currentValueHash();
assert hash == next.hashCode();
bytes.add(next, hash);
}
numUniqueValues = bytes.size();
sortedIds = bytes.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
pos = 0;
return numUniqueValues;
}
@Override
public BytesRef nextValue() {
bytes.get(sortedIds[pos++], scratch);
return scratch;
}
@Override
public Order getOrder() {
return Order.BYTES;
}
}
}
}
public static abstract class Numeric extends FieldDataSource {
/** Whether the underlying data is floating-point or not. */
public abstract boolean isFloatingPoint();
/** Get the current {@link LongValues}. */
public abstract LongValues longValues();
/** Get the current {@link DoubleValues}. */
public abstract DoubleValues doubleValues();
public static class WithScript extends Numeric {
private final LongValues longValues;
private final DoubleValues doubleValues;
private final FieldDataSource.WithScript.BytesValues bytesValues;
public WithScript(Numeric delegate, SearchScript script) {
this.longValues = new LongValues(delegate, script);
this.doubleValues = new DoubleValues(delegate, script);
this.bytesValues = new FieldDataSource.WithScript.BytesValues(delegate, script);
}
@Override
public boolean isFloatingPoint() {
return true; // even if the underlying source produces longs, scripts can change them to doubles
}
@Override
public BytesValues bytesValues() {
return bytesValues;
}
@Override
public LongValues longValues() {
return longValues;
}
@Override
public DoubleValues doubleValues() {
return doubleValues;
}
@Override
public MetaData metaData() {
return MetaData.UNKNOWN;
}
static class LongValues extends org.elasticsearch.index.fielddata.LongValues {
private final Numeric source;
private final SearchScript script;
public LongValues(Numeric source, SearchScript script) {
super(true);
this.source = source;
this.script = script;
}
@Override
public int setDocument(int docId) {
return source.longValues().setDocument(docId);
}
@Override
public long nextValue() {
script.setNextVar("_value", source.longValues().nextValue());
return script.runAsLong();
}
}
static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues {
private final Numeric source;
private final SearchScript script;
public DoubleValues(Numeric source, SearchScript script) {
super(true);
this.source = source;
this.script = script;
}
@Override
public int setDocument(int docId) {
return source.doubleValues().setDocument(docId);
}
@Override
public double nextValue() {
script.setNextVar("_value", source.doubleValues().nextValue());
return script.runAsDouble();
}
}
}
public static class FieldData extends Numeric implements ReaderContextAware {
protected boolean needsHashes;
protected final IndexNumericFieldData<?> indexFieldData;
protected final MetaData metaData;
protected AtomicNumericFieldData atomicFieldData;
private BytesValues bytesValues;
private LongValues longValues;
private DoubleValues doubleValues;
public FieldData(IndexNumericFieldData<?> indexFieldData, MetaData metaData) {
this.indexFieldData = indexFieldData;
this.metaData = metaData;
needsHashes = false;
}
@Override
public MetaData metaData() {
return metaData;
}
@Override
public boolean isFloatingPoint() {
return indexFieldData.getNumericType().isFloatingPoint();
}
@Override
public final void setNeedsHashes(boolean needsHashes) {
this.needsHashes = needsHashes;
}
@Override
public void setNextReader(AtomicReaderContext reader) {
atomicFieldData = indexFieldData.load(reader);
if (bytesValues != null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
if (longValues != null) {
longValues = atomicFieldData.getLongValues();
}
if (doubleValues != null) {
doubleValues = atomicFieldData.getDoubleValues();
}
}
@Override
public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
if (bytesValues == null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
return bytesValues;
}
@Override
public org.elasticsearch.index.fielddata.LongValues longValues() {
if (longValues == null) {
longValues = atomicFieldData.getLongValues();
}
assert longValues.getOrder() == Order.NUMERIC;
return longValues;
}
@Override
public org.elasticsearch.index.fielddata.DoubleValues doubleValues() {
if (doubleValues == null) {
doubleValues = atomicFieldData.getDoubleValues();
}
assert doubleValues.getOrder() == Order.NUMERIC;
return doubleValues;
}
}
public static class Script extends Numeric {
private final ScriptValueType scriptValueType;
private final ScriptDoubleValues doubleValues;
private final ScriptLongValues longValues;
private final ScriptBytesValues bytesValues;
public Script(SearchScript script, ScriptValueType scriptValueType) {
this.scriptValueType = scriptValueType;
longValues = new ScriptLongValues(script);
doubleValues = new ScriptDoubleValues(script);
bytesValues = new ScriptBytesValues(script);
}
@Override
public MetaData metaData() {
return MetaData.UNKNOWN;
}
@Override
public boolean isFloatingPoint() {
return scriptValueType != null ? scriptValueType.isFloatingPoint() : true;
}
@Override
public LongValues longValues() {
return longValues;
}
@Override
public DoubleValues doubleValues() {
return doubleValues;
}
@Override
public BytesValues bytesValues() {
return bytesValues;
}
}
public static class SortedAndUnique extends Numeric implements ReaderContextAware {
private final Numeric delegate;
private final MetaData metaData;
private LongValues longValues;
private DoubleValues doubleValues;
private BytesValues bytesValues;
public SortedAndUnique(Numeric delegate) {
this.delegate = delegate;
this.metaData = MetaData.builder(delegate.metaData()).uniqueness(MetaData.Uniqueness.UNIQUE).build();
}
@Override
public MetaData metaData() {
return metaData;
}
@Override
public boolean isFloatingPoint() {
return delegate.isFloatingPoint();
}
@Override
public void setNextReader(AtomicReaderContext reader) {
longValues = null; // order may change per-segment -> reset
doubleValues = null;
bytesValues = null;
}
@Override
public org.elasticsearch.index.fielddata.LongValues longValues() {
if (longValues == null) {
longValues = delegate.longValues();
if (longValues.isMultiValued() &&
(!delegate.metaData().uniqueness.unique() || longValues.getOrder() != Order.NUMERIC)) {
longValues = new SortedUniqueLongValues(longValues);
}
}
return longValues;
}
@Override
public org.elasticsearch.index.fielddata.DoubleValues doubleValues() {
if (doubleValues == null) {
doubleValues = delegate.doubleValues();
if (doubleValues.isMultiValued() &&
(!delegate.metaData().uniqueness.unique() || doubleValues.getOrder() != Order.NUMERIC)) {
doubleValues = new SortedUniqueDoubleValues(doubleValues);
}
}
return doubleValues;
}
@Override
public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
if (bytesValues == null) {
bytesValues = delegate.bytesValues();
if (bytesValues.isMultiValued() &&
(!delegate.metaData().uniqueness.unique() || bytesValues.getOrder() != Order.BYTES)) {
bytesValues = new SortedUniqueBytesValues(bytesValues);
}
}
return bytesValues;
}
private static class SortedUniqueLongValues extends FilterLongValues {
int numUniqueValues;
long[] array = new long[2];
int pos = Integer.MAX_VALUE;
final InPlaceMergeSorter sorter = new InPlaceMergeSorter() {
@Override
protected void swap(int i, int j) {
final long tmp = array[i];
array[i] = array[j];
array[j] = tmp;
}
@Override
protected int compare(int i, int j) {
final long l1 = array[i];
final long l2 = array[j];
return Long.compare(l1, l2);
}
};
protected SortedUniqueLongValues(LongValues delegate) {
super(delegate);
}
@Override
public int setDocument(int docId) {
final int numValues = super.setDocument(docId);
array = ArrayUtil.grow(array, numValues);
for (int i = 0; i < numValues; ++i) {
array[i] = super.nextValue();
}
pos = 0;
return numUniqueValues = CollectionUtils.sortAndDedup(array, numValues);
}
@Override
public long nextValue() {
assert pos < numUniqueValues;
return array[pos++];
}
@Override
public Order getOrder() {
return Order.NUMERIC;
}
}
private static class SortedUniqueDoubleValues extends FilterDoubleValues {
int numUniqueValues;
double[] array = new double[2];
int pos = Integer.MAX_VALUE;
final InPlaceMergeSorter sorter = new InPlaceMergeSorter() {
@Override
protected void swap(int i, int j) {
final double tmp = array[i];
array[i] = array[j];
array[j] = tmp;
}
@Override
protected int compare(int i, int j) {
return Double.compare(array[i], array[j]);
}
};
SortedUniqueDoubleValues(DoubleValues delegate) {
super(delegate);
}
@Override
public int setDocument(int docId) {
final int numValues = super.setDocument(docId);
array = ArrayUtil.grow(array, numValues);
for (int i = 0; i < numValues; ++i) {
array[i] = super.nextValue();
}
pos = 0;
return numUniqueValues = CollectionUtils.sortAndDedup(array, numValues);
}
@Override
public double nextValue() {
assert pos < numUniqueValues;
return array[pos++];
}
@Override
public Order getOrder() {
return Order.NUMERIC;
}
}
}
}
// No need to implement ReaderContextAware here, the delegate already takes care of updating data structures
public static class WithScript extends Bytes {
private final BytesValues bytesValues;
public WithScript(FieldDataSource delegate, SearchScript script) {
this.bytesValues = new BytesValues(delegate, script);
}
@Override
public MetaData metaData() {
return MetaData.UNKNOWN;
}
@Override
public BytesValues bytesValues() {
return bytesValues;
}
static class BytesValues extends org.elasticsearch.index.fielddata.BytesValues {
private final FieldDataSource source;
private final SearchScript script;
public BytesValues(FieldDataSource source, SearchScript script) {
super(true);
this.source = source;
this.script = script;
}
@Override
public int setDocument(int docId) {
return source.bytesValues().setDocument(docId);
}
@Override
public BytesRef nextValue() {
BytesRef value = source.bytesValues().nextValue();
script.setNextVar("_value", value.utf8ToString());
scratch.copyChars(script.run().toString());
return scratch;
}
}
}
public static class GeoPoint extends FieldDataSource implements ReaderContextAware {
protected boolean needsHashes;
protected final IndexGeoPointFieldData<?> indexFieldData;
private final MetaData metaData;
protected AtomicGeoPointFieldData<?> atomicFieldData;
private BytesValues bytesValues;
private GeoPointValues geoPointValues;
public GeoPoint(IndexGeoPointFieldData<?> indexFieldData, MetaData metaData) {
this.indexFieldData = indexFieldData;
this.metaData = metaData;
needsHashes = false;
}
@Override
public MetaData metaData() {
return metaData;
}
@Override
public final void setNeedsHashes(boolean needsHashes) {
this.needsHashes = needsHashes;
}
@Override
public void setNextReader(AtomicReaderContext reader) {
atomicFieldData = indexFieldData.load(reader);
if (bytesValues != null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
if (geoPointValues != null) {
geoPointValues = atomicFieldData.getGeoPointValues();
}
}
@Override
public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
if (bytesValues == null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
return bytesValues;
}
public org.elasticsearch.index.fielddata.GeoPointValues geoPointValues() {
if (geoPointValues == null) {
geoPointValues = atomicFieldData.getGeoPointValues();
}
return geoPointValues;
}
}
}

View File

@ -19,17 +19,14 @@
package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
/**
*
*/
public enum ScriptValueType {
STRING(BytesValuesSource.class),
LONG(NumericValuesSource.class),
DOUBLE(NumericValuesSource.class);
STRING(ValuesSource.Bytes.class),
LONG(ValuesSource.Numeric.class),
DOUBLE(ValuesSource.Numeric.class);
final Class<? extends ValuesSource> valuesSourceType;

View File

@ -18,18 +18,795 @@
*/
package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.index.fielddata.BytesValues;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.elasticsearch.common.lucene.ReaderContextAware;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.AtomicFieldData.Order;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.SortedAndUnique.SortedUniqueBytesValues;
import org.elasticsearch.search.aggregations.support.values.ScriptBytesValues;
import org.elasticsearch.search.aggregations.support.values.ScriptDoubleValues;
import org.elasticsearch.search.aggregations.support.values.ScriptLongValues;
import org.elasticsearch.search.internal.SearchContext;
/**
* An abstraction of a source from which values are resolved per document.
*/
public interface ValuesSource {
public abstract class ValuesSource {
FieldDataSource.MetaData metaData();
public static class MetaData {
public static final MetaData UNKNOWN = new MetaData();
public enum Uniqueness {
UNIQUE,
NOT_UNIQUE,
UNKNOWN;
public boolean unique() {
return this == UNIQUE;
}
}
private long maxAtomicUniqueValuesCount = -1;
private boolean multiValued = true;
private Uniqueness uniqueness = Uniqueness.UNKNOWN;
private MetaData() {}
private MetaData(MetaData other) {
this.maxAtomicUniqueValuesCount = other.maxAtomicUniqueValuesCount;
this.multiValued = other.multiValued;
this.uniqueness = other.uniqueness;
}
private MetaData(long maxAtomicUniqueValuesCount, boolean multiValued, Uniqueness uniqueness) {
this.maxAtomicUniqueValuesCount = maxAtomicUniqueValuesCount;
this.multiValued = multiValued;
this.uniqueness = uniqueness;
}
public long maxAtomicUniqueValuesCount() {
return maxAtomicUniqueValuesCount;
}
public boolean multiValued() {
return multiValued;
}
public Uniqueness uniqueness() {
return uniqueness;
}
public static MetaData load(IndexFieldData indexFieldData, SearchContext context) {
MetaData metaData = new MetaData();
metaData.uniqueness = Uniqueness.UNIQUE;
for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
AtomicFieldData fieldData = indexFieldData.load(readerContext);
metaData.multiValued |= fieldData.isMultiValued();
metaData.maxAtomicUniqueValuesCount = Math.max(metaData.maxAtomicUniqueValuesCount, fieldData.getNumberUniqueValues());
}
return metaData;
}
public static Builder builder() {
return new Builder();
}
public static Builder builder(MetaData other) {
return new Builder(other);
}
public static class Builder {
private final MetaData metaData;
private Builder() {
metaData = new MetaData();
}
private Builder(MetaData metaData) {
this.metaData = new MetaData(metaData);
}
public Builder maxAtomicUniqueValuesCount(long maxAtomicUniqueValuesCount) {
metaData.maxAtomicUniqueValuesCount = maxAtomicUniqueValuesCount;
return this;
}
public Builder multiValued(boolean multiValued) {
metaData.multiValued = multiValued;
return this;
}
public Builder uniqueness(Uniqueness uniqueness) {
metaData.uniqueness = uniqueness;
return this;
}
public MetaData build() {
return metaData;
}
}
}
/**
* @return A {@link org.apache.lucene.util.BytesRef bytesref} view over the values that are resolved from this value source.
* Get the current {@link BytesValues}.
*/
BytesValues bytesValues();
public abstract BytesValues bytesValues();
/**
* Ask the underlying data source to provide pre-computed hashes, optional operation.
*/
public void setNeedsHashes(boolean needsHashes) {}
public abstract MetaData metaData();
public static abstract class Bytes extends ValuesSource {
public static abstract class WithOrdinals extends Bytes {
public abstract BytesValues.WithOrdinals bytesValues();
public static class FieldData extends WithOrdinals implements ReaderContextAware {
protected boolean needsHashes;
protected final IndexFieldData.WithOrdinals<?> indexFieldData;
protected final MetaData metaData;
protected AtomicFieldData.WithOrdinals<?> atomicFieldData;
private BytesValues.WithOrdinals bytesValues;
public FieldData(IndexFieldData.WithOrdinals<?> indexFieldData, MetaData metaData) {
this.indexFieldData = indexFieldData;
this.metaData = metaData;
needsHashes = false;
}
@Override
public MetaData metaData() {
return metaData;
}
public final void setNeedsHashes(boolean needsHashes) {
this.needsHashes = needsHashes;
}
@Override
public void setNextReader(AtomicReaderContext reader) {
atomicFieldData = indexFieldData.load(reader);
if (bytesValues != null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
}
@Override
public BytesValues.WithOrdinals bytesValues() {
if (bytesValues == null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
return bytesValues;
}
}
}
public static class FieldData extends Bytes implements ReaderContextAware {
protected boolean needsHashes;
protected final IndexFieldData<?> indexFieldData;
protected final MetaData metaData;
protected AtomicFieldData<?> atomicFieldData;
private BytesValues bytesValues;
public FieldData(IndexFieldData<?> indexFieldData, MetaData metaData) {
this.indexFieldData = indexFieldData;
this.metaData = metaData;
needsHashes = false;
}
@Override
public MetaData metaData() {
return metaData;
}
public final void setNeedsHashes(boolean needsHashes) {
this.needsHashes = needsHashes;
}
@Override
public void setNextReader(AtomicReaderContext reader) {
atomicFieldData = indexFieldData.load(reader);
if (bytesValues != null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
}
@Override
public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
if (bytesValues == null) {
bytesValues = atomicFieldData.getBytesValues(needsHashes);
}
return bytesValues;
}
}
public static class Script extends Bytes {
private final ScriptBytesValues values;
public Script(SearchScript script) {
values = new ScriptBytesValues(script);
}
@Override
public MetaData metaData() {
return MetaData.UNKNOWN;
}
@Override
public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
return values;
}
}
public static class SortedAndUnique extends Bytes implements ReaderContextAware {
private final ValuesSource delegate;
private final MetaData metaData;
private BytesValues bytesValues;
public SortedAndUnique(ValuesSource delegate) {
this.delegate = delegate;
this.metaData = MetaData.builder(delegate.metaData()).uniqueness(MetaData.Uniqueness.UNIQUE).build();
}
@Override
public MetaData metaData() {
return metaData;
}
@Override
public void setNextReader(AtomicReaderContext reader) {
bytesValues = null; // order may change per-segment -> reset
}
@Override
public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
if (bytesValues == null) {
bytesValues = delegate.bytesValues();
if (bytesValues.isMultiValued() &&
(!delegate.metaData().uniqueness.unique() || bytesValues.getOrder() != Order.BYTES)) {
bytesValues = new SortedUniqueBytesValues(bytesValues);
}
}
return bytesValues;
}
static class SortedUniqueBytesValues extends BytesValues {
final BytesValues delegate;
int[] sortedIds;
final BytesRefHash bytes;
int numUniqueValues;
int pos = Integer.MAX_VALUE;
public SortedUniqueBytesValues(BytesValues delegate) {
super(delegate.isMultiValued());
this.delegate = delegate;
bytes = new BytesRefHash();
}
@Override
public int setDocument(int docId) {
final int numValues = delegate.setDocument(docId);
if (numValues == 0) {
sortedIds = null;
return 0;
}
bytes.clear();
bytes.reinit();
for (int i = 0; i < numValues; ++i) {
final BytesRef next = delegate.nextValue();
final int hash = delegate.currentValueHash();
assert hash == next.hashCode();
bytes.add(next, hash);
}
numUniqueValues = bytes.size();
sortedIds = bytes.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
pos = 0;
return numUniqueValues;
}
@Override
public BytesRef nextValue() {
bytes.get(sortedIds[pos++], scratch);
return scratch;
}
@Override
public Order getOrder() {
return Order.BYTES;
}
}
}
}
public static abstract class Numeric extends ValuesSource {
/** Whether the underlying data is floating-point or not. */
public abstract boolean isFloatingPoint();
/** Get the current {@link LongValues}. */
public abstract LongValues longValues();
/** Get the current {@link DoubleValues}. */
public abstract DoubleValues doubleValues();
/**
 * A {@link Numeric} values source that transforms every value produced by a
 * delegate numeric source through a {@link SearchScript}. The raw value is
 * bound to the script as the variable {@code _value} before the script runs.
 */
public static class WithScript extends Numeric {

    private final LongValues longValues;
    private final DoubleValues doubleValues;
    private final ValuesSource.WithScript.BytesValues bytesValues;

    public WithScript(Numeric delegate, SearchScript script) {
        this.longValues = new LongValues(delegate, script);
        this.doubleValues = new DoubleValues(delegate, script);
        this.bytesValues = new ValuesSource.WithScript.BytesValues(delegate, script);
    }

    @Override
    public boolean isFloatingPoint() {
        return true; // even if the underlying source produces longs, scripts can change them to doubles
    }

    @Override
    public BytesValues bytesValues() {
        return bytesValues;
    }

    @Override
    public LongValues longValues() {
        return longValues;
    }

    @Override
    public DoubleValues doubleValues() {
        return doubleValues;
    }

    @Override
    public MetaData metaData() {
        // the script can rewrite values arbitrarily, so nothing is known about them
        return MetaData.UNKNOWN;
    }

    /** Long view over the delegate's values, with the script applied to each value. */
    static class LongValues extends org.elasticsearch.index.fielddata.LongValues {

        private final Numeric source;
        private final SearchScript script;

        public LongValues(Numeric source, SearchScript script) {
            super(true);
            this.source = source;
            this.script = script;
        }

        @Override
        public int setDocument(int docId) {
            return source.longValues().setDocument(docId);
        }

        @Override
        public long nextValue() {
            // expose the raw value to the script and return the script's result
            script.setNextVar("_value", source.longValues().nextValue());
            return script.runAsLong();
        }
    }

    /** Double view over the delegate's values, with the script applied to each value. */
    static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues {

        private final Numeric source;
        private final SearchScript script;

        public DoubleValues(Numeric source, SearchScript script) {
            super(true);
            this.source = source;
            this.script = script;
        }

        @Override
        public int setDocument(int docId) {
            return source.doubleValues().setDocument(docId);
        }

        @Override
        public double nextValue() {
            // expose the raw value to the script and return the script's result
            script.setNextVar("_value", source.doubleValues().nextValue());
            return script.runAsDouble();
        }
    }
}
/**
 * A {@link Numeric} values source backed by index field data. The per-type
 * value views (bytes/long/double) are created lazily on first request and are
 * refreshed on segment change — but only the views that were actually
 * requested before, so unused representations are never loaded.
 */
public static class FieldData extends Numeric implements ReaderContextAware {

    protected boolean needsHashes;
    protected final IndexNumericFieldData<?> indexFieldData;
    protected final MetaData metaData;
    protected AtomicNumericFieldData atomicFieldData;
    private BytesValues bytesValues;
    private LongValues longValues;
    private DoubleValues doubleValues;

    public FieldData(IndexNumericFieldData<?> indexFieldData, MetaData metaData) {
        this.indexFieldData = indexFieldData;
        this.metaData = metaData;
        needsHashes = false;
    }

    @Override
    public MetaData metaData() {
        return metaData;
    }

    @Override
    public boolean isFloatingPoint() {
        return indexFieldData.getNumericType().isFloatingPoint();
    }

    @Override
    public final void setNeedsHashes(boolean needsHashes) {
        this.needsHashes = needsHashes;
    }

    @Override
    public void setNextReader(AtomicReaderContext reader) {
        atomicFieldData = indexFieldData.load(reader);
        // refresh only the views that were requested before; the others stay
        // null and will be lazily created from the new atomic field data
        if (bytesValues != null) {
            bytesValues = atomicFieldData.getBytesValues(needsHashes);
        }
        if (longValues != null) {
            longValues = atomicFieldData.getLongValues();
        }
        if (doubleValues != null) {
            doubleValues = atomicFieldData.getDoubleValues();
        }
    }

    @Override
    public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
        if (bytesValues == null) {
            bytesValues = atomicFieldData.getBytesValues(needsHashes);
        }
        return bytesValues;
    }

    @Override
    public org.elasticsearch.index.fielddata.LongValues longValues() {
        if (longValues == null) {
            longValues = atomicFieldData.getLongValues();
        }
        // field data always hands out per-document values in numeric order
        assert longValues.getOrder() == Order.NUMERIC;
        return longValues;
    }

    @Override
    public org.elasticsearch.index.fielddata.DoubleValues doubleValues() {
        if (doubleValues == null) {
            doubleValues = atomicFieldData.getDoubleValues();
        }
        // field data always hands out per-document values in numeric order
        assert doubleValues.getOrder() == Order.NUMERIC;
        return doubleValues;
    }
}
/**
 * A numeric values source whose values are produced by executing a
 * {@link SearchScript} rather than by reading a field.
 */
public static class Script extends Numeric {

    private final ScriptValueType type;
    private final ScriptLongValues longs;
    private final ScriptDoubleValues doubles;
    private final ScriptBytesValues bytes;

    public Script(SearchScript script, ScriptValueType scriptValueType) {
        this.type = scriptValueType;
        this.longs = new ScriptLongValues(script);
        this.doubles = new ScriptDoubleValues(script);
        this.bytes = new ScriptBytesValues(script);
    }

    @Override
    public boolean isFloatingPoint() {
        // without an explicit value type we must assume the widest numeric type
        if (type == null) {
            return true;
        }
        return type.isFloatingPoint();
    }

    @Override
    public LongValues longValues() {
        return longs;
    }

    @Override
    public DoubleValues doubleValues() {
        return doubles;
    }

    @Override
    public BytesValues bytesValues() {
        return bytes;
    }

    @Override
    public MetaData metaData() {
        // nothing can be assumed about script-generated values
        return MetaData.UNKNOWN;
    }
}
/**
 * A {@link Numeric} view over a delegate source that guarantees, per document,
 * unique values returned in increasing order. When the delegate already
 * provides unique, ordered values it is used as-is; otherwise its values are
 * copied, sorted and deduplicated on every {@code setDocument(int)}.
 */
public static class SortedAndUnique extends Numeric implements ReaderContextAware {

    private final Numeric delegate;
    private final MetaData metaData;
    private LongValues longValues;
    private DoubleValues doubleValues;
    private BytesValues bytesValues;

    public SortedAndUnique(Numeric delegate) {
        this.delegate = delegate;
        // whatever the delegate produces, values exposed by this source are unique
        this.metaData = MetaData.builder(delegate.metaData()).uniqueness(MetaData.Uniqueness.UNIQUE).build();
    }

    @Override
    public MetaData metaData() {
        return metaData;
    }

    @Override
    public boolean isFloatingPoint() {
        return delegate.isFloatingPoint();
    }

    @Override
    public void setNextReader(AtomicReaderContext reader) {
        longValues = null; // order may change per-segment -> reset
        doubleValues = null;
        bytesValues = null;
    }

    @Override
    public org.elasticsearch.index.fielddata.LongValues longValues() {
        if (longValues == null) {
            longValues = delegate.longValues();
            // only wrap when the delegate may actually produce duplicate or
            // out-of-order values; single-valued docs are trivially unique
            if (longValues.isMultiValued() &&
                    (!delegate.metaData().uniqueness.unique() || longValues.getOrder() != Order.NUMERIC)) {
                longValues = new SortedUniqueLongValues(longValues);
            }
        }
        return longValues;
    }

    @Override
    public org.elasticsearch.index.fielddata.DoubleValues doubleValues() {
        if (doubleValues == null) {
            doubleValues = delegate.doubleValues();
            if (doubleValues.isMultiValued() &&
                    (!delegate.metaData().uniqueness.unique() || doubleValues.getOrder() != Order.NUMERIC)) {
                doubleValues = new SortedUniqueDoubleValues(doubleValues);
            }
        }
        return doubleValues;
    }

    @Override
    public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
        if (bytesValues == null) {
            bytesValues = delegate.bytesValues();
            if (bytesValues.isMultiValued() &&
                    (!delegate.metaData().uniqueness.unique() || bytesValues.getOrder() != Order.BYTES)) {
                bytesValues = new SortedUniqueBytesValues(bytesValues);
            }
        }
        return bytesValues;
    }

    /**
     * Sorts and dedups the delegate's per-document long values. Sorting and
     * deduplication are delegated to {@link CollectionUtils#sortAndDedup}.
     * (The unused InPlaceMergeSorter field that previously lived here was
     * dead code and has been removed.)
     */
    private static class SortedUniqueLongValues extends FilterLongValues {

        int numUniqueValues;
        long[] array = new long[2];  // grown on demand per document
        int pos = Integer.MAX_VALUE; // guards against nextValue() before setDocument()

        protected SortedUniqueLongValues(LongValues delegate) {
            super(delegate);
        }

        @Override
        public int setDocument(int docId) {
            final int numValues = super.setDocument(docId);
            array = ArrayUtil.grow(array, numValues);
            for (int i = 0; i < numValues; ++i) {
                array[i] = super.nextValue();
            }
            pos = 0;
            // sortAndDedup sorts the first numValues entries in place and
            // returns the number of distinct values
            return numUniqueValues = CollectionUtils.sortAndDedup(array, numValues);
        }

        @Override
        public long nextValue() {
            assert pos < numUniqueValues;
            return array[pos++];
        }

        @Override
        public Order getOrder() {
            return Order.NUMERIC;
        }
    }

    /**
     * Sorts and dedups the delegate's per-document double values; mirrors
     * {@link SortedUniqueLongValues}. (The unused InPlaceMergeSorter field
     * that previously lived here was dead code and has been removed.)
     */
    private static class SortedUniqueDoubleValues extends FilterDoubleValues {

        int numUniqueValues;
        double[] array = new double[2]; // grown on demand per document
        int pos = Integer.MAX_VALUE;    // guards against nextValue() before setDocument()

        SortedUniqueDoubleValues(DoubleValues delegate) {
            super(delegate);
        }

        @Override
        public int setDocument(int docId) {
            final int numValues = super.setDocument(docId);
            array = ArrayUtil.grow(array, numValues);
            for (int i = 0; i < numValues; ++i) {
                array[i] = super.nextValue();
            }
            pos = 0;
            return numUniqueValues = CollectionUtils.sortAndDedup(array, numValues);
        }

        @Override
        public double nextValue() {
            assert pos < numUniqueValues;
            return array[pos++];
        }

        @Override
        public Order getOrder() {
            return Order.NUMERIC;
        }
    }
}
}
// No need to implement ReaderContextAware here, the delegate already takes care of updating data structures
/**
 * A bytes values source that rewrites each value of a wrapped source through a
 * {@link SearchScript}: the original value is exposed to the script as the
 * variable {@code _value} and the script's result replaces it.
 */
public static class WithScript extends Bytes {

    private final BytesValues values;

    public WithScript(ValuesSource delegate, SearchScript script) {
        values = new BytesValues(delegate, script);
    }

    @Override
    public BytesValues bytesValues() {
        return values;
    }

    @Override
    public MetaData metaData() {
        // the script may produce anything, so nothing can be assumed about the values
        return MetaData.UNKNOWN;
    }

    static class BytesValues extends org.elasticsearch.index.fielddata.BytesValues {

        private final ValuesSource delegate;
        private final SearchScript transform;

        public BytesValues(ValuesSource source, SearchScript script) {
            super(true);
            this.delegate = source;
            this.transform = script;
        }

        @Override
        public int setDocument(int docId) {
            return delegate.bytesValues().setDocument(docId);
        }

        @Override
        public BytesRef nextValue() {
            final BytesRef raw = delegate.bytesValues().nextValue();
            transform.setNextVar("_value", raw.utf8ToString());
            final String rewritten = transform.run().toString();
            scratch.copyChars(rewritten);
            return scratch;
        }
    }
}
/**
 * A values source backed by geo-point field data. Like the numeric field-data
 * source, the bytes and geo-point views are created lazily on first request
 * and refreshed on segment change only when they were requested before.
 */
public static class GeoPoint extends ValuesSource implements ReaderContextAware {

    protected boolean needsHashes;
    protected final IndexGeoPointFieldData<?> indexFieldData;
    private final MetaData metaData;
    protected AtomicGeoPointFieldData<?> atomicFieldData;
    private BytesValues bytesValues;
    private GeoPointValues geoPointValues;

    public GeoPoint(IndexGeoPointFieldData<?> indexFieldData, MetaData metaData) {
        this.indexFieldData = indexFieldData;
        this.metaData = metaData;
        needsHashes = false;
    }

    @Override
    public MetaData metaData() {
        return metaData;
    }

    @Override
    public final void setNeedsHashes(boolean needsHashes) {
        this.needsHashes = needsHashes;
    }

    @Override
    public void setNextReader(AtomicReaderContext reader) {
        atomicFieldData = indexFieldData.load(reader);
        // refresh only the views that were requested before; the others stay
        // null and will be lazily created from the new atomic field data
        if (bytesValues != null) {
            bytesValues = atomicFieldData.getBytesValues(needsHashes);
        }
        if (geoPointValues != null) {
            geoPointValues = atomicFieldData.getGeoPointValues();
        }
    }

    @Override
    public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
        if (bytesValues == null) {
            bytesValues = atomicFieldData.getBytesValues(needsHashes);
        }
        return bytesValues;
    }

    /** Lazily created geo-point view over the current segment's field data. */
    public org.elasticsearch.index.fielddata.GeoPointValues geoPointValues() {
        if (geoPointValues == null) {
            geoPointValues = atomicFieldData.getGeoPointValues();
        }
        return geoPointValues;
    }
}
}

View File

@ -19,49 +19,58 @@
package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
/**
*
*/
public abstract class ValueSourceAggregatorFactory<VS extends ValuesSource> extends AggregatorFactory implements ValuesSourceBased {
public abstract class ValuesSourceAggregatorFactory<VS extends ValuesSource> extends AggregatorFactory {
public static abstract class LeafOnly<VS extends ValuesSource> extends ValueSourceAggregatorFactory<VS> {
public static abstract class LeafOnly<VS extends ValuesSource> extends ValuesSourceAggregatorFactory<VS> {
protected LeafOnly(String name, String type, ValuesSourceConfig<VS> valuesSourceConfig) {
super(name, type, valuesSourceConfig);
}
protected LeafOnly(String name, String type, ValuesSourceConfig<VS> valuesSourceConfig, ValueFormatter formatter, ValueParser parser) {
super(name, type, valuesSourceConfig, formatter, parser);
}
@Override
public AggregatorFactory subFactories(AggregatorFactories subFactories) {
throw new AggregationInitializationException("Aggregator [" + name + "] of type [" + type + "] cannot accept sub-aggregations");
}
}
protected ValuesSourceConfig<VS> valuesSourceConfig;
protected ValuesSourceConfig<VS> config;
protected ValueFormatter formatter;
protected ValueParser parser;
protected ValueSourceAggregatorFactory(String name, String type, ValuesSourceConfig<VS> valuesSourceConfig) {
super(name, type);
this.valuesSourceConfig = valuesSourceConfig;
protected ValuesSourceAggregatorFactory(String name, String type, ValuesSourceConfig<VS> config) {
this(name, type, config, null, null);
}
@Override
public ValuesSourceConfig valuesSourceConfig() {
return valuesSourceConfig;
protected ValuesSourceAggregatorFactory(String name, String type, ValuesSourceConfig<VS> config, ValueFormatter formatter, ValueParser parser) {
super(name, type);
this.config = config;
this.formatter = formatter;
this.parser = parser;
}
@Override
public Aggregator create(AggregationContext context, Aggregator parent, long expectedBucketsCount) {
if (valuesSourceConfig.unmapped()) {
if (config.unmapped()) {
return createUnmapped(context, parent);
}
VS vs = context.valuesSource(valuesSourceConfig, parent == null ? 0 : 1 + parent.depth());
VS vs = context.valuesSource(config, parent == null ? 0 : 1 + parent.depth());
return create(vs, expectedBucketsCount, context, parent);
}
@Override
public void doValidate() {
if (valuesSourceConfig == null || !valuesSourceConfig.valid()) {
valuesSourceConfig = resolveValuesSourceConfigFromAncestors(name, parent, valuesSourceConfig.valueSourceType());
if (config == null || !config.valid()) {
resolveValuesSourceConfigFromAncestors(name, parent, config.valueSourceType());
}
}
@ -69,14 +78,17 @@ public abstract class ValueSourceAggregatorFactory<VS extends ValuesSource> exte
protected abstract Aggregator create(VS valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent);
private static <VS extends ValuesSource> ValuesSourceConfig<VS> resolveValuesSourceConfigFromAncestors(String aggName, AggregatorFactory parent, Class<VS> requiredValuesSourceType) {
private void resolveValuesSourceConfigFromAncestors(String aggName, AggregatorFactory parent, Class<VS> requiredValuesSourceType) {
ValuesSourceConfig config;
while (parent != null) {
if (parent instanceof ValuesSourceBased) {
config = ((ValuesSourceBased) parent).valuesSourceConfig();
if (parent instanceof ValuesSourceAggregatorFactory) {
config = ((ValuesSourceAggregatorFactory) parent).config;
if (config != null && config.valid()) {
if (requiredValuesSourceType == null || requiredValuesSourceType.isAssignableFrom(config.valueSourceType())) {
return (ValuesSourceConfig<VS>) config;
if (requiredValuesSourceType == null || requiredValuesSourceType.isAssignableFrom(config.valueSourceType)) {
this.config = config;
this.formatter = ((ValuesSourceAggregatorFactory) parent).formatter;
this.parser = ((ValuesSourceAggregatorFactory) parent).parser;
return;
}
}
}

View File

@ -1,28 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support;
/**
*
*/
public interface ValuesSourceBased {
ValuesSourceConfig valuesSourceConfig();
}

View File

@ -19,8 +19,6 @@
package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.support.numeric.ValueFormatter;
import org.elasticsearch.search.aggregations.support.numeric.ValueParser;
/**
*
@ -30,8 +28,6 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
final Class<VS> valueSourceType;
FieldContext fieldContext;
SearchScript script;
ValueFormatter formatter;
ValueParser parser;
ScriptValueType scriptValueType;
boolean unmapped = false;
boolean needsHashes = false;
@ -68,24 +64,6 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
return this;
}
public ValuesSourceConfig<VS> formatter(ValueFormatter formatter) {
this.formatter = formatter;
return this;
}
public ValueFormatter formatter() {
return formatter;
}
public ValuesSourceConfig<VS> parser(ValueParser parser) {
this.parser = parser;
return this;
}
public ValueParser parser() {
return parser;
}
public ValuesSourceConfig<VS> scriptValueType(ScriptValueType scriptValueType) {
this.scriptValueType = scriptValueType;
return this;

View File

@ -1,62 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support.bytes;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.search.aggregations.support.FieldDataSource;
import org.elasticsearch.search.aggregations.support.ValuesSource;
/**
*
*/
/**
 * A {@link ValuesSource} adapter that exposes the bytes representation of an
 * underlying {@link FieldDataSource}.
 */
public class BytesValuesSource implements ValuesSource {

    private final FieldDataSource source;

    public BytesValuesSource(FieldDataSource source) {
        this.source = source;
    }

    @Override
    public FieldDataSource.MetaData metaData() {
        return source.metaData();
    }

    @Override
    public BytesValues bytesValues() {
        return source.bytesValues();
    }

    /** Variant whose underlying source also exposes per-segment ordinals. */
    public static final class WithOrdinals extends BytesValuesSource {

        private final FieldDataSource.Bytes.WithOrdinals source;

        public WithOrdinals(FieldDataSource.Bytes.WithOrdinals source) {
            super(source);
            this.source = source;
        }

        @Override
        public BytesValues.WithOrdinals bytesValues() {
            // covariant return: ordinal-aware values from the ordinal-aware source
            return source.bytesValues();
        }
    }
}

View File

@ -16,8 +16,11 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support.numeric;
package org.elasticsearch.search.aggregations.support.format;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
@ -39,6 +42,7 @@ public interface ValueFormatter extends Streamable {
public final static ValueFormatter RAW = new Raw();
public final static ValueFormatter IPv4 = new IPv4Formatter();
public final static ValueFormatter GEOHASH = new GeoHash();
/**
* Uniquely identifies this formatter (used for efficient serialization)
@ -218,7 +222,34 @@ public interface ValueFormatter extends Streamable {
}
}
/**
 * Formats long-encoded geohash cells as geohash strings via
 * {@link GeoHashUtils#toString(long)}. Stateless, so nothing is serialized
 * beyond the formatter id.
 */
static class GeoHash implements ValueFormatter {

    static final byte ID = 8; // unique stream id for (de)serialization

    @Override
    public byte id() {
        return ID;
    }

    @Override
    public String format(long value) {
        return GeoHashUtils.toString(value);
    }

    @Override
    public String format(double value) {
        // geohashes are long-encoded; a double is only ever a widened long here
        return format((long) value);
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        // stateless - nothing to read
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // stateless - nothing to write
    }
}
}

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support.numeric;
package org.elasticsearch.search.aggregations.support.format;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.io.stream.StreamInput;
@ -37,6 +37,7 @@ public class ValueFormatterStreams {
case ValueFormatter.IPv4Formatter.ID: return ValueFormatter.IPv4;
case ValueFormatter.DateTime.ID: formatter = new ValueFormatter.DateTime(); break;
case ValueFormatter.Number.Pattern.ID: formatter = new ValueFormatter.Number.Pattern(); break;
case ValueFormatter.GeoHash.ID: formatter = ValueFormatter.GEOHASH; break;
default: throw new ElasticsearchIllegalArgumentException("Unknown value formatter with id [" + id + "]");
}
formatter.readFrom(in);

View File

@ -16,15 +16,21 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support.numeric;
package org.elasticsearch.search.aggregations.support.format;
import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.internal.SearchContext;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
/**
@ -32,7 +38,8 @@ import java.util.concurrent.TimeUnit;
*/
public interface ValueParser {
static final ValueParser IPv4 = new ValueParser.IPv4();
static final ValueParser IPv4 = new IPv4();
static final ValueParser RAW = new Raw();
long parseLong(String value, SearchContext searchContext);
@ -111,4 +118,56 @@ public interface ValueParser {
return parseLong(value, searchContext);
}
}
/**
 * Parses values using the plain JDK number parsers, with no custom pattern.
 * Singleton — use the shared {@code ValueParser.RAW} instance.
 */
static class Raw implements ValueParser {

    private Raw() {
    }

    @Override
    public long parseLong(String value, SearchContext searchContext) {
        return Long.parseLong(value);
    }

    @Override
    public double parseDouble(String value, SearchContext searchContext) {
        return Double.parseDouble(value);
    }
}
/**
 * Base class for parsers backed by a {@link NumberFormat}.
 */
public static abstract class Number implements ValueParser {

    NumberFormat format;

    Number(NumberFormat format) {
        this.format = format;
    }

    /** Parses values according to a {@link DecimalFormat} pattern (root locale). */
    public static class Pattern extends Number {

        private static final DecimalFormatSymbols SYMBOLS = new DecimalFormatSymbols(Locale.ROOT);

        public Pattern(String pattern) {
            super(new DecimalFormat(pattern, SYMBOLS));
        }

        @Override
        public long parseLong(String value, SearchContext searchContext) {
            try {
                return format.parse(value).longValue();
            } catch (ParseException nfe) {
                // NOTE(review): the offending value and the ParseException cause are
                // dropped here - consider including them if the exception supports it
                throw new AggregationExecutionException("Invalid number format [" + ((DecimalFormat) format).toPattern() + "]");
            }
        }

        @Override
        public double parseDouble(String value, SearchContext searchContext) {
            try {
                return format.parse(value).doubleValue();
            } catch (ParseException nfe) {
                // NOTE(review): same loss of value/cause as in parseLong above
                throw new AggregationExecutionException("Invalid number format [" + ((DecimalFormat) format).toPattern() + "]");
            }
        }
    }
}
}

View File

@ -1,51 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support.geopoints;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.search.aggregations.support.FieldDataSource;
import org.elasticsearch.search.aggregations.support.ValuesSource;
/**
* A source of geo points.
*/
/**
 * A {@link ValuesSource} adapter over a geo-point {@link FieldDataSource},
 * exposing both the bytes view and the typed geo-point view.
 */
public final class GeoPointValuesSource implements ValuesSource {

    private final FieldDataSource.GeoPoint source;

    public GeoPointValuesSource(FieldDataSource.GeoPoint source) {
        this.source = source;
    }

    @Override
    public FieldDataSource.MetaData metaData() {
        return source.metaData();
    }

    @Override
    public BytesValues bytesValues() {
        return source.bytesValues();
    }

    /** Typed geo-point view over the underlying field data. */
    public final GeoPointValues values() {
        return source.geoPointValues();
    }
}

View File

@ -1,73 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support.numeric;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.search.aggregations.support.FieldDataSource;
import org.elasticsearch.search.aggregations.support.ValuesSource;
/**
* A source of numeric data.
*/
/**
 * A {@link ValuesSource} adapter over a numeric {@link FieldDataSource},
 * additionally carrying the (possibly null) value formatter and parser used to
 * render and read values of this source.
 */
public final class NumericValuesSource implements ValuesSource {

    private final FieldDataSource.Numeric source;
    private final ValueFormatter formatter; // may be null - no formatting configured
    private final ValueParser parser;       // may be null - no parsing configured

    public NumericValuesSource(FieldDataSource.Numeric source, @Nullable ValueFormatter formatter, @Nullable ValueParser parser) {
        this.source = source;
        this.formatter = formatter;
        this.parser = parser;
    }

    @Override
    public FieldDataSource.MetaData metaData() {
        return source.metaData();
    }

    @Override
    public BytesValues bytesValues() {
        return source.bytesValues();
    }

    /** Whether the underlying field holds floating-point numbers. */
    public boolean isFloatingPoint() {
        return source.isFloatingPoint();
    }

    public LongValues longValues() {
        return source.longValues();
    }

    public DoubleValues doubleValues() {
        return source.doubleValues();
    }

    /** @return the configured formatter, or null when none was configured */
    public ValueFormatter formatter() {
        return formatter;
    }

    /** @return the configured parser, or null when none was configured */
    public ValueParser parser() {
        return parser;
    }
}

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support.bytes;
package org.elasticsearch.search.aggregations.support.values;
import com.google.common.collect.Iterators;
import org.apache.lucene.util.BytesRef;

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support.numeric;
package org.elasticsearch.search.aggregations.support.values;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.index.fielddata.DoubleValues;

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support.numeric;
package org.elasticsearch.search.aggregations.support.values;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.index.fielddata.LongValues;

View File

@ -158,6 +158,48 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(bucket.getDocCount(), equalTo(numDocs - 5L));
}
@Test
public void singleValueField_WithFormat() throws Exception {
    // a custom value format ("#") on a range agg over integer boundaries: the
    // bucket keys ("*-3", "3-6", "6-*") and from/to values must be unchanged
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(range("range")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .addUnboundedTo(3)
                    .addRange(3, 6)
                    .addUnboundedFrom(6)
                    .format("#")
            )
            .execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    assertThat(range.getBuckets().size(), equalTo(3));

    // unbounded-low bucket: (-inf, 3)
    Range.Bucket bucket = range.getBucketByKey("*-3");
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-3"));
    assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
    assertThat(bucket.getDocCount(), equalTo(2l));

    // middle bucket: [3, 6)
    bucket = range.getBucketByKey("3-6");
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("3-6"));
    assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
    assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
    assertThat(bucket.getDocCount(), equalTo(3l));

    // unbounded-high bucket: [6, +inf)
    bucket = range.getBucketByKey("6-*");
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6-*"));
    assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
    assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getDocCount(), equalTo(numDocs - 5L));
}
@Test
public void singleValueField_WithCustomKey() throws Exception {
SearchResponse response = client().prepareSearch("idx")

View File

@ -116,7 +116,7 @@ public class FieldDataSourceTests extends ElasticsearchTestCase {
@Test
public void bytesValuesWithScript() {
final BytesValues values = randomBytesValues();
FieldDataSource source = new FieldDataSource.Bytes() {
ValuesSource source = new ValuesSource.Bytes() {
@Override
public BytesValues bytesValues() {
@ -130,12 +130,12 @@ public class FieldDataSourceTests extends ElasticsearchTestCase {
};
SearchScript script = randomScript();
assertConsistent(new FieldDataSource.WithScript.BytesValues(source, script));
assertConsistent(new ValuesSource.WithScript.BytesValues(source, script));
}
@Test
public void sortedUniqueBytesValues() {
assertConsistent(new FieldDataSource.Bytes.SortedAndUnique.SortedUniqueBytesValues(randomBytesValues()));
assertConsistent(new ValuesSource.Bytes.SortedAndUnique.SortedUniqueBytesValues(randomBytesValues()));
}
}

View File

@ -24,9 +24,9 @@ import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.support.bytes.ScriptBytesValues;
import org.elasticsearch.search.aggregations.support.numeric.ScriptDoubleValues;
import org.elasticsearch.search.aggregations.support.numeric.ScriptLongValues;
import org.elasticsearch.search.aggregations.support.values.ScriptBytesValues;
import org.elasticsearch.search.aggregations.support.values.ScriptDoubleValues;
import org.elasticsearch.search.aggregations.support.values.ScriptLongValues;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;