Aggregation cleanup

- consolidated value source parsing under a single parser that is reused in all the value source aggs parsers (a toy sketch of the pattern follows below)
- consolidated include/exclude parsing under a single parser
- cleaned up value format handling, to have consistent behaviour across all value source aggs

uboness 2014-04-03 12:00:35 +02:00
parent c6caeea887
commit c9b0b04f55
57 changed files with 1199 additions and 1290 deletions
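The delegation pattern this commit converges on, as a self-contained toy sketch. Only the token()/config() contract mirrors the real ValuesSourceParser; the class names, the Map-based request, and the exact key list here are illustrative:

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

// Toy model of the consolidation: one shared parser owns the keys common to all
// value source aggs (field, script, lang, params, format), while each agg parser
// keeps only its own keys.
class SharedValuesSourceParser {
    private final Map<String, Object> common = new HashMap<>();

    // returns true if the key was recognized and consumed, like vsParser.token(...)
    boolean token(String name, Object value) {
        switch (name) {
            case "field": case "script": case "lang": case "params": case "format":
                common.put(name, value);
                return true;
            default:
                return false;
        }
    }

    Map<String, Object> config() {
        return common;
    }
}

public class ConsolidationSketch {
    public static void main(String[] args) {
        Map<String, Object> request = new LinkedHashMap<>();
        request.put("field", "timestamp");
        request.put("format", "yyyy-MM-dd");
        request.put("interval", "day"); // agg-specific, stays in the agg parser

        SharedValuesSourceParser vsParser = new SharedValuesSourceParser();
        Map<String, Object> aggSpecific = new LinkedHashMap<>();
        for (Map.Entry<String, Object> entry : request.entrySet()) {
            if (vsParser.token(entry.getKey(), entry.getValue())) {
                continue; // handled once, identically, for every values source agg
            }
            aggSpecific.put(entry.getKey(), entry.getValue());
        }
        System.out.println("shared: " + vsParser.config());   // field, format
        System.out.println("agg-specific: " + aggSpecific);   // interval
    }
}

Each parser diff below replaces a hand-rolled copy of the field/script/format handling with exactly this delegation.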

src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java

@@ -449,12 +449,12 @@ public class GeoHashUtils {
      */
     public static String toString(long geohashAsLong)
     {
-        int precision= (int) (geohashAsLong&15);
-        char[] chars=new char[precision];
-        geohashAsLong>>=4;
-        for (int i = precision-1; i >=0 ; i--) {
-            chars[i]= BASE_32[(int) (geohashAsLong&31)];
-            geohashAsLong>>=5;
+        int precision = (int) (geohashAsLong&15);
+        char[] chars = new char[precision];
+        geohashAsLong >>= 4;
+        for (int i = precision - 1; i >= 0 ; i--) {
+            chars[i] = BASE_32[(int) (geohashAsLong & 31)];
+            geohashAsLong >>= 5;
         }
         return new String(chars);
     }
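For context: toString above unpacks a geohash stored in a single long. The lowest 4 bits hold the precision (character count) and each base-32 character occupies 5 bits above them. A self-contained round-trip sketch of that layout; the encoder below illustrates the bit layout and is not the encoder in GeoHashUtils:

public class GeoHashLongSketch {
    // standard geohash base-32 alphabet (no a, i, l, o)
    static final char[] BASE_32 = "0123456789bcdefghjkmnpqrstuvwxyz".toCharArray();

    // Illustrative encoder matching the decode in the diff: characters in order,
    // 5 bits each, then the precision packed into the lowest 4 bits.
    static long toLong(String geohash) {
        long v = 0;
        for (int i = 0; i < geohash.length(); i++) {
            v = (v << 5) | indexOf(geohash.charAt(i));
        }
        return (v << 4) | geohash.length();
    }

    static int indexOf(char c) {
        for (int i = 0; i < BASE_32.length; i++) {
            if (BASE_32[i] == c) return i;
        }
        throw new IllegalArgumentException("not a geohash char: " + c);
    }

    // same algorithm as the cleaned-up method above
    static String toString(long geohashAsLong) {
        int precision = (int) (geohashAsLong & 15);
        char[] chars = new char[precision];
        geohashAsLong >>= 4;
        for (int i = precision - 1; i >= 0; i--) {
            chars[i] = BASE_32[(int) (geohashAsLong & 31)];
            geohashAsLong >>= 5;
        }
        return new String(chars);
    }

    public static void main(String[] args) {
        System.out.println(toString(toLong("u4pruyd"))); // prints u4pruyd
    }
}

The 4 low bits bound precision at 15, and at 5 bits per character a 12-character hash already fills the long.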

src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java

@@ -39,7 +39,7 @@ public class AggregatorFactories {

     public static final AggregatorFactories EMPTY = new Empty();

-    private final AggregatorFactory[] factories;
+    private AggregatorFactory[] factories;

     public static Builder builder() {
         return new Builder();

src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java

@@ -74,6 +74,11 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St
         public BytesReference stream() {
             return stream;
         }
+
+        @Override
+        public String toString() {
+            return name;
+        }
     }

     protected static class ReduceContext {

src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java

@@ -21,8 +21,10 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.fielddata.*;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.fielddata.BytesValues;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.GeoPointValues;
+import org.elasticsearch.index.fielddata.LongValues;
 import org.elasticsearch.index.query.GeoBoundingBoxFilterBuilder;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
@@ -53,21 +55,19 @@ public class GeoHashGridParser implements Aggregator.Parser {
     @Override
     public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {

-        String field = null;
+        ValuesSourceParser vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoHashGrid.TYPE, context).build();
+
         int precision = DEFAULT_PRECISION;
         int requiredSize = DEFAULT_MAX_NUM_CELLS;
         int shardSize = -1;

         XContentParser.Token token;
         String currentFieldName = null;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
-            } else if (token == XContentParser.Token.VALUE_STRING) {
-                if ("field".equals(currentFieldName)) {
-                    field = parser.text();
-                }
+            } else if (vsParser.token(currentFieldName, token, parser)) {
+                continue;
             } else if (token == XContentParser.Token.VALUE_NUMBER) {
                 if ("precision".equals(currentFieldName)) {
                     precision = parser.intValue();
@@ -76,7 +76,6 @@ public class GeoHashGridParser implements Aggregator.Parser {
                 } else if ("shard_size".equals(currentFieldName) || "shardSize".equals(currentFieldName)) {
                     shardSize = parser.intValue();
                 }
-
             }
         }
@@ -97,20 +96,8 @@ public class GeoHashGridParser implements Aggregator.Parser {
             shardSize = requiredSize;
         }

-        ValuesSourceConfig<ValuesSource.GeoPoint> config = new ValuesSourceConfig<>(ValuesSource.GeoPoint.class);
-
-        if (field == null) {
-            return new GeoGridFactory(aggregationName, config, precision, requiredSize, shardSize);
-        }
-
-        FieldMapper<?> mapper = context.smartNameFieldMapper(field);
-        if (mapper == null) {
-            config.unmapped(true);
-            return new GeoGridFactory(aggregationName, config, precision, requiredSize, shardSize);
-        }
-
-        IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
-        config.fieldContext(new FieldContext(field, indexFieldData));
-        return new GeoGridFactory(aggregationName, config, precision, requiredSize, shardSize);
+        return new GeoGridFactory(aggregationName, vsParser.config(), precision, requiredSize, shardSize);
     }
@@ -120,9 +107,8 @@ public class GeoHashGridParser implements Aggregator.Parser {
         private int requiredSize;
         private int shardSize;

-        public GeoGridFactory(String name, ValuesSourceConfig<ValuesSource.GeoPoint> valueSourceConfig,
-                              int precision, int requiredSize, int shardSize) {
-            super(name, InternalGeoHashGrid.TYPE.name(), valueSourceConfig);
+        public GeoGridFactory(String name, ValuesSourceConfig<ValuesSource.GeoPoint> config, int precision, int requiredSize, int shardSize) {
+            super(name, InternalGeoHashGrid.TYPE.name(), config);
             this.precision = precision;
             this.requiredSize = requiredSize;
             this.shardSize = shardSize;

src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java

@@ -21,28 +21,19 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
 import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.joda.DateMathParser;
 import org.elasticsearch.common.rounding.DateTimeUnit;
 import org.elasticsearch.common.rounding.TimeZoneRounding;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.fielddata.IndexFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.index.mapper.core.DateFieldMapper;
-import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.SearchParseException;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.FieldContext;
-import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
-import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
-import org.elasticsearch.search.aggregations.support.format.ValueParser;
+import org.elasticsearch.search.aggregations.support.ValueType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
 import org.elasticsearch.search.internal.SearchContext;
 import org.joda.time.DateTimeZone;

 import java.io.IOException;
-import java.util.Map;

 /**
  *
@@ -82,12 +73,12 @@ public class DateHistogramParser implements Aggregator.Parser {
     @Override
     public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {

-        ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
+        ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalDateHistogram.TYPE, context)
+                .targetValueType(ValueType.DATE)
+                .requiresSortedValues(true)
+                .formattable(true)
+                .build();

-        String field = null;
-        String script = null;
-        String scriptLang = null;
-        Map<String, Object> scriptParams = null;
         boolean keyed = false;
         long minDocCount = 1;
         ExtendedBounds extendedBounds = null;
@@ -96,24 +87,18 @@ public class DateHistogramParser implements Aggregator.Parser {
         boolean preZoneAdjustLargeInterval = false;
         DateTimeZone preZone = DateTimeZone.UTC;
         DateTimeZone postZone = DateTimeZone.UTC;
-        String format = null;
         long preOffset = 0;
         long postOffset = 0;
-        boolean assumeSorted = false;

         XContentParser.Token token;
         String currentFieldName = null;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
+            } else if (vsParser.token(currentFieldName, token, parser)) {
+                continue;
             } else if (token == XContentParser.Token.VALUE_STRING) {
-                if ("field".equals(currentFieldName)) {
-                    field = parser.text();
-                } else if ("script".equals(currentFieldName)) {
-                    script = parser.text();
-                } else if ("lang".equals(currentFieldName)) {
-                    scriptLang = parser.text();
-                } else if ("time_zone".equals(currentFieldName) || "timeZone".equals(currentFieldName)) {
+                if ("time_zone".equals(currentFieldName) || "timeZone".equals(currentFieldName)) {
                     preZone = parseZone(parser.text());
                 } else if ("pre_zone".equals(currentFieldName) || "preZone".equals(currentFieldName)) {
                     preZone = parseZone(parser.text());
@@ -125,16 +110,12 @@ public class DateHistogramParser implements Aggregator.Parser {
                     postOffset = parseOffset(parser.text());
                 } else if ("interval".equals(currentFieldName)) {
                     interval = parser.text();
-                } else if ("format".equals(currentFieldName)) {
-                    format = parser.text();
                 } else {
                     throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
                 }
             } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
                 if ("keyed".equals(currentFieldName)) {
                     keyed = parser.booleanValue();
-                } else if ("script_values_sorted".equals(currentFieldName) || "scriptValuesSorted".equals(currentFieldName)) {
-                    assumeSorted = parser.booleanValue();
                 } else if ("pre_zone_adjust_large_interval".equals(currentFieldName) || "preZoneAdjustLargeInterval".equals(currentFieldName)) {
                     preZoneAdjustLargeInterval = parser.booleanValue();
                 } else {
@@ -153,9 +134,7 @@ public class DateHistogramParser implements Aggregator.Parser {
                     throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
                 }
             } else if (token == XContentParser.Token.START_OBJECT) {
-                if ("params".equals(currentFieldName)) {
-                    scriptParams = parser.map();
-                } else if ("order".equals(currentFieldName)) {
+                if ("order".equals(currentFieldName)) {
                     while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                         if (token == XContentParser.Token.FIELD_NAME) {
                             currentFieldName = parser.currentName();
@@ -204,17 +183,6 @@ public class DateHistogramParser implements Aggregator.Parser {
             throw new SearchParseException(context, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");
         }

-        SearchScript searchScript = null;
-        if (script != null) {
-            searchScript = context.scriptService().search(context.lookup(), scriptLang, script, scriptParams);
-            config.script(searchScript);
-        }
-
-        if (!assumeSorted) {
-            // we need values to be sorted and unique for efficiency
-            config.ensureSorted(true);
-        }
-
         TimeZoneRounding.Builder tzRoundingBuilder;
         DateTimeUnit dateTimeUnit = dateFieldUnits.get(interval);
         if (dateTimeUnit != null) {
@@ -230,38 +198,8 @@ public class DateHistogramParser implements Aggregator.Parser {
                 .preOffset(preOffset).postOffset(postOffset)
                 .build();

-        ValueFormatter valueFormatter = format != null ? new ValueFormatter.DateTime(format) : null;
-
-        if (field == null) {
-            if (searchScript != null) {
-                ValueParser valueParser = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER, DateFieldMapper.Defaults.TIME_UNIT));
-                return new HistogramAggregator.Factory(aggregationName, config, valueFormatter, valueParser, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
-            }
-
-            // falling back on the get field data context
-            return new HistogramAggregator.Factory(aggregationName, config, valueFormatter, null, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
-        }
-
-        FieldMapper<?> mapper = context.smartNameFieldMapper(field);
-        if (mapper == null) {
-            config.unmapped(true);
-            valueFormatter = format == null ? new ValueFormatter.DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER) : null;
-            ValueParser valueParser = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER, DateFieldMapper.Defaults.TIME_UNIT));
-            return new HistogramAggregator.Factory(aggregationName, config, valueFormatter, valueParser, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
-        }
-
-        if (!(mapper instanceof DateFieldMapper)) {
-            throw new SearchParseException(context, "date histogram can only be aggregated on date fields but [" + field + "] is not a date field");
-        }
-
-        IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
-        config.fieldContext(new FieldContext(field, indexFieldData));
-
-        if (format == null) {
-            valueFormatter = new ValueFormatter.DateTime(((DateFieldMapper) mapper).dateTimeFormatter());
-        }
-
-        ValueParser valueParser = new ValueParser.DateMath(new DateMathParser(((DateFieldMapper) mapper).dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT));
-        return new HistogramAggregator.Factory(aggregationName, config, valueFormatter, valueParser, rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
+        return new HistogramAggregator.Factory(aggregationName, vsParser.config(), rounding, order, keyed, minDocCount, extendedBounds, InternalDateHistogram.FACTORY);
     }

     private static InternalOrder resolveOrder(String key, boolean asc) {

src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java

@@ -47,11 +47,12 @@ public class ExtendedBounds {
     }

     void processAndValidate(String aggName, SearchContext context, ValueParser parser) {
+        assert parser != null;
         if (minAsStr != null) {
-            min = parser != null ? parser.parseLong(minAsStr, context) : Long.parseLong(minAsStr);
+            min = parser.parseLong(minAsStr, context);
         }
         if (maxAsStr != null) {
-            max = parser != null ? parser.parseLong(maxAsStr, context) : Long.parseLong(maxAsStr);
+            max = parser.parseLong(maxAsStr, context);
         }
         if (min != null && max != null && min.compareTo(max) > 0) {
             throw new SearchParseException(context, "[extended_bounds.min][" + min + "] cannot be greater than " +

src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java

@@ -44,11 +44,11 @@ import java.util.List;
 public class HistogramAggregator extends BucketsAggregator {

     private final ValuesSource.Numeric valuesSource;
-    private final ValueFormatter formatter;
-    private final ValueParser parser;
+    private final @Nullable ValueFormatter formatter;
     private final Rounding rounding;
     private final InternalOrder order;
     private final boolean keyed;
     private final long minDocCount;
     private final ExtendedBounds extendedBounds;
     private final InternalHistogram.Factory histogramFactory;
@@ -58,7 +58,7 @@ public class HistogramAggregator extends BucketsAggregator {
     public HistogramAggregator(String name, AggregatorFactories factories, Rounding rounding, InternalOrder order,
                                boolean keyed, long minDocCount, @Nullable ExtendedBounds extendedBounds,
-                               @Nullable ValuesSource.Numeric valuesSource, ValueFormatter formatter, ValueParser parser,
+                               @Nullable ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
                                long initialCapacity, InternalHistogram.Factory<?> histogramFactory,
                                AggregationContext aggregationContext, Aggregator parent) {
@@ -70,7 +70,6 @@ public class HistogramAggregator extends BucketsAggregator {
         this.extendedBounds = extendedBounds;
         this.valuesSource = valuesSource;
         this.formatter = formatter;
-        this.parser = parser;
         this.histogramFactory = histogramFactory;

         bucketOrds = new LongHash(initialCapacity, aggregationContext.bigArrays());
@@ -147,11 +146,11 @@ public class HistogramAggregator extends BucketsAggregator {
         private final ExtendedBounds extendedBounds;
         private final InternalHistogram.Factory<?> histogramFactory;

-        public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> config, ValueFormatter formatter, ValueParser parser,
+        public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> config,
                        Rounding rounding, InternalOrder order, boolean keyed, long minDocCount,
                        ExtendedBounds extendedBounds, InternalHistogram.Factory<?> histogramFactory) {
-            super(name, histogramFactory.type(), config, formatter, parser);
+            super(name, histogramFactory.type(), config);
             this.rounding = rounding;
             this.order = order;
             this.keyed = keyed;
@@ -162,7 +161,7 @@ public class HistogramAggregator extends BucketsAggregator {
         @Override
         protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) {
-            return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, null, null, null, null, 0, histogramFactory, aggregationContext, parent);
+            return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, null, null, config.formatter(), 0, histogramFactory, aggregationContext, parent);
         }

         @Override
@@ -175,10 +174,10 @@ public class HistogramAggregator extends BucketsAggregator {
             ExtendedBounds roundedBounds = null;
             if (extendedBounds != null) {
                 // we need to process & validate here using the parser
-                extendedBounds.processAndValidate(name, aggregationContext.searchContext(), parser);
+                extendedBounds.processAndValidate(name, aggregationContext.searchContext(), config.parser());
                 roundedBounds = extendedBounds.round(rounding);
             }
-            return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, roundedBounds, valuesSource, formatter, parser, 50, histogramFactory, aggregationContext, parent);
+            return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, roundedBounds, valuesSource, config.formatter(), 50, histogramFactory, aggregationContext, parent);
         }
     }
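The Factory above no longer receives a formatter/parser pair; both now come off the ValuesSourceConfig. A minimal sketch of what those accessors imply. The shapes below are inferred from the call sites in this commit (config.format(), config.formatter(), config.parser()); the real classes carry much more state and these are simplified stand-ins:

// Minimal stand-ins, assumed from the call sites in this commit.
interface ValueFormatter {
    String format(long value);
}

interface ValueParser {
    long parseLong(String value);
    ValueParser RAW = Long::parseLong; // assumed fallback: parse the raw number
}

// Assumed: ValueFormat bundles the matching formatter and parser for one format.
class ValueFormat {
    private final ValueFormatter formatter;
    private final ValueParser parser;

    ValueFormat(ValueFormatter formatter, ValueParser parser) {
        this.formatter = formatter;
        this.parser = parser;
    }

    ValueFormatter formatter() { return formatter; }
    ValueParser parser() { return parser; }
}

// Assumed: the config exposes the format plus convenience accessors, so factories
// like HistogramAggregator.Factory can just ask for config.formatter()/parser().
class ValuesSourceConfigSketch {
    private ValueFormat format; // null when the agg carries no explicit format

    ValueFormat format() { return format; }
    ValueFormatter formatter() { return format != null ? format.formatter() : null; }
    ValueParser parser() { return format != null ? format.parser() : ValueParser.RAW; }
}

public class FormatAccessSketch {
    public static void main(String[] args) {
        ValuesSourceConfigSketch config = new ValuesSourceConfigSketch();
        // with no explicit format, callers still get a usable parser
        System.out.println(config.parser().parseLong("42")); // 42
        System.out.println(config.formatter());              // null -> raw keys
    }
}

Bundling the pair means a factory can no longer end up with a formatter but no matching parser, which is the inconsistency the commit message calls out.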

src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java

@@ -21,20 +21,15 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.fielddata.IndexFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.search.SearchParseException;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.FieldContext;
-import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
-import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
+import org.elasticsearch.search.aggregations.support.ValueType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
 import org.elasticsearch.search.aggregations.support.format.ValueParser;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
-import java.util.Map;

 /**
  * Parses the histogram request
@@ -51,18 +46,16 @@ public class HistogramParser implements Aggregator.Parser {
     @Override
     public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {

-        ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
+        ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalHistogram.TYPE, context)
+                .requiresSortedValues(true)
+                .targetValueType(ValueType.NUMERIC)
+                .formattable(true)
+                .build();

-        String field = null;
-        String script = null;
-        String scriptLang = null;
-        Map<String, Object> scriptParams = null;
         boolean keyed = false;
         long minDocCount = 1;
         InternalOrder order = (InternalOrder) InternalOrder.KEY_ASC;
         long interval = -1;
-        boolean assumeSorted = false;
-        String format = null;
         ExtendedBounds extendedBounds = null;

         XContentParser.Token token;
@@ -70,16 +63,8 @@ public class HistogramParser implements Aggregator.Parser {
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
-            } else if (token == XContentParser.Token.VALUE_STRING) {
-                if ("field".equals(currentFieldName)) {
-                    field = parser.text();
-                } else if ("script".equals(currentFieldName)) {
-                    script = parser.text();
-                } else if ("lang".equals(currentFieldName)) {
-                    scriptLang = parser.text();
-                } else {
-                    throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
-                }
+            } else if (vsParser.token(currentFieldName, token, parser)) {
+                continue;
             } else if (token == XContentParser.Token.VALUE_NUMBER) {
                 if ("interval".equals(currentFieldName)) {
                     interval = parser.longValue();
@@ -91,15 +76,11 @@ public class HistogramParser implements Aggregator.Parser {
             } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
                 if ("keyed".equals(currentFieldName)) {
                     keyed = parser.booleanValue();
-                } else if ("script_values_sorted".equals(currentFieldName) || "scriptValuesSorted".equals(currentFieldName)) {
-                    assumeSorted = parser.booleanValue();
                 } else {
                     throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].");
                 }
             } else if (token == XContentParser.Token.START_OBJECT) {
-                if ("params".equals(currentFieldName)) {
-                    scriptParams = parser.map();
-                } else if ("order".equals(currentFieldName)) {
+                if ("order".equals(currentFieldName)) {
                     while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                         if (token == XContentParser.Token.FIELD_NAME) {
                             currentFieldName = parser.currentName();
@@ -143,35 +124,10 @@ public class HistogramParser implements Aggregator.Parser {
         if (extendedBounds != null) {
             // with numeric histogram, we can process here and fail fast if the bounds are invalid
-            extendedBounds.processAndValidate(aggregationName, context, null);
+            extendedBounds.processAndValidate(aggregationName, context, ValueParser.RAW);
         }

-        if (script != null) {
-            config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
-        }
-
-        if (!assumeSorted) {
-            // we need values to be sorted and unique for efficiency
-            config.ensureSorted(true);
-        }
-
-        if (field == null) {
-            return new HistogramAggregator.Factory(aggregationName, config, null, null, rounding, order, keyed, minDocCount, extendedBounds, InternalHistogram.FACTORY);
-        }
-
-        FieldMapper<?> mapper = context.smartNameFieldMapper(field);
-        if (mapper == null) {
-            config.unmapped(true);
-            return new HistogramAggregator.Factory(aggregationName, config, null, null, rounding, order, keyed, minDocCount, extendedBounds, InternalHistogram.FACTORY);
-        }
-
-        IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
-        config.fieldContext(new FieldContext(field, indexFieldData));
-
-        ValueFormatter valueFormatter = format == null ? ValueFormatter.RAW : new ValueFormatter.Number.Pattern(format);
-        ValueParser valueParser = format == null ? ValueParser.RAW : new ValueParser.Number.Pattern(format);
-        return new HistogramAggregator.Factory(aggregationName, config, valueFormatter, valueParser, rounding, order, keyed, minDocCount, extendedBounds, InternalHistogram.FACTORY);
+        return new HistogramAggregator.Factory(aggregationName, vsParser.config(), rounding, order, keyed, minDocCount, extendedBounds, InternalHistogram.FACTORY);
     }

src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java

@@ -19,8 +19,8 @@
 package org.elasticsearch.search.aggregations.bucket.histogram;

 import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.index.mapper.core.DateFieldMapper;
 import org.elasticsearch.search.aggregations.AggregationStreams;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
@@ -53,11 +53,8 @@ public class InternalDateHistogram extends InternalHistogram<InternalDateHistogr
     static class Bucket extends InternalHistogram.Bucket implements DateHistogram.Bucket {

-        private final ValueFormatter formatter;
-
-        Bucket(long key, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
-            super(key, docCount, aggregations);
-            this.formatter = formatter;
+        Bucket(long key, long docCount, InternalAggregations aggregations, @Nullable ValueFormatter formatter) {
+            super(key, docCount, formatter, aggregations);
         }

         @Override
@@ -88,12 +85,12 @@ public class InternalDateHistogram extends InternalHistogram<InternalDateHistogr
         @Override
         public InternalDateHistogram create(String name, List<InternalDateHistogram.Bucket> buckets, InternalOrder order,
-                                            long minDocCount, EmptyBucketInfo emptyBucketInfo, ValueFormatter formatter, boolean keyed) {
+                                            long minDocCount, EmptyBucketInfo emptyBucketInfo, @Nullable ValueFormatter formatter, boolean keyed) {
             return new InternalDateHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed);
         }

         @Override
-        public InternalDateHistogram.Bucket createBucket(long key, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
+        public InternalDateHistogram.Bucket createBucket(long key, long docCount, InternalAggregations aggregations, @Nullable ValueFormatter formatter) {
             return new Bucket(key, docCount, aggregations, formatter);
         }
     }
@@ -103,7 +100,7 @@ public class InternalDateHistogram extends InternalHistogram<InternalDateHistogr
     InternalDateHistogram() {} // for serialization

     InternalDateHistogram(String name, List<InternalDateHistogram.Bucket> buckets, InternalOrder order, long minDocCount,
-                          EmptyBucketInfo emptyBucketInfo, ValueFormatter formatter, boolean keyed) {
+                          EmptyBucketInfo emptyBucketInfo, @Nullable ValueFormatter formatter, boolean keyed) {
         super(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed);
     }

src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java

@@ -22,6 +22,7 @@ import com.carrotsearch.hppc.LongObjectOpenHashMap;
 import com.google.common.collect.Lists;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.Version;
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.rounding.Rounding;
@@ -68,17 +69,19 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
         long key;
         long docCount;
+        protected transient final @Nullable ValueFormatter formatter;
         InternalAggregations aggregations;

-        public Bucket(long key, long docCount, InternalAggregations aggregations) {
+        public Bucket(long key, long docCount, @Nullable ValueFormatter formatter, InternalAggregations aggregations) {
             this.key = key;
             this.docCount = docCount;
+            this.formatter = formatter;
             this.aggregations = aggregations;
         }

         @Override
         public String getKey() {
-            return String.valueOf(key);
+            return formatter != null ? formatter.format(key) : ValueFormatter.RAW.format(key);
         }

         @Override
@@ -121,6 +124,28 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
             reduced.aggregations = InternalAggregations.reduce(aggregations, bigArrays);
             return (B) reduced;
         }
+
+        void toXContent(XContentBuilder builder, Params params, boolean keyed, @Nullable ValueFormatter formatter) throws IOException {
+            if (formatter != null) {
+                Text keyTxt = new StringText(formatter.format(key));
+                if (keyed) {
+                    builder.startObject(keyTxt.string());
+                } else {
+                    builder.startObject();
+                }
+                builder.field(CommonFields.KEY_AS_STRING, keyTxt);
+            } else {
+                if (keyed) {
+                    builder.startObject(String.valueOf(getKeyAsNumber()));
+                } else {
+                    builder.startObject();
+                }
+            }
+            builder.field(CommonFields.KEY, key);
+            builder.field(CommonFields.DOC_COUNT, docCount);
+            aggregations.toXContentInternal(builder, params);
+            builder.endObject();
+        }
     }

     static class EmptyBucketInfo {
@@ -173,12 +198,12 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
         }

         public InternalHistogram<B> create(String name, List<B> buckets, InternalOrder order, long minDocCount,
-                                           EmptyBucketInfo emptyBucketInfo, ValueFormatter formatter, boolean keyed) {
+                                           EmptyBucketInfo emptyBucketInfo, @Nullable ValueFormatter formatter, boolean keyed) {
             return new InternalHistogram<>(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed);
         }

-        public B createBucket(long key, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
-            return (B) new Bucket(key, docCount, aggregations);
+        public B createBucket(long key, long docCount, InternalAggregations aggregations, @Nullable ValueFormatter formatter) {
+            return (B) new Bucket(key, docCount, formatter, aggregations);
         }
     }
@@ -186,14 +211,15 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
     protected List<B> buckets;
     private LongObjectOpenHashMap<B> bucketsMap;
     private InternalOrder order;
-    private ValueFormatter formatter;
+    private @Nullable ValueFormatter formatter;
     private boolean keyed;
     private long minDocCount;
     private EmptyBucketInfo emptyBucketInfo;

     InternalHistogram() {} // for serialization

-    InternalHistogram(String name, List<B> buckets, InternalOrder order, long minDocCount, EmptyBucketInfo emptyBucketInfo, ValueFormatter formatter, boolean keyed) {
+    InternalHistogram(String name, List<B> buckets, InternalOrder order, long minDocCount,
+                      EmptyBucketInfo emptyBucketInfo, @Nullable ValueFormatter formatter, boolean keyed) {
         super(name);
         this.buckets = buckets;
         this.order = order;
@@ -416,8 +442,8 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
         return reduced;
     }

-    protected B createBucket(long key, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
-        return (B) new InternalHistogram.Bucket(key, docCount, aggregations);
+    protected B createBucket(long key, long docCount, InternalAggregations aggregations, @Nullable ValueFormatter formatter) {
+        return (B) new InternalHistogram.Bucket(key, docCount, formatter, aggregations);
    }

     @Override
@@ -465,29 +491,9 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
         } else {
             builder.startArray(CommonFields.BUCKETS);
         }
         for (B bucket : buckets) {
-            if (formatter != null) {
-                Text keyTxt = new StringText(formatter.format(bucket.key));
-                if (keyed) {
-                    builder.startObject(keyTxt.string());
-                } else {
-                    builder.startObject();
-                }
-                builder.field(CommonFields.KEY_AS_STRING, keyTxt);
-            } else {
-                if (keyed) {
-                    builder.startObject(String.valueOf(bucket.getKeyAsNumber()));
-                } else {
-                    builder.startObject();
-                }
-            }
-            builder.field(CommonFields.KEY, bucket.key);
-            builder.field(CommonFields.DOC_COUNT, bucket.docCount);
-            bucket.aggregations.toXContentInternal(builder, params);
-            builder.endObject();
+            bucket.toXContent(builder, params, keyed, formatter);
         }
         if (keyed) {
             builder.endObject();
         } else {

src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java

@@ -19,13 +19,10 @@
 package org.elasticsearch.search.aggregations.bucket.missing;

 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.search.SearchParseException;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.FieldContext;
-import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
+import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
@@ -43,37 +40,22 @@ public class MissingParser implements Aggregator.Parser {
     @Override
     public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {

-        ValuesSourceConfig<ValuesSource> config = new ValuesSourceConfig<>(ValuesSource.class);
-
-        String field = null;
+        ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalMissing.TYPE, context)
+                .scriptable(false)
+                .build();

         XContentParser.Token token;
         String currentFieldName = null;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
-            } else if (token == XContentParser.Token.VALUE_STRING) {
-                if ("field".equals(currentFieldName)) {
-                    field = parser.text();
-                } else {
-                    throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
-                }
+            } else if (vsParser.token(currentFieldName, token, parser)) {
+                continue;
             } else {
                 throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].");
             }
         }

-        if (field == null) {
-            return new MissingAggregator.Factory(aggregationName, config);
-        }
-
-        FieldMapper<?> mapper = context.smartNameFieldMapper(field);
-        if (mapper == null) {
-            config.unmapped(true);
-            return new MissingAggregator.Factory(aggregationName, config);
-        }
-
-        config.fieldContext(new FieldContext(field, context.fieldData().getForField(mapper)));
-        return new MissingAggregator.Factory(aggregationName, config);
+        return new MissingAggregator.Factory(aggregationName, vsParser.config());
     }
 }

src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java

@@ -19,6 +19,7 @@
 package org.elasticsearch.search.aggregations.bucket.range;

 import com.google.common.collect.Lists;
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.text.StringText;
@@ -65,8 +66,8 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalAggre
         InternalAggregations aggregations;
         private String key;

-        public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
-            this.key = key != null ? key : key(from, to, formatter);
+        public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, @Nullable ValueFormatter formatter) {
+            this.key = key != null ? key : generateKey(from, to, formatter);
             this.from = from;
             this.to = to;
             this.docCount = docCount;
@@ -123,7 +124,7 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalAggre
             return reduced;
         }

-        void toXContent(XContentBuilder builder, Params params, ValueFormatter formatter, boolean keyed) throws IOException {
+        void toXContent(XContentBuilder builder, Params params, @Nullable ValueFormatter formatter, boolean keyed) throws IOException {
             if (keyed) {
                 builder.startObject(key);
             } else {
@@ -147,11 +148,11 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalAggre
             builder.endObject();
         }

-        private static String key(double from, double to, ValueFormatter formatter) {
+        protected String generateKey(double from, double to, @Nullable ValueFormatter formatter) {
             StringBuilder sb = new StringBuilder();
-            sb.append(Double.isInfinite(from) ? "*" : formatter != null ? formatter.format(from) : from);
+            sb.append(Double.isInfinite(from) ? "*" : formatter != null ? formatter.format(from) : ValueFormatter.RAW.format(from));
             sb.append("-");
-            sb.append(Double.isInfinite(to) ? "*" : formatter != null ? formatter.format(to) : to);
+            sb.append(Double.isInfinite(to) ? "*" : formatter != null ? formatter.format(to) : ValueFormatter.RAW.format(to));
             return sb.toString();
         }
@@ -163,26 +164,25 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalAggre
             return TYPE.name();
         }

-        public R create(String name, List<B> ranges, ValueFormatter formatter, boolean keyed, boolean unmapped) {
+        public R create(String name, List<B> ranges, @Nullable ValueFormatter formatter, boolean keyed, boolean unmapped) {
             return (R) new InternalRange<>(name, ranges, formatter, keyed, unmapped);
         }

-        public B createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
+        public B createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, @Nullable ValueFormatter formatter) {
             return (B) new Bucket(key, from, to, docCount, aggregations, formatter);
         }
     }

     private List<B> ranges;
     private Map<String, B> rangeMap;
-    private ValueFormatter formatter;
+    private @Nullable ValueFormatter formatter;
     private boolean keyed;
     private boolean unmapped;

     public InternalRange() {} // for serialization

-    public InternalRange(String name, List<B> ranges, ValueFormatter formatter, boolean keyed, boolean unmapped) {
+    public InternalRange(String name, List<B> ranges, @Nullable ValueFormatter formatter, boolean keyed, boolean unmapped) {
         super(name);
         this.ranges = ranges;
         this.formatter = formatter;

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.range;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.util.InPlaceMergeSorter; import org.apache.lucene.util.InPlaceMergeSorter;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.fielddata.DoubleValues; import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.search.aggregations.*; import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
@ -28,8 +29,10 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser; import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -65,18 +68,19 @@ public class RangeAggregator extends BucketsAggregator {
return "[" + from + " to " + to + ")"; return "[" + from + " to " + to + ")";
} }
public void process(ValueParser parser, AggregationContext aggregationContext) { public void process(ValueParser parser, SearchContext context) {
assert parser != null;
if (fromAsStr != null) { if (fromAsStr != null) {
from = parser != null ? parser.parseDouble(fromAsStr, aggregationContext.searchContext()) : Double.valueOf(fromAsStr); from = parser.parseDouble(fromAsStr, context);
} }
if (toAsStr != null) { if (toAsStr != null) {
to = parser != null ? parser.parseDouble(toAsStr, aggregationContext.searchContext()) : Double.valueOf(toAsStr); to = parser.parseDouble(toAsStr, context);
} }
} }
} }
private final ValuesSource.Numeric valuesSource; private final ValuesSource.Numeric valuesSource;
private final ValueFormatter formatter; private final @Nullable ValueFormatter formatter;
private final Range[] ranges; private final Range[] ranges;
private final boolean keyed; private final boolean keyed;
private final InternalRange.Factory rangeFactory; private final InternalRange.Factory rangeFactory;
@ -87,8 +91,7 @@ public class RangeAggregator extends BucketsAggregator {
public RangeAggregator(String name, public RangeAggregator(String name,
AggregatorFactories factories, AggregatorFactories factories,
ValuesSource.Numeric valuesSource, ValuesSource.Numeric valuesSource,
ValueFormatter formatter, @Nullable ValueFormat format,
ValueParser parser,
InternalRange.Factory rangeFactory, InternalRange.Factory rangeFactory,
List<Range> ranges, List<Range> ranges,
boolean keyed, boolean keyed,
@ -98,12 +101,14 @@ public class RangeAggregator extends BucketsAggregator {
super(name, BucketAggregationMode.MULTI_BUCKETS, factories, ranges.size() * (parent == null ? 1 : parent.estimatedBucketCount()), aggregationContext, parent); super(name, BucketAggregationMode.MULTI_BUCKETS, factories, ranges.size() * (parent == null ? 1 : parent.estimatedBucketCount()), aggregationContext, parent);
assert valuesSource != null; assert valuesSource != null;
this.valuesSource = valuesSource; this.valuesSource = valuesSource;
this.formatter = formatter; this.formatter = format != null ? format.formatter() : null;
this.keyed = keyed; this.keyed = keyed;
this.rangeFactory = rangeFactory; this.rangeFactory = rangeFactory;
this.ranges = ranges.toArray(new Range[ranges.size()]); this.ranges = ranges.toArray(new Range[ranges.size()]);
ValueParser parser = format != null ? format.parser() : ValueParser.RAW;
for (int i = 0; i < this.ranges.length; i++) { for (int i = 0; i < this.ranges.length; i++) {
this.ranges[i].process(parser, context); this.ranges[i].process(parser, context.searchContext());
} }
sortRanges(this.ranges); sortRanges(this.ranges);
@ -193,8 +198,8 @@ public class RangeAggregator extends BucketsAggregator {
for (int i = 0; i < ranges.length; i++) { for (int i = 0; i < ranges.length; i++) {
Range range = ranges[i]; Range range = ranges[i];
final long bucketOrd = subBucketOrdinal(owningBucketOrdinal, i); final long bucketOrd = subBucketOrdinal(owningBucketOrdinal, i);
org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket = rangeFactory.createBucket( org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket =
range.key, range.from, range.to, bucketDocCount(bucketOrd),bucketAggregations(bucketOrd), formatter); rangeFactory.createBucket(range.key, range.from, range.to, bucketDocCount(bucketOrd),bucketAggregations(bucketOrd), formatter);
buckets.add(bucket); buckets.add(bucket);
} }
// value source can be null in the case of unmapped fields // value source can be null in the case of unmapped fields
@ -207,8 +212,8 @@ public class RangeAggregator extends BucketsAggregator {
List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = Lists.newArrayListWithCapacity(ranges.length); List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = Lists.newArrayListWithCapacity(ranges.length);
for (int i = 0; i < ranges.length; i++) { for (int i = 0; i < ranges.length; i++) {
Range range = ranges[i]; Range range = ranges[i];
org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket = rangeFactory.createBucket( org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket =
range.key, range.from, range.to, 0, subAggs, formatter); rangeFactory.createBucket(range.key, range.from, range.to, 0, subAggs, formatter);
buckets.add(bucket); buckets.add(bucket);
} }
// value source can be null in the case of unmapped fields // value source can be null in the case of unmapped fields
@ -242,33 +247,30 @@ public class RangeAggregator extends BucketsAggregator {
private final boolean keyed; private final boolean keyed;
private final InternalRange.Factory factory; private final InternalRange.Factory factory;
private final ValueFormatter formatter; private final ValueFormatter formatter;
private final ValueParser parser;
public Unmapped(String name, public Unmapped(String name,
List<RangeAggregator.Range> ranges, List<RangeAggregator.Range> ranges,
boolean keyed, boolean keyed,
ValueFormatter formatter, ValueFormat format,
ValueParser parser, AggregationContext context,
AggregationContext aggregationContext,
Aggregator parent, Aggregator parent,
InternalRange.Factory factory) { InternalRange.Factory factory) {
super(name, aggregationContext, parent); super(name, context, parent);
this.ranges = ranges; this.ranges = ranges;
ValueParser parser = format != null ? format.parser() : ValueParser.RAW;
for (Range range : this.ranges) { for (Range range : this.ranges) {
range.process(parser, context); range.process(parser, context.searchContext());
} }
this.keyed = keyed; this.keyed = keyed;
this.formatter = formatter; this.formatter = format != null ? format.formatter() : null;
this.parser = parser;
this.factory = factory; this.factory = factory;
} }
@Override @Override
public InternalAggregation buildEmptyAggregation() { public InternalAggregation buildEmptyAggregation() {
InternalAggregations subAggs = buildEmptySubAggregations(); InternalAggregations subAggs = buildEmptySubAggregations();
List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = new ArrayList<>(ranges.size());
new ArrayList<>(ranges.size());
for (RangeAggregator.Range range : ranges) { for (RangeAggregator.Range range : ranges) {
buckets.add(factory.createBucket(range.key, range.from, range.to, 0, subAggs, formatter)); buckets.add(factory.createBucket(range.key, range.from, range.to, 0, subAggs, formatter));
} }
@ -282,8 +284,8 @@ public class RangeAggregator extends BucketsAggregator {
private final List<Range> ranges; private final List<Range> ranges;
private final boolean keyed; private final boolean keyed;
public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valueSourceConfig, ValueFormatter formatter, ValueParser parser, InternalRange.Factory rangeFactory, List<Range> ranges, boolean keyed) { public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valueSourceConfig, InternalRange.Factory rangeFactory, List<Range> ranges, boolean keyed) {
super(name, rangeFactory.type(), valueSourceConfig, formatter, parser); super(name, rangeFactory.type(), valueSourceConfig);
this.rangeFactory = rangeFactory; this.rangeFactory = rangeFactory;
this.ranges = ranges; this.ranges = ranges;
this.keyed = keyed; this.keyed = keyed;
@ -291,12 +293,12 @@ public class RangeAggregator extends BucketsAggregator {
@Override @Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) { protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) {
return new Unmapped(name, ranges, keyed, formatter, parser, aggregationContext, parent, rangeFactory); return new Unmapped(name, ranges, keyed, config.format(), aggregationContext, parent, rangeFactory);
} }
@Override @Override
protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) { protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new RangeAggregator(name, factories, valuesSource, formatter, parser, rangeFactory, ranges, keyed, aggregationContext, parent); return new RangeAggregator(name, factories, valuesSource, config.format(), rangeFactory, ranges, keyed, aggregationContext, parent);
} }
} }
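Note on the pattern above: the (ValueFormatter, ValueParser) pair that used to be threaded through every constructor is replaced by a single ValueFormat carried on the values-source config, from which both directions of the conversion are derived (format.formatter() to render bucket keys, format.parser() to resolve range bounds). A minimal sketch of the implied contract, using simplified stand-in types rather than the actual Elasticsearch classes:

// Simplified stand-ins, for illustration only.
interface ValueFormatter { String format(double value); }
interface ValueParser { double parseDouble(String value); }

// Sketch of the ValueFormat abstraction: one object bundling both
// conversions, so factories and aggregators no longer take a separate
// (formatter, parser) pair. A null format falls back to raw handling,
// as the "format != null ? format.formatter() : null" branch shows.
interface ValueFormat {
    ValueFormatter formatter(); // renders bucket keys in responses
    ValueParser parser();       // parses user-supplied bounds such as "from"/"to"
}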

View File

@ -19,22 +19,16 @@
package org.elasticsearch.search.aggregations.bucket.range; package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map;
/** /**
* *
@ -49,34 +43,21 @@ public class RangeParser implements Aggregator.Parser {
@Override @Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
String field = null;
List<RangeAggregator.Range> ranges = null; List<RangeAggregator.Range> ranges = null;
String script = null;
String scriptLang = null;
Map<String, Object> scriptParams = null;
boolean keyed = false; boolean keyed = false;
boolean assumeSorted = false;
String format = null; ValuesSourceParser<ValuesSource.Numeric> vsParser = ValuesSourceParser.numeric(aggregationName, InternalRange.TYPE, context)
.requiresSortedValues(true)
.formattable(true)
.build();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) { } else if (vsParser.token(currentFieldName, token, parser)) {
if ("field".equals(currentFieldName)) { continue;
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else if ("format".equals(currentFieldName)) {
format = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
if ("ranges".equals(currentFieldName)) { if ("ranges".equals(currentFieldName)) {
ranges = new ArrayList<>(); ranges = new ArrayList<>();
@ -111,17 +92,9 @@ public class RangeParser implements Aggregator.Parser {
} else { } else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
} }
} else if (token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) {
scriptParams = parser.map();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) { } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("keyed".equals(currentFieldName)) { if ("keyed".equals(currentFieldName)) {
keyed = parser.booleanValue(); keyed = parser.booleanValue();
} else if ("script_values_sorted".equals(currentFieldName) || "scriptValuesSorted".equals(currentFieldName)) {
assumeSorted = parser.booleanValue();
} else { } else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
} }
@ -134,30 +107,6 @@ public class RangeParser implements Aggregator.Parser {
throw new SearchParseException(context, "Missing [ranges] in ranges aggregator [" + aggregationName + "]"); throw new SearchParseException(context, "Missing [ranges] in ranges aggregator [" + aggregationName + "]");
} }
if (script != null) { return new RangeAggregator.Factory(aggregationName, vsParser.config(), InternalRange.FACTORY, ranges, keyed);
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
if (!assumeSorted) {
// we need values to be sorted and unique for efficiency
config.ensureSorted(true);
}
if (field == null) {
return new RangeAggregator.Factory(aggregationName, config, null, null, InternalRange.FACTORY, ranges, keyed);
}
ValueFormatter valueFormatter = format == null ? ValueFormatter.RAW : new ValueFormatter.Number.Pattern(format);
ValueParser valueParser = format == null ? ValueParser.RAW : new ValueParser.Number.Pattern(format);
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new RangeAggregator.Factory(aggregationName, config, valueFormatter, valueParser, InternalRange.FACTORY, ranges, keyed);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
return new RangeAggregator.Factory(aggregationName, config, valueFormatter, valueParser, InternalRange.FACTORY, ranges, keyed);
} }
} }
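This parser shows the consolidation applied across all value-source aggregations in this commit: the per-aggregation handling of "field", "script", "lang", "params", and "format" is deleted, and the shared ValuesSourceParser claims those keys inside the token loop, returning true when it consumed one. A self-contained sketch of that claim-or-decline contract (the class and method names below are hypothetical stand-ins for ValuesSourceParser):

import java.util.HashMap;
import java.util.Map;

// Minimal sketch of a shared sub-parser that owns the keys common to all
// value-source aggregations, so each aggregation parser handles only its
// own keys ("ranges", "keyed", ...) and delegates the rest.
final class CommonKeysParser {
    private final Map<String, Object> consumed = new HashMap<>();

    // Returns true if the key is a common one and was consumed here;
    // false tells the caller to try its aggregation-specific handling.
    boolean token(String currentFieldName, Object value) {
        if (currentFieldName == null) {
            return false;
        }
        switch (currentFieldName) {
            case "field":
            case "script":
            case "lang":
            case "params":
            case "format":
                consumed.put(currentFieldName, value);
                return true;
            default:
                return false;
        }
    }

    Map<String, Object> config() {
        return consumed;
    }
}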

View File

@ -19,25 +19,18 @@
package org.elasticsearch.search.aggregations.bucket.range.date; package org.elasticsearch.search.aggregations.bucket.range.date;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map;
/** /**
* *
@ -52,34 +45,22 @@ public class DateRangeParser implements Aggregator.Parser {
@Override @Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class); ValuesSourceParser<ValuesSource.Numeric> vsParser = ValuesSourceParser.numeric(aggregationName, InternalDateRange.TYPE, context)
.targetValueType(ValueType.DATE)
.requiresSortedValues(true)
.formattable(true)
.build();
String field = null;
List<RangeAggregator.Range> ranges = null; List<RangeAggregator.Range> ranges = null;
String script = null;
String scriptLang = null;
Map<String, Object> scriptParams = null;
boolean keyed = false; boolean keyed = false;
String format = null;
boolean assumeSorted = false;
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) { } else if (vsParser.token(currentFieldName, token, parser)) {
if ("field".equals(currentFieldName)) { continue;
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else if ("format".equals(currentFieldName)) {
format = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
if ("ranges".equals(currentFieldName)) { if ("ranges".equals(currentFieldName)) {
ranges = new ArrayList<>(); ranges = new ArrayList<>();
@ -116,17 +97,9 @@ public class DateRangeParser implements Aggregator.Parser {
ranges.add(new RangeAggregator.Range(key, from, fromAsStr, to, toAsStr)); ranges.add(new RangeAggregator.Range(key, from, fromAsStr, to, toAsStr));
} }
} }
} else if (token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) {
scriptParams = parser.map();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) { } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("keyed".equals(currentFieldName)) { if ("keyed".equals(currentFieldName)) {
keyed = parser.booleanValue(); keyed = parser.booleanValue();
} else if ("script_values_sorted".equals(currentFieldName) || "scriptValuesSorted".equals(currentFieldName)) {
assumeSorted = parser.booleanValue();
} else { } else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
} }
@ -139,38 +112,6 @@ public class DateRangeParser implements Aggregator.Parser {
throw new SearchParseException(context, "Missing [ranges] in ranges aggregator [" + aggregationName + "]"); throw new SearchParseException(context, "Missing [ranges] in ranges aggregator [" + aggregationName + "]");
} }
if (script != null) { return new RangeAggregator.Factory(aggregationName, vsParser.config(), InternalDateRange.FACTORY, ranges, keyed);
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
if (!assumeSorted) {
// we need values to be sorted and unique for efficiency
config.ensureSorted(true);
}
ValueFormatter valueFormatter = format != null ? new ValueFormatter.DateTime(format) : ValueFormatter.DateTime.DEFAULT;
ValueParser valueParser = ValueParser.DateMath.DEFAULT;
if (field == null) {
return new RangeAggregator.Factory(aggregationName, config, valueFormatter, valueParser, InternalDateRange.FACTORY, ranges, keyed);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new RangeAggregator.Factory(aggregationName, config, valueFormatter, valueParser, InternalDateRange.FACTORY, ranges, keyed);
}
if (!(mapper instanceof DateFieldMapper)) {
throw new AggregationExecutionException("date_range aggregation can only be applied to date fields which is not the case with field [" + field + "]");
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
if (format == null) {
valueFormatter = new ValueFormatter.DateTime(((DateFieldMapper) mapper).dateTimeFormatter());
}
valueParser = new ValueParser.DateMath(((DateFieldMapper) mapper).dateMathParser());
return new RangeAggregator.Factory(aggregationName, config, valueFormatter, valueParser, InternalDateRange.FACTORY, ranges, keyed);
} }
} }
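The deleted tail of this parser chose a date formatter, wired the mapper's date-math parser, and rejected non-date fields by hand; targetValueType(ValueType.DATE) on the builder now implies all of that. The reason a parser is needed at all is that range bounds may arrive either as numbers (epoch millis) or as strings requiring date-math resolution. A hedged, standalone illustration of that bound-resolution step (resolveBound is hypothetical; the real work happens inside Range.process(parser, context)):

import java.util.function.ToLongFunction;

// Sketch: a range bound is either already numeric, a string to resolve
// via date math, or absent, which leaves the bound open.
final class BoundResolutionSketch {
    static double resolveBound(Double numeric, String asString, ToLongFunction<String> dateMath) {
        if (numeric != null) {
            return numeric;                        // epoch millis, used as-is
        }
        if (asString != null) {
            return dateMath.applyAsLong(asString); // e.g. "now-10d" -> epoch millis
        }
        return Double.NEGATIVE_INFINITY;           // open-ended lower bound (assumed convention)
    }
}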

View File

@ -18,6 +18,7 @@
*/ */
package org.elasticsearch.search.aggregations.bucket.range.date; package org.elasticsearch.search.aggregations.bucket.range.date;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams; import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation;
@ -93,7 +94,7 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket> i
InternalDateRange() {} // for serialization InternalDateRange() {} // for serialization
InternalDateRange(String name, List<InternalDateRange.Bucket> ranges, ValueFormatter formatter, boolean keyed, boolean unmapped) { InternalDateRange(String name, List<InternalDateRange.Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed, boolean unmapped) {
super(name, ranges, formatter, keyed, unmapped); super(name, ranges, formatter, keyed, unmapped);
} }

View File

@ -18,12 +18,15 @@
*/ */
package org.elasticsearch.search.aggregations.bucket.range.geodistance; package org.elasticsearch.search.aggregations.bucket.range.geodistance;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -31,7 +34,6 @@ import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Unmapped; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Unmapped;
import org.elasticsearch.search.aggregations.support.*; import org.elasticsearch.search.aggregations.support.*;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
@ -43,6 +45,8 @@ import java.util.List;
*/ */
public class GeoDistanceParser implements Aggregator.Parser { public class GeoDistanceParser implements Aggregator.Parser {
private static final ParseField ORIGIN_FIELD = new ParseField("origin", "center", "point", "por");
@Override @Override
public String type() { public String type() {
return InternalGeoDistance.TYPE.name(); return InternalGeoDistance.TYPE.name();
@ -62,9 +66,13 @@ public class GeoDistanceParser implements Aggregator.Parser {
@Override @Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
String field = null; ValuesSourceParser<ValuesSource.GeoPoint> vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoDistance.TYPE, context)
.requiresSortedValues(true)
.build();
GeoPointParser geoPointParser = new GeoPointParser(aggregationName, InternalGeoDistance.TYPE, context, ORIGIN_FIELD);
List<RangeAggregator.Range> ranges = null; List<RangeAggregator.Range> ranges = null;
GeoPoint origin = null;
DistanceUnit unit = DistanceUnit.DEFAULT; DistanceUnit unit = DistanceUnit.DEFAULT;
GeoDistance distanceType = GeoDistance.DEFAULT; GeoDistance distanceType = GeoDistance.DEFAULT;
boolean keyed = false; boolean keyed = false;
@ -74,16 +82,15 @@ public class GeoDistanceParser implements Aggregator.Parser {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
} else if (vsParser.token(currentFieldName, token, parser)) {
continue;
} else if (geoPointParser.token(currentFieldName, token, parser)) {
continue;
} else if (token == XContentParser.Token.VALUE_STRING) { } else if (token == XContentParser.Token.VALUE_STRING) {
if ("field".equals(currentFieldName)) { if ("unit".equals(currentFieldName)) {
field = parser.text();
} else if ("unit".equals(currentFieldName)) {
unit = DistanceUnit.fromString(parser.text()); unit = DistanceUnit.fromString(parser.text());
} else if ("distance_type".equals(currentFieldName) || "distanceType".equals(currentFieldName)) { } else if ("distance_type".equals(currentFieldName) || "distanceType".equals(currentFieldName)) {
distanceType = GeoDistance.fromString(parser.text()); distanceType = GeoDistance.fromString(parser.text());
} else if ("point".equals(currentFieldName) || "origin".equals(currentFieldName) || "center".equals(currentFieldName)) {
origin = new GeoPoint();
origin.resetFromString(parser.text());
} else { } else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
} }
@ -124,43 +131,6 @@ public class GeoDistanceParser implements Aggregator.Parser {
} }
ranges.add(new RangeAggregator.Range(key(key, from, to), from, fromAsStr, to, toAsStr)); ranges.add(new RangeAggregator.Range(key(key, from, to), from, fromAsStr, to, toAsStr));
} }
} else if ("point".equals(currentFieldName) || "origin".equals(currentFieldName) || "center".equals(currentFieldName)) {
double lat = Double.NaN;
double lon = Double.NaN;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (Double.isNaN(lon)) {
lon = parser.doubleValue();
} else if (Double.isNaN(lat)) {
lat = parser.doubleValue();
} else {
throw new SearchParseException(context, "malformed [origin] geo point array in geo_distance aggregator [" + aggregationName + "]. " +
"a geo point array must be of the form [lon, lat]");
}
}
origin = new GeoPoint(lat, lon);
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if ("point".equals(currentFieldName) || "origin".equals(currentFieldName) || "center".equals(currentFieldName)) {
double lat = Double.NaN;
double lon = Double.NaN;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("lat".equals(currentFieldName)) {
lat = parser.doubleValue();
} else if ("lon".equals(currentFieldName)) {
lon = parser.doubleValue();
}
}
}
if (Double.isNaN(lat) || Double.isNaN(lon)) {
throw new SearchParseException(context, "malformed [origin] geo point object. either [lat] or [lon] (or both) are " +
"missing in geo_distance aggregator [" + aggregationName + "]");
}
origin = new GeoPoint(lat, lon);
} else { } else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
} }
@ -173,25 +143,12 @@ public class GeoDistanceParser implements Aggregator.Parser {
throw new SearchParseException(context, "Missing [ranges] in geo_distance aggregator [" + aggregationName + "]"); throw new SearchParseException(context, "Missing [ranges] in geo_distance aggregator [" + aggregationName + "]");
} }
GeoPoint origin = geoPointParser.geoPoint();
if (origin == null) { if (origin == null) {
throw new SearchParseException(context, "Missing [origin] in geo_distance aggregator [" + aggregationName + "]"); throw new SearchParseException(context, "Missing [origin] in geo_distance aggregator [" + aggregationName + "]");
} }
ValuesSourceConfig<ValuesSource.GeoPoint> config = new ValuesSourceConfig<>(ValuesSource.GeoPoint.class); return new GeoDistanceFactory(aggregationName, vsParser.config(), InternalGeoDistance.FACTORY, origin, unit, distanceType, ranges, keyed);
if (field == null) {
return new GeoDistanceFactory(aggregationName, config, InternalGeoDistance.FACTORY, origin, unit, distanceType, ranges, keyed);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new GeoDistanceFactory(aggregationName, config, InternalGeoDistance.FACTORY, origin, unit, distanceType, ranges, keyed);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
return new GeoDistanceFactory(aggregationName, config, InternalGeoDistance.FACTORY, origin, unit, distanceType, ranges, keyed);
} }
private static class GeoDistanceFactory extends ValuesSourceAggregatorFactory<ValuesSource.GeoPoint> { private static class GeoDistanceFactory extends ValuesSourceAggregatorFactory<ValuesSource.GeoPoint> {
@ -217,7 +174,7 @@ public class GeoDistanceParser implements Aggregator.Parser {
@Override @Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) { protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) {
return new Unmapped(name, ranges, keyed, null, null, aggregationContext, parent, rangeFactory); return new Unmapped(name, ranges, keyed, null, aggregationContext, parent, rangeFactory);
} }
@Override @Override
@ -228,7 +185,7 @@ public class GeoDistanceParser implements Aggregator.Parser {
// we need to ensure uniqueness // we need to ensure uniqueness
distanceSource = new ValuesSource.Numeric.SortedAndUnique(distanceSource); distanceSource = new ValuesSource.Numeric.SortedAndUnique(distanceSource);
} }
return new RangeAggregator(name, factories, distanceSource, null, null, rangeFactory, ranges, keyed, aggregationContext, parent); return new RangeAggregator(name, factories, distanceSource, null, rangeFactory, ranges, keyed, aggregationContext, parent);
} }
private static class DistanceValues extends DoubleValues { private static class DistanceValues extends DoubleValues {
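The large deleted block above parsed the three accepted notations for the origin point inline; this commit moves that into the reusable GeoPointParser. A self-contained sketch of the three notations and their normalization (parseOrigin is a hypothetical stand-in for what GeoPointParser accepts):

import java.util.Map;

// Sketch: origin may be a "lat,lon" string, a [lon, lat] array (GeoJSON
// order, as the deleted array branch shows), or a {"lat": .., "lon": ..}
// object. All three normalize to a (lat, lon) pair.
final class OriginSketch {
    static double[] parseOrigin(Object value) {
        if (value instanceof String) {
            String[] parts = ((String) value).split(",");
            return new double[] { Double.parseDouble(parts[0].trim()), Double.parseDouble(parts[1].trim()) };
        }
        if (value instanceof double[]) {
            double[] lonLat = (double[]) value;
            return new double[] { lonLat[1], lonLat[0] };
        }
        if (value instanceof Map) {
            Map<?, ?> point = (Map<?, ?>) value;
            return new double[] { ((Number) point.get("lat")).doubleValue(), ((Number) point.get("lon")).doubleValue() };
        }
        throw new IllegalArgumentException("malformed [origin]: expected string, [lon, lat] array, or {lat, lon} object");
    }
}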

View File

@ -18,6 +18,7 @@
*/ */
package org.elasticsearch.search.aggregations.bucket.range.geodistance; package org.elasticsearch.search.aggregations.bucket.range.geodistance;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams; import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation;
@ -70,19 +71,19 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
} }
@Override @Override
public InternalGeoDistance create(String name, List<Bucket> ranges, ValueFormatter formatter, boolean keyed, boolean unmapped) { public InternalGeoDistance create(String name, List<Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed, boolean unmapped) {
return new InternalGeoDistance(name, ranges, formatter, keyed, unmapped); return new InternalGeoDistance(name, ranges, formatter, keyed, unmapped);
} }
@Override @Override
public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, ValueFormatter formatter) { public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, @Nullable ValueFormatter formatter) {
return new Bucket(key, from, to, docCount, aggregations, formatter); return new Bucket(key, from, to, docCount, aggregations, formatter);
} }
} }
InternalGeoDistance() {} // for serialization InternalGeoDistance() {} // for serialization
public InternalGeoDistance(String name, List<Bucket> ranges, ValueFormatter formatter, boolean keyed, boolean unmapped) { public InternalGeoDistance(String name, List<Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed, boolean unmapped) {
super(name, ranges, formatter, keyed, unmapped); super(name, ranges, formatter, keyed, unmapped);
} }

View File

@ -18,6 +18,7 @@
*/ */
package org.elasticsearch.search.aggregations.bucket.range.ipv4; package org.elasticsearch.search.aggregations.bucket.range.ipv4;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams; import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation;
@ -54,12 +55,12 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket> i
public static class Bucket extends InternalRange.Bucket implements IPv4Range.Bucket { public static class Bucket extends InternalRange.Bucket implements IPv4Range.Bucket {
public Bucket(String key, double from, double to, long docCount, List<InternalAggregation> aggregations, ValueFormatter formatter) { public Bucket(String key, double from, double to, long docCount, List<InternalAggregation> aggregations) {
super(key, from, to, docCount, new InternalAggregations(aggregations), formatter); super(key, from, to, docCount, new InternalAggregations(aggregations), ValueFormatter.IPv4);
} }
public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, ValueFormatter formatter) { public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations) {
super(key, from, to, docCount, aggregations, formatter); super(key, from, to, docCount, aggregations, ValueFormatter.IPv4);
} }
@Override @Override
@ -83,13 +84,13 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket> i
} }
@Override @Override
public InternalIPv4Range create(String name, List<Bucket> ranges, ValueFormatter formatter, boolean keyed, boolean unmapped) { public InternalIPv4Range create(String name, List<Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed, boolean unmapped) {
return new InternalIPv4Range(name, ranges, keyed, unmapped); return new InternalIPv4Range(name, ranges, keyed, unmapped);
} }
@Override @Override
public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, ValueFormatter formatter) { public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, @Nullable ValueFormatter formatter) {
return new Bucket(key, from, to, docCount, aggregations, formatter); return new Bucket(key, from, to, docCount, aggregations);
} }
} }
@ -105,8 +106,8 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket> i
} }
@Override @Override
protected Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, ValueFormatter formatter) { protected Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, @Nullable ValueFormatter formatter ) {
return new Bucket(key, from, to, docCount, aggregations, formatter); return new Bucket(key, from, to, docCount, aggregations);
} }
} }

View File

@ -19,25 +19,18 @@
package org.elasticsearch.search.aggregations.bucket.range.ipv4; package org.elasticsearch.search.aggregations.bucket.range.ipv4;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map;
/** /**
* *
@ -52,31 +45,22 @@ public class IpRangeParser implements Aggregator.Parser {
@Override @Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class); ValuesSourceParser<ValuesSource.Numeric> vsParser = ValuesSourceParser.numeric(aggregationName, InternalIPv4Range.TYPE, context)
.targetValueType(ValueType.IP)
.requiresSortedValues(true)
.formattable(false)
.build();
String field = null;
List<RangeAggregator.Range> ranges = null; List<RangeAggregator.Range> ranges = null;
String script = null;
String scriptLang = null;
Map<String, Object> scriptParams = null;
boolean keyed = false; boolean keyed = false;
boolean assumeSorted = false;
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) { } else if (vsParser.token(currentFieldName, token, parser)) {
if ("field".equals(currentFieldName)) { continue;
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
if ("ranges".equals(currentFieldName)) { if ("ranges".equals(currentFieldName)) {
ranges = new ArrayList<>(); ranges = new ArrayList<>();
@ -118,17 +102,9 @@ public class IpRangeParser implements Aggregator.Parser {
} else { } else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
} }
} else if (token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) {
scriptParams = parser.map();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) { } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("keyed".equals(currentFieldName)) { if ("keyed".equals(currentFieldName)) {
keyed = parser.booleanValue(); keyed = parser.booleanValue();
} else if ("script_values_sorted".equals(currentFieldName) || "scriptValuesSorted".equals(currentFieldName)) {
assumeSorted = parser.booleanValue();
} else { } else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
} }
@ -141,32 +117,7 @@ public class IpRangeParser implements Aggregator.Parser {
throw new SearchParseException(context, "Missing [ranges] in ranges aggregator [" + aggregationName + "]"); throw new SearchParseException(context, "Missing [ranges] in ranges aggregator [" + aggregationName + "]");
} }
if (script != null) { return new RangeAggregator.Factory(aggregationName, vsParser.config(), InternalIPv4Range.FACTORY, ranges, keyed);
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
if (!assumeSorted) {
// we need values to be sorted and unique for efficiency
config.ensureSorted(true);
}
if (field == null) {
return new RangeAggregator.Factory(aggregationName, config, ValueFormatter.IPv4, ValueParser.IPv4, InternalIPv4Range.FACTORY, ranges, keyed);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new RangeAggregator.Factory(aggregationName, config, ValueFormatter.IPv4, ValueParser.IPv4, InternalIPv4Range.FACTORY, ranges, keyed);
}
if (!(mapper instanceof IpFieldMapper)) {
throw new AggregationExecutionException("ip_range aggregation can only be applied to ip fields which is not the case with field [" + field + "]");
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
return new RangeAggregator.Factory(aggregationName, config, ValueFormatter.IPv4, ValueParser.IPv4, InternalIPv4Range.FACTORY, ranges, keyed);
} }
private static void parseMaskRange(String cidr, RangeAggregator.Range range, String aggregationName, SearchContext ctx) { private static void parseMaskRange(String cidr, RangeAggregator.Range range, String aggregationName, SearchContext ctx) {
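The body of parseMaskRange is unchanged by this commit and not shown, but the arithmetic it performs is simple enough to sketch standalone: an IPv4 address is a 32-bit unsigned integer, and a /n CIDR mask covers a block of 2^(32-n) consecutive addresses. A hedged illustration (cidrToRange is hypothetical):

// Sketch: expand "a.b.c.d/n" into the [from, to) address range an ip_range
// bucket covers, treating the IPv4 address as a 32-bit unsigned integer.
final class CidrSketch {
    static long[] cidrToRange(String cidr) {
        String[] parts = cidr.split("/");
        String[] octets = parts[0].split("\\.");
        long base = 0;
        for (String octet : octets) {
            base = (base << 8) | Integer.parseInt(octet);
        }
        int prefixLength = Integer.parseInt(parts[1]);
        long blockSize = 1L << (32 - prefixLength);
        long from = base & ~(blockSize - 1); // align to the start of the block
        return new long[] { from, from + blockSize };
    }
}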

View File

@ -18,6 +18,7 @@
*/ */
package org.elasticsearch.search.aggregations.bucket.significant; package org.elasticsearch.search.aggregations.bucket.significant;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.StringText;
@ -84,15 +85,15 @@ public class SignificantLongTerms extends InternalSignificantTerms {
} }
private ValueFormatter valueFormatter; private ValueFormatter formatter;
SignificantLongTerms() {} // for serialization SignificantLongTerms() {} // for serialization
public SignificantLongTerms(long subsetSize, long supersetSize, String name, ValueFormatter valueFormatter, public SignificantLongTerms(long subsetSize, long supersetSize, String name, @Nullable ValueFormatter formatter,
int requiredSize, long minDocCount, Collection<InternalSignificantTerms.Bucket> buckets) { int requiredSize, long minDocCount, Collection<InternalSignificantTerms.Bucket> buckets) {
super(subsetSize, supersetSize, name, requiredSize, minDocCount, buckets); super(subsetSize, supersetSize, name, requiredSize, minDocCount, buckets);
this.valueFormatter = valueFormatter; this.formatter = formatter;
} }
@Override @Override
@ -103,7 +104,7 @@ public class SignificantLongTerms extends InternalSignificantTerms {
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
this.name = in.readString(); this.name = in.readString();
this.valueFormatter = ValueFormatterStreams.readOptional(in); this.formatter = ValueFormatterStreams.readOptional(in);
this.requiredSize = readSize(in); this.requiredSize = readSize(in);
this.minDocCount = in.readVLong(); this.minDocCount = in.readVLong();
this.subsetSize = in.readVLong(); this.subsetSize = in.readVLong();
@ -124,7 +125,7 @@ public class SignificantLongTerms extends InternalSignificantTerms {
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeString(name); out.writeString(name);
ValueFormatterStreams.writeOptional(valueFormatter, out); ValueFormatterStreams.writeOptional(formatter, out);
writeSize(requiredSize, out); writeSize(requiredSize, out);
out.writeVLong(minDocCount); out.writeVLong(minDocCount);
out.writeVLong(subsetSize); out.writeVLong(subsetSize);
@ -146,8 +147,8 @@ public class SignificantLongTerms extends InternalSignificantTerms {
for (InternalSignificantTerms.Bucket bucket : buckets) { for (InternalSignificantTerms.Bucket bucket : buckets) {
builder.startObject(); builder.startObject();
builder.field(CommonFields.KEY, ((Bucket) bucket).term); builder.field(CommonFields.KEY, ((Bucket) bucket).term);
if (valueFormatter != null) { if (formatter != null) {
builder.field(CommonFields.KEY_AS_STRING, valueFormatter.format(((Bucket) bucket).term)); builder.field(CommonFields.KEY_AS_STRING, formatter.format(((Bucket) bucket).term));
} }
builder.field(CommonFields.DOC_COUNT, bucket.getDocCount()); builder.field(CommonFields.DOC_COUNT, bucket.getDocCount());
builder.field("score", bucket.score); builder.field("score", bucket.score);

View File

@ -19,13 +19,14 @@
package org.elasticsearch.search.aggregations.bucket.significant; package org.elasticsearch.search.aggregations.bucket.significant;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.terms.LongTermsAggregator; import org.elasticsearch.search.aggregations.bucket.terms.LongTermsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.ContextIndexSearcher;
import java.io.IOException; import java.io.IOException;
@ -37,11 +38,11 @@ import java.util.Collections;
*/ */
public class SignificantLongTermsAggregator extends LongTermsAggregator { public class SignificantLongTermsAggregator extends LongTermsAggregator {
public SignificantLongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, ValueFormatter formatter, public SignificantLongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format,
long estimatedBucketCount, int requiredSize, int shardSize, long minDocCount, long estimatedBucketCount, int requiredSize, int shardSize, long minDocCount,
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory) { AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory) {
super(name, factories, valuesSource, formatter, estimatedBucketCount, null, requiredSize, shardSize, minDocCount, aggregationContext, parent); super(name, factories, valuesSource, format, estimatedBucketCount, null, requiredSize, shardSize, minDocCount, aggregationContext, parent);
this.termsAggFactory = termsAggFactory; this.termsAggFactory = termsAggFactory;
} }
@ -95,8 +96,7 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator {
bucket.aggregations = bucketAggregations(bucket.bucketOrd); bucket.aggregations = bucketAggregations(bucket.bucketOrd);
list[i] = bucket; list[i] = bucket;
} }
return new SignificantLongTerms(subsetSize, supersetSize, name, formatter, requiredSize, minDocCount, Arrays.asList(list)); return new SignificantLongTerms(subsetSize, supersetSize, name, formatter, requiredSize, minDocCount, Arrays.asList(list));
} }
@Override @Override

View File

@ -28,15 +28,16 @@ import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.index.FilterableTermsEnum; import org.elasticsearch.common.lucene.index.FilterableTermsEnum;
import org.elasticsearch.common.lucene.index.FreqTermsEnum; import org.elasticsearch.common.lucene.index.FreqTermsEnum;
import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.aggregations.*; import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.Aggregator.BucketAggregationMode; import org.elasticsearch.search.aggregations.Aggregator.BucketAggregationMode;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
@ -60,9 +61,11 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
private int numberOfAggregatorsCreated = 0; private int numberOfAggregatorsCreated = 0;
private Filter filter; private Filter filter;
public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, ValueFormatter formatter, ValueParser parser, int requiredSize, int shardSize, long minDocCount, IncludeExclude includeExclude, String executionHint, Filter filter) { public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, int requiredSize, int shardSize, long minDocCount, IncludeExclude includeExclude, String executionHint, Filter filter) {
super(name, SignificantStringTerms.TYPE.name(), valueSourceConfig, formatter, parser); super(name, SignificantStringTerms.TYPE.name(), valueSourceConfig);
this.requiredSize = requiredSize; this.requiredSize = requiredSize;
this.shardSize = shardSize; this.shardSize = shardSize;
this.minDocCount = minDocCount; this.minDocCount = minDocCount;
@ -150,7 +153,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
if (((ValuesSource.Numeric) valuesSource).isFloatingPoint()) { if (((ValuesSource.Numeric) valuesSource).isFloatingPoint()) {
throw new UnsupportedOperationException("No support for examining floating point numerics"); throw new UnsupportedOperationException("No support for examining floating point numerics");
} }
return new SignificantLongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, formatter, estimatedBucketCount, requiredSize, shardSize, minDocCount, aggregationContext, parent, this); return new SignificantLongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), estimatedBucketCount, requiredSize, shardSize, minDocCount, aggregationContext, parent, this);
} }
throw new AggregationExecutionException("significant_terms aggregation cannot be applied to field [" + config.fieldContext().field() + throw new AggregationExecutionException("significant_terms aggregation cannot be applied to field [" + config.fieldContext().field() +

View File

@ -19,27 +19,16 @@
package org.elasticsearch.search.aggregations.bucket.significant; package org.elasticsearch.search.aggregations.bucket.significant;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.Filter;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.BucketUtils; import org.elasticsearch.search.aggregations.bucket.BucketUtils;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.regex.Pattern;
/** /**
* *
@ -60,33 +49,32 @@ public class SignificantTermsParser implements Aggregator.Parser {
@Override @Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
String field = null; ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, SignificantStringTerms.TYPE, context)
.scriptable(false)
.formattable(true)
.requiresSortedValues(true)
.requiresUniqueValues(true)
.build();
IncludeExclude.Parser incExcParser = new IncludeExclude.Parser(aggregationName, SignificantStringTerms.TYPE, context);
Filter filter = null; Filter filter = null;
int requiredSize = DEFAULT_REQUIRED_SIZE; int requiredSize = DEFAULT_REQUIRED_SIZE;
int shardSize = DEFAULT_SHARD_SIZE; int shardSize = DEFAULT_SHARD_SIZE;
String format = null;
String include = null;
int includeFlags = 0; // 0 means no flags
String exclude = null;
int excludeFlags = 0; // 0 means no flags
String executionHint = null;
long minDocCount = DEFAULT_MIN_DOC_COUNT; long minDocCount = DEFAULT_MIN_DOC_COUNT;
String executionHint = null;
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
} else if (vsParser.token(currentFieldName, token, parser)) {
continue;
} else if (incExcParser.token(currentFieldName, token, parser)) {
continue;
} else if (token == XContentParser.Token.VALUE_STRING) { } else if (token == XContentParser.Token.VALUE_STRING) {
if ("field".equals(currentFieldName)) { if ("execution_hint".equals(currentFieldName) || "executionHint".equals(currentFieldName)) {
field = parser.text();
} else if ("format".equals(currentFieldName)) {
format = parser.text();
} else if ("include".equals(currentFieldName)) {
include = parser.text();
} else if ("exclude".equals(currentFieldName)) {
exclude = parser.text();
} else if ("execution_hint".equals(currentFieldName) || "executionHint".equals(currentFieldName)) {
executionHint = parser.text(); executionHint = parser.text();
} else { } else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
@ -112,43 +100,8 @@ public class SignificantTermsParser implements Aggregator.Parser {
// filters defined by end users and parsed below are not. // filters defined by end users and parsed below are not.
// if ("background_context".equals(currentFieldName)) { // if ("background_context".equals(currentFieldName)) {
// filter = context.queryParserService().parseInnerFilter(parser).filter(); // filter = context.queryParserService().parseInnerFilter(parser).filter();
// } else // }
if ("include".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("pattern".equals(currentFieldName)) {
include = parser.text();
} else if ("flags".equals(currentFieldName)) {
includeFlags = Regex.flagsFromString(parser.text());
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("flags".equals(currentFieldName)) {
includeFlags = parser.intValue();
}
}
}
} else if ("exclude".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("pattern".equals(currentFieldName)) {
exclude = parser.text();
} else if ("flags".equals(currentFieldName)) {
excludeFlags = Regex.flagsFromString(parser.text());
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("flags".equals(currentFieldName)) {
excludeFlags = parser.intValue();
}
}
}
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else { } else {
throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "]."); throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].");
} }
@ -171,55 +124,8 @@ public class SignificantTermsParser implements Aggregator.Parser {
shardSize = requiredSize; shardSize = requiredSize;
} }
IncludeExclude includeExclude = null; IncludeExclude includeExclude = incExcParser.includeExclude();
if (include != null || exclude != null) { return new SignificantTermsAggregatorFactory(aggregationName, vsParser.config(), requiredSize, shardSize, minDocCount, includeExclude, executionHint, filter);
Pattern includePattern = include != null ? Pattern.compile(include, includeFlags) : null;
Pattern excludePattern = exclude != null ? Pattern.compile(exclude, excludeFlags) : null;
includeExclude = new IncludeExclude(includePattern, excludePattern);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
ValuesSourceConfig<?> config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
config.unmapped(true);
return new SignificantTermsAggregatorFactory(aggregationName, config, null, null, requiredSize, shardSize, minDocCount, includeExclude, executionHint, filter);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
ValuesSourceConfig<?> config;
ValueFormatter valueFormatter = null;
ValueParser valueParser = null;
if (mapper instanceof DateFieldMapper) {
DateFieldMapper dateMapper = (DateFieldMapper) mapper;
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
valueFormatter = format == null ?
new ValueFormatter.DateTime(dateMapper.dateTimeFormatter()) :
new ValueFormatter.DateTime(format);
valueParser = new ValueParser.DateMath(dateMapper.dateMathParser());
} else if (mapper instanceof IpFieldMapper) {
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
valueFormatter = ValueFormatter.IPv4;
valueParser = ValueParser.IPv4;
} else if (indexFieldData instanceof IndexNumericFieldData) {
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
if (format != null) {
valueFormatter = new ValueFormatter.Number.Pattern(format);
}
} else {
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
// TODO: it will make sense to set false instead here if the aggregator factory uses
// ordinals instead of hash tables
config.needsHashes(true);
}
config.fieldContext(new FieldContext(field, indexFieldData));
// We need values to be unique to be able to run terms aggs efficiently
config.ensureUnique(true);
return new SignificantTermsAggregatorFactory(aggregationName, config, valueFormatter, valueParser, requiredSize, shardSize, minDocCount, includeExclude, executionHint, filter);
}
}
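For orientation, a minimal sketch of the parse-loop shape this commit converges on: the aggregation-specific parser keeps only its own keys and defers value-source and include/exclude handling to the two shared parsers. The builder call and the SignificantStringTerms type constant are assumptions here, inferred from the TermsParser changes further down.

    // Hedged sketch, not the committed code.
    ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, SignificantStringTerms.TYPE, context).build();
    IncludeExclude.Parser incExcParser = new IncludeExclude.Parser(aggregationName, SignificantStringTerms.TYPE, context);

    XContentParser.Token token;
    String currentFieldName = null;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (vsParser.token(currentFieldName, token, parser)) {
            continue; // field / script / lang / format handled by the shared parser
        } else if (incExcParser.token(currentFieldName, token, parser)) {
            continue; // include / exclude, string or {pattern, flags} form
        }
        // ... only significant_terms-specific keys are handled below this point
    }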

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.StringText;
@ -88,17 +89,13 @@ public class DoubleTerms extends InternalTerms {
}
private ValueFormatter valueFormatter; private @Nullable ValueFormatter formatter;
DoubleTerms() {} // for serialization
public DoubleTerms(String name, InternalOrder order, int requiredSize, long minDocCount, Collection<InternalTerms.Bucket> buckets) { public DoubleTerms(String name, InternalOrder order, @Nullable ValueFormatter formatter, int requiredSize, long minDocCount, Collection<InternalTerms.Bucket> buckets) {
this(name, order, null, requiredSize, minDocCount, buckets);
}
public DoubleTerms(String name, InternalOrder order, ValueFormatter valueFormatter, int requiredSize, long minDocCount, Collection<InternalTerms.Bucket> buckets) {
super(name, order, requiredSize, minDocCount, buckets);
this.valueFormatter = valueFormatter; this.formatter = formatter;
}
@Override
@ -166,7 +163,7 @@ public class DoubleTerms extends InternalTerms {
public void readFrom(StreamInput in) throws IOException {
this.name = in.readString();
this.order = InternalOrder.Streams.readOrder(in);
this.valueFormatter = ValueFormatterStreams.readOptional(in); this.formatter = ValueFormatterStreams.readOptional(in);
this.requiredSize = readSize(in);
this.minDocCount = in.readVLong();
int size = in.readVInt();
@ -182,7 +179,7 @@ public class DoubleTerms extends InternalTerms {
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
InternalOrder.Streams.writeOrder(order, out);
ValueFormatterStreams.writeOptional(valueFormatter, out); ValueFormatterStreams.writeOptional(formatter, out);
writeSize(requiredSize, out);
out.writeVLong(minDocCount);
out.writeVInt(buckets.size());
@ -200,8 +197,8 @@ public class DoubleTerms extends InternalTerms {
for (InternalTerms.Bucket bucket : buckets) {
builder.startObject();
builder.field(CommonFields.KEY, ((Bucket) bucket).term);
if (valueFormatter != null) { if (formatter != null) {
builder.field(CommonFields.KEY_AS_STRING, valueFormatter.format(((Bucket) bucket).term)); builder.field(CommonFields.KEY_AS_STRING, formatter.format(((Bucket) bucket).term));
}
builder.field(CommonFields.DOC_COUNT, bucket.getDocCount());
((InternalAggregations) bucket.getAggregations()).toXContentInternal(builder, params);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.terms;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.DoubleValues;
@ -28,6 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
@ -48,11 +50,11 @@ public class DoubleTermsAggregator extends BucketsAggregator {
private final LongHash bucketOrds;
private DoubleValues values;
public DoubleTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, ValueFormatter formatter, long estimatedBucketCount, public DoubleTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format, long estimatedBucketCount,
InternalOrder order, int requiredSize, int shardSize, long minDocCount, AggregationContext aggregationContext, Aggregator parent) {
super(name, BucketAggregationMode.PER_BUCKET, factories, estimatedBucketCount, aggregationContext, parent);
this.valuesSource = valuesSource;
this.formatter = formatter; this.formatter = format != null ? format.formatter() : null;
this.order = InternalOrder.validate(order, this);
this.requiredSize = requiredSize;
this.shardSize = shardSize;
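The single changed line in this constructor is the pattern the cleanup applies everywhere: aggregators now receive the optional ValueFormat resolved by the shared config and unwrap it themselves. Restated as a standalone helper (the helper is illustrative, not part of the commit):

    // Resolve the optional ValueFormat once; keep a nullable formatter for key rendering.
    static ValueFormatter resolveFormatter(@Nullable ValueFormat format) {
        return format != null ? format.formatter() : null;
    }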

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.StringText;
@ -88,13 +89,13 @@ public class LongTerms extends InternalTerms {
}
}
private ValueFormatter valueFormatter; private @Nullable ValueFormatter formatter;
LongTerms() {} // for serialization
public LongTerms(String name, InternalOrder order, ValueFormatter valueFormatter, int requiredSize, long minDocCount, Collection<InternalTerms.Bucket> buckets) { public LongTerms(String name, InternalOrder order, @Nullable ValueFormatter formatter, int requiredSize, long minDocCount, Collection<InternalTerms.Bucket> buckets) {
super(name, order, requiredSize, minDocCount, buckets);
this.valueFormatter = valueFormatter; this.formatter = formatter;
}
@Override
@ -162,7 +163,7 @@ public class LongTerms extends InternalTerms {
public void readFrom(StreamInput in) throws IOException {
this.name = in.readString();
this.order = InternalOrder.Streams.readOrder(in);
this.valueFormatter = ValueFormatterStreams.readOptional(in); this.formatter = ValueFormatterStreams.readOptional(in);
this.requiredSize = readSize(in);
this.minDocCount = in.readVLong();
int size = in.readVInt();
@ -178,7 +179,7 @@ public class LongTerms extends InternalTerms {
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
InternalOrder.Streams.writeOrder(order, out);
ValueFormatterStreams.writeOptional(valueFormatter, out); ValueFormatterStreams.writeOptional(formatter, out);
writeSize(requiredSize, out);
out.writeVLong(minDocCount);
out.writeVInt(buckets.size());
@ -196,8 +197,8 @@ public class LongTerms extends InternalTerms {
for (InternalTerms.Bucket bucket : buckets) {
builder.startObject();
builder.field(CommonFields.KEY, ((Bucket) bucket).term);
if (valueFormatter != null) { if (formatter != null) {
builder.field(CommonFields.KEY_AS_STRING, valueFormatter.format(((Bucket) bucket).term)); builder.field(CommonFields.KEY_AS_STRING, formatter.format(((Bucket) bucket).term));
}
builder.field(CommonFields.DOC_COUNT, bucket.getDocCount());
((InternalAggregations) bucket.getAggregations()).toXContentInternal(builder, params);
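DoubleTerms and LongTerms persist that nullable formatter identically; the round-trip leans on the null-tolerant stream helpers visible in readFrom/writeTo above:

    // Write side: tolerates a null formatter.
    ValueFormatterStreams.writeOptional(formatter, out);
    // Read side: yields null when the writer had no formatter to send.
    ValueFormatter formatter = ValueFormatterStreams.readOptional(in);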

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.terms;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.LongValues;
@ -29,6 +30,7 @@ import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
@ -45,15 +47,15 @@ public class LongTermsAggregator extends BucketsAggregator {
protected final int shardSize;
protected final long minDocCount;
protected final ValuesSource.Numeric valuesSource;
protected final ValueFormatter formatter; protected final @Nullable ValueFormatter formatter;
protected final LongHash bucketOrds;
private LongValues values;
public LongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, ValueFormatter formatter, long estimatedBucketCount, public LongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format, long estimatedBucketCount,
InternalOrder order, int requiredSize, int shardSize, long minDocCount, AggregationContext aggregationContext, Aggregator parent) {
super(name, BucketAggregationMode.PER_BUCKET, factories, estimatedBucketCount, aggregationContext, parent);
this.valuesSource = valuesSource;
this.formatter = formatter; this.formatter = format != null ? format.formatter() : null;
this.order = InternalOrder.validate(order, this);
this.requiredSize = requiredSize;
this.shardSize = shardSize;

View File

@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.support.ScriptValueType;
import java.util.Collection;
import java.util.Comparator;
@ -34,13 +33,13 @@ public interface Terms extends MultiBucketsAggregation {
static enum ValueType {
STRING(ScriptValueType.STRING), STRING(org.elasticsearch.search.aggregations.support.ValueType.STRING),
LONG(ScriptValueType.LONG), LONG(org.elasticsearch.search.aggregations.support.ValueType.LONG),
DOUBLE(ScriptValueType.DOUBLE); DOUBLE(org.elasticsearch.search.aggregations.support.ValueType.DOUBLE);
final ScriptValueType scriptValueType; final org.elasticsearch.search.aggregations.support.ValueType scriptValueType;
private ValueType(ScriptValueType scriptValueType) { private ValueType(org.elasticsearch.search.aggregations.support.ValueType scriptValueType) {
this.scriptValueType = scriptValueType;
}

View File

@ -27,8 +27,6 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
/**
*
*/
@ -140,10 +138,10 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory {
private final IncludeExclude includeExclude;
private final String executionHint;
public TermsAggregatorFactory(String name, ValuesSourceConfig config, ValueFormatter formatter, ValueParser parser, public TermsAggregatorFactory(String name, ValuesSourceConfig config, InternalOrder order, int requiredSize,
InternalOrder order, int requiredSize, int shardSize, long minDocCount, IncludeExclude includeExclude, String executionHint) { int shardSize, long minDocCount, IncludeExclude includeExclude, String executionHint) {
super(name, StringTerms.TYPE.name(), config, formatter, parser); super(name, StringTerms.TYPE.name(), config);
this.order = order;
this.requiredSize = requiredSize;
this.shardSize = shardSize;
@ -225,9 +223,9 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory {
if (valuesSource instanceof ValuesSource.Numeric) {
if (((ValuesSource.Numeric) valuesSource).isFloatingPoint()) {
return new DoubleTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, formatter, estimatedBucketCount, order, requiredSize, shardSize, minDocCount, aggregationContext, parent); return new DoubleTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), estimatedBucketCount, order, requiredSize, shardSize, minDocCount, aggregationContext, parent);
}
return new LongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, formatter, estimatedBucketCount, order, requiredSize, shardSize, minDocCount, aggregationContext, parent); return new LongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), estimatedBucketCount, order, requiredSize, shardSize, minDocCount, aggregationContext, parent);
}
throw new AggregationExecutionException("terms aggregation cannot be applied to field [" + config.fieldContext().field() + throw new AggregationExecutionException("terms aggregation cannot be applied to field [" + config.fieldContext().field() +

View File

@ -18,28 +18,15 @@
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
import java.util.regex.Pattern;
/**
*
*/
@ -54,56 +41,37 @@ public class TermsParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
String field = null;
String script = null;
String scriptLang = null;
Map<String, Object> scriptParams = null;
Terms.ValueType valueType = null;
int requiredSize = 10;
int shardSize = -1;
String orderKey = "_count";
boolean orderAsc = false;
String format = null;
boolean assumeUnique = false;
String include = null;
int includeFlags = 0; // 0 means no flags
String exclude = null;
int excludeFlags = 0; // 0 means no flags
String executionHint = null;
long minDocCount = 1;
ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context)
.requiresSortedValues(true)
.requiresUniqueValues(true)
.formattable(true)
.build();
IncludeExclude.Parser incExcParser = new IncludeExclude.Parser(aggregationName, StringTerms.TYPE, context);
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (vsParser.token(currentFieldName, token, parser)) {
continue;
} else if (incExcParser.token(currentFieldName, token, parser)) {
continue;
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("field".equals(currentFieldName)) { if ("execution_hint".equals(currentFieldName) || "executionHint".equals(currentFieldName)) {
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else if ("value_type".equals(currentFieldName) || "valueType".equals(currentFieldName)) {
valueType = Terms.ValueType.resolveType(parser.text());
} else if ("format".equals(currentFieldName)) {
format = parser.text();
} else if ("include".equals(currentFieldName)) {
include = parser.text();
} else if ("exclude".equals(currentFieldName)) {
exclude = parser.text();
} else if ("execution_hint".equals(currentFieldName) || "executionHint".equals(currentFieldName)) {
executionHint = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("script_values_unique".equals(currentFieldName) || "scriptValuesUnique".equals(currentFieldName)) {
assumeUnique = parser.booleanValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("size".equals(currentFieldName)) {
requiredSize = parser.intValue();
@ -115,9 +83,7 @@ public class TermsParser implements Aggregator.Parser {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
} }
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) { if ("order".equals(currentFieldName)) {
scriptParams = parser.map();
} else if ("order".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
orderKey = parser.currentName();
@ -134,38 +100,6 @@ public class TermsParser implements Aggregator.Parser {
throw new SearchParseException(context, "Unexpected token " + token + " for [order] in [" + aggregationName + "]."); throw new SearchParseException(context, "Unexpected token " + token + " for [order] in [" + aggregationName + "].");
} }
} }
} else if ("include".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("pattern".equals(currentFieldName)) {
include = parser.text();
} else if ("flags".equals(currentFieldName)) {
includeFlags = Regex.flagsFromString(parser.text());
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("flags".equals(currentFieldName)) {
includeFlags = parser.intValue();
}
}
}
} else if ("exclude".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("pattern".equals(currentFieldName)) {
exclude = parser.text();
} else if ("flags".equals(currentFieldName)) {
excludeFlags = Regex.flagsFromString(parser.text());
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("flags".equals(currentFieldName)) {
excludeFlags = parser.intValue();
}
}
}
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
@ -187,101 +121,9 @@ public class TermsParser implements Aggregator.Parser {
shardSize = requiredSize;
}
IncludeExclude includeExclude = null; IncludeExclude includeExclude = incExcParser.includeExclude();
if (include != null || exclude != null) {
Pattern includePattern = include != null ? Pattern.compile(include, includeFlags) : null;
Pattern excludePattern = exclude != null ? Pattern.compile(exclude, excludeFlags) : null;
includeExclude = new IncludeExclude(includePattern, excludePattern);
}
InternalOrder order = resolveOrder(orderKey, orderAsc);
SearchScript searchScript = null; return new TermsAggregatorFactory(aggregationName, vsParser.config(), order, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
if (script != null) {
searchScript = context.scriptService().search(context.lookup(), scriptLang, script, scriptParams);
}
if (field == null) {
Class<? extends ValuesSource> valueSourceType = script == null ?
ValuesSource.class : // unknown, will inherit whatever is in the context
valueType != null ? valueType.scriptValueType.getValuesSourceType() : // the user explicitly defined a value type
ValuesSource.Bytes.class; // defaulting to bytes
ValuesSourceConfig<?> config = new ValuesSourceConfig(valueSourceType);
ValueFormatter valueFormatter = null;
ValueParser valueParser = null;
if (valueType != null) {
config.scriptValueType(valueType.scriptValueType);
if (valueType != Terms.ValueType.STRING && format != null) {
valueFormatter = new ValueFormatter.Number.Pattern(format);
valueParser = new ValueParser.Number.Pattern(format);
}
}
config.script(searchScript);
if (!assumeUnique) {
config.ensureUnique(true);
}
return new TermsAggregatorFactory(aggregationName, config, valueFormatter, valueParser, order, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
ValuesSourceConfig<?> config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
ValueFormatter valueFormatter = null;
ValueParser valueParser = null;
config.unmapped(true);
if (valueType != null) {
config.scriptValueType(valueType.scriptValueType);
if (valueType != Terms.ValueType.STRING && format != null) {
valueFormatter = new ValueFormatter.Number.Pattern(format);
valueParser = new ValueParser.Number.Pattern(format);
}
}
return new TermsAggregatorFactory(aggregationName, config, valueFormatter, valueParser, order, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
ValuesSourceConfig<?> config;
ValueFormatter valueFormatter = null;
ValueParser valueParser = null;
if (mapper instanceof DateFieldMapper) {
DateFieldMapper dateMapper = (DateFieldMapper) mapper;
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
valueFormatter = format == null ?
new ValueFormatter.DateTime(dateMapper.dateTimeFormatter()) :
new ValueFormatter.DateTime(format);
valueParser = new ValueParser.DateMath(dateMapper.dateMathParser());
} else if (mapper instanceof IpFieldMapper) {
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
valueFormatter = ValueFormatter.IPv4;
valueParser = ValueParser.IPv4;
} else if (indexFieldData instanceof IndexNumericFieldData) {
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
if (format != null) {
valueFormatter = new ValueFormatter.Number.Pattern(format);
valueParser = new ValueParser.Number.Pattern(format);
}
} else {
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
// TODO: it will make sense to set false instead here if the aggregator factory uses
// ordinals instead of hash tables
config.needsHashes(true);
}
config.script(searchScript);
config.fieldContext(new FieldContext(field, indexFieldData));
// We need values to be unique to be able to run terms aggs efficiently
if (!assumeUnique) {
config.ensureUnique(true);
}
return new TermsAggregatorFactory(aggregationName, config, valueFormatter, valueParser, order, requiredSize, shardSize, minDocCount, includeExclude, executionHint);
}
static InternalOrder resolveOrder(String key, boolean asc) {
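With the shared parsers in place, the tail of parse() reduces to assembling the factory from pre-resolved pieces, condensed from the lines above:

    // includeExclude is null when neither "include" nor "exclude" was given.
    IncludeExclude includeExclude = incExcParser.includeExclude();
    InternalOrder order = resolveOrder(orderKey, orderAsc);
    return new TermsAggregatorFactory(aggregationName, vsParser.config(), order,
            requiredSize, shardSize, minDocCount, includeExclude, executionHint);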

View File

@ -21,7 +21,13 @@ package org.elasticsearch.search.aggregations.bucket.terms.support;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -64,4 +70,86 @@ public class IncludeExclude {
}
return !exclude.reset(scratch).matches();
}
public static class Parser {
private final String aggName;
private final InternalAggregation.Type aggType;
private final SearchContext context;
String include = null;
int includeFlags = 0; // 0 means no flags
String exclude = null;
int excludeFlags = 0; // 0 means no flags
public Parser(String aggName, InternalAggregation.Type aggType, SearchContext context) {
this.aggName = aggName;
this.aggType = aggType;
this.context = context;
}
public boolean token(String currentFieldName, XContentParser.Token token, XContentParser parser) throws IOException {
if (token == XContentParser.Token.VALUE_STRING) {
if ("include".equals(currentFieldName)) {
include = parser.text();
} else if ("exclude".equals(currentFieldName)) {
exclude = parser.text();
} else {
return false;
}
return true;
}
if (token == XContentParser.Token.START_OBJECT) {
if ("include".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("pattern".equals(currentFieldName)) {
include = parser.text();
} else if ("flags".equals(currentFieldName)) {
includeFlags = Regex.flagsFromString(parser.text());
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("flags".equals(currentFieldName)) {
includeFlags = parser.intValue();
}
}
}
} else if ("exclude".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("pattern".equals(currentFieldName)) {
exclude = parser.text();
} else if ("flags".equals(currentFieldName)) {
excludeFlags = Regex.flagsFromString(parser.text());
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("flags".equals(currentFieldName)) {
excludeFlags = parser.intValue();
}
}
}
} else {
return false;
}
return true;
}
return false;
}
public IncludeExclude includeExclude() {
if (include == null && exclude == null) {
return null;
}
Pattern includePattern = include != null ? Pattern.compile(include, includeFlags) : null;
Pattern excludePattern = exclude != null ? Pattern.compile(exclude, excludeFlags) : null;
return new IncludeExclude(includePattern, excludePattern);
}
}
}
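For reference, the two request shapes the consolidated parser accepts reduce to the same compiled form. A hedged sketch in plain java.util.regex terms; the CASE_INSENSITIVE mapping through Regex.flagsFromString is an assumption:

    // "include": "foo.*"  -> a pattern compiled with no flags
    Pattern bare = Pattern.compile("foo.*");
    // "include": { "pattern": "foo.*", "flags": "CASE_INSENSITIVE" }
    Pattern flagged = Pattern.compile("foo.*", Pattern.CASE_INSENSITIVE);
    // Either side may be null; includeExclude() returns null when both are unset.
    IncludeExclude incExc = new IncludeExclude(flagged, null);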

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
*
*/
public abstract class NumericValuesSourceMetricsAggregatorParser<S extends MetricsAggregation> implements Aggregator.Parser {
protected final InternalAggregation.Type aggType;
protected NumericValuesSourceMetricsAggregatorParser(InternalAggregation.Type aggType) {
this.aggType = aggType;
}
@Override
public String type() {
return aggType.name();
}
protected boolean requiresSortedValues() {
return false;
}
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceParser<ValuesSource.Numeric> vsParser = ValuesSourceParser.numeric(aggregationName, aggType, context)
.requiresSortedValues(requiresSortedValues())
.build();
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (!vsParser.token(currentFieldName, token, parser)) {
throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].");
}
}
return createFactory(aggregationName, vsParser.config());
}
protected abstract AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config);
}
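All single-value numeric metrics parsers collapse onto this base class; see AvgParser, MaxParser, MinParser, StatsParser, ExtendedStatsParser and SumParser below. A hypothetical subclass, with FooParser, InternalFoo and FooAggregator as placeholder names:

    // Placeholder names throughout; only the shape matches the real subclasses.
    public class FooParser extends NumericValuesSourceMetricsAggregatorParser<InternalFoo> {

        public FooParser() {
            super(InternalFoo.TYPE); // the type also supplies the parser's type() name
        }

        @Override
        protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config) {
            return new FooAggregator.Factory(aggregationName, config);
        }
    }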

View File

@ -1,111 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
*
*/
public abstract class ValuesSourceMetricsAggregatorParser<S extends MetricsAggregation> implements Aggregator.Parser {
protected boolean requiresSortedValues() {
return false;
}
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
String field = null;
String script = null;
String scriptLang = null;
Map<String, Object> scriptParams = null;
boolean assumeSorted = false;
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("field".equals(currentFieldName)) {
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) {
scriptParams = parser.map();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("script_values_sorted".equals(currentFieldName) || "scriptValuesSorted".equals(currentFieldName)) {
assumeSorted = parser.booleanValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else {
throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].");
}
}
if (script != null) {
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
if (!assumeSorted && requiresSortedValues()) {
config.ensureSorted(true);
}
if (field == null) {
return createFactory(aggregationName, config);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return createFactory(aggregationName, config);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
return createFactory(aggregationName, config);
}
protected abstract AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config);
}

View File

@ -19,18 +19,17 @@
package org.elasticsearch.search.aggregations.metrics.avg;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser; import org.elasticsearch.search.aggregations.metrics.NumericValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
/**
*
*/
public class AvgParser extends ValuesSourceMetricsAggregatorParser<InternalAvg> { public class AvgParser extends NumericValuesSourceMetricsAggregatorParser<InternalAvg> {
@Override public AvgParser() {
public String type() { super(InternalAvg.TYPE);
return InternalAvg.TYPE.name();
}
@Override

View File

@ -21,20 +21,15 @@ package org.elasticsearch.search.aggregations.metrics.cardinality;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.core.Murmur3FieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
public class CardinalityParser implements Aggregator.Parser {
@ -48,34 +43,19 @@ public class CardinalityParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String name, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceParser vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).build();
long precisionThreshold = -1;
Boolean rehash = null;
String field = null;
String script = null;
String scriptLang = null;
Map<String, Object> scriptParams = null;
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) { } else if (vsParser.token(currentFieldName, token, parser)) {
if ("field".equals(currentFieldName)) { continue;
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + name + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) {
scriptParams = parser.map();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + name + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("rehash".equals(currentFieldName)) {
rehash = parser.booleanValue();
@ -93,41 +73,16 @@ public class CardinalityParser implements Aggregator.Parser {
}
}
ValuesSourceConfig<?> config = null; ValuesSourceConfig<?> config = vsParser.config();
if (script != null) { if (rehash == null && config.fieldContext() != null && config.fieldContext().mapper() instanceof Murmur3FieldMapper) {
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
if (field != null) {
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (config == null) {
if (mapper instanceof NumberFieldMapper) {
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
} else {
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
if (mapper == null) {
config.unmapped(true);
}
}
if (rehash == null && mapper instanceof Murmur3FieldMapper) {
rehash = false;
} } else if (rehash == null) {
}
if (mapper != null) {
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
}
} else if (config == null) {
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
}
if (rehash == null) {
rehash = true;
}
return new CardinalityAggregatorFactory(name, config, precisionThreshold, rehash);
}
}
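The rehash default above, restated as one expression using the accessors shown in the diff: rehashing is skipped only when the user left it unset and the target field is already a murmur3 hash.

    // Equivalent one-expression form of the new default; illustrative only.
    if (rehash == null) {
        rehash = !(config.fieldContext() != null
                && config.fieldContext().mapper() instanceof Murmur3FieldMapper);
    }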

View File

@ -19,18 +19,17 @@
package org.elasticsearch.search.aggregations.metrics.max;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser; import org.elasticsearch.search.aggregations.metrics.NumericValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
/**
*
*/
public class MaxParser extends ValuesSourceMetricsAggregatorParser<InternalMax> { public class MaxParser extends NumericValuesSourceMetricsAggregatorParser<InternalMax> {
@Override public MaxParser() {
public String type() { super(InternalMax.TYPE);
return InternalMax.TYPE.name();
}
@Override

View File

@ -19,18 +19,17 @@
package org.elasticsearch.search.aggregations.metrics.min;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser; import org.elasticsearch.search.aggregations.metrics.NumericValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
/**
*
*/
public class MinParser extends ValuesSourceMetricsAggregatorParser<InternalMin> { public class MinParser extends NumericValuesSourceMetricsAggregatorParser<InternalMin> {
@Override public MinParser() {
public String type() { super(InternalMin.TYPE);
return InternalMin.TYPE.name();
}
@Override

View File

@ -20,15 +20,12 @@ package org.elasticsearch.search.aggregations.metrics.percentiles;
import com.carrotsearch.hppc.DoubleArrayList;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigest;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -55,14 +52,11 @@ public class PercentilesParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class); ValuesSourceParser<ValuesSource.Numeric> vsParser = ValuesSourceParser.numeric(aggregationName, InternalPercentiles.TYPE, context)
.requiresSortedValues(true)
.build();
String field = null;
String script = null;
String scriptLang = null;
double[] percents = DEFAULT_PERCENTS;
Map<String, Object> scriptParams = null;
boolean assumeSorted = false;
boolean keyed = true;
Map<String, Object> settings = null;
@ -71,19 +65,8 @@ public class PercentilesParser implements Aggregator.Parser {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) { } else if (vsParser.token(currentFieldName, token, parser)) {
if ("field".equals(currentFieldName)) { continue;
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else {
if (settings == null) {
settings = new HashMap<>();
}
settings.put(currentFieldName, parser.text());
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("percents".equals(currentFieldName)) {
DoubleArrayList values = new DoubleArrayList(10);
@ -101,56 +84,22 @@ public class PercentilesParser implements Aggregator.Parser {
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token.isValue()) {
if ("params".equals(currentFieldName)) { if (token == XContentParser.Token.VALUE_BOOLEAN && "keyed".equals(currentFieldName)) {
scriptParams = parser.map();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("script_values_sorted".equals(currentFieldName) || "scriptValuesSorted".equals(currentFieldName)) {
assumeSorted = parser.booleanValue();
} if ("keyed".equals(currentFieldName)) {
keyed = parser.booleanValue();
} else {
if (settings == null) {
settings = new HashMap<>();
}
settings.put(currentFieldName, parser.booleanValue()); settings.put(currentFieldName, parser.objectText());
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (settings == null) {
settings = new HashMap<>();
}
settings.put(currentFieldName, parser.numberValue());
} else {
throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].");
}
}
PercentilesEstimator.Factory estimatorFactory = EstimatorType.TDIGEST.estimatorFactory(settings);
return new PercentilesAggregator.Factory(aggregationName, vsParser.config(), percents, estimatorFactory, keyed);
if (script != null) {
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
if (!assumeSorted) {
config.ensureSorted(true);
}
if (field == null) {
return new PercentilesAggregator.Factory(aggregationName, config, percents, estimatorFactory, keyed);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new PercentilesAggregator.Factory(aggregationName, config, percents, estimatorFactory, keyed);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
return new PercentilesAggregator.Factory(aggregationName, config, percents, estimatorFactory, keyed);
}
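The reworked parse() hands the factory four pre-digested inputs; condensed from the lines above, with defaults noted:

    double[] percents = DEFAULT_PERCENTS; // replaced wholesale by a "percents" array, not merged
    boolean keyed = true;                 // the only scalar the parser consumes itself
    PercentilesEstimator.Factory estimatorFactory = EstimatorType.TDIGEST.estimatorFactory(settings);
    return new PercentilesAggregator.Factory(aggregationName, vsParser.config(), percents, estimatorFactory, keyed);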
/**

View File

@ -19,18 +19,17 @@
package org.elasticsearch.search.aggregations.metrics.stats;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser; import org.elasticsearch.search.aggregations.metrics.NumericValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
/**
*
*/
public class StatsParser extends ValuesSourceMetricsAggregatorParser<InternalStats> { public class StatsParser extends NumericValuesSourceMetricsAggregatorParser<InternalStats> {
@Override public StatsParser() {
public String type() { super(InternalStats.TYPE);
return InternalStats.TYPE.name();
}
@Override

View File

@ -19,18 +19,17 @@
package org.elasticsearch.search.aggregations.metrics.stats.extended; package org.elasticsearch.search.aggregations.metrics.stats.extended;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser; import org.elasticsearch.search.aggregations.metrics.NumericValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
/** /**
* *
*/ */
public class ExtendedStatsParser extends ValuesSourceMetricsAggregatorParser<InternalExtendedStats> { public class ExtendedStatsParser extends NumericValuesSourceMetricsAggregatorParser<InternalExtendedStats> {
@Override public ExtendedStatsParser() {
public String type() { super(InternalExtendedStats.TYPE);
return InternalExtendedStats.TYPE.name();
} }
@Override @Override

View File

@ -19,18 +19,17 @@
package org.elasticsearch.search.aggregations.metrics.sum; package org.elasticsearch.search.aggregations.metrics.sum;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregatorParser; import org.elasticsearch.search.aggregations.metrics.NumericValuesSourceMetricsAggregatorParser;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
/** /**
* *
*/ */
public class SumParser extends ValuesSourceMetricsAggregatorParser<InternalSum> { public class SumParser extends NumericValuesSourceMetricsAggregatorParser<InternalSum> {
@Override public SumParser() {
public String type() { super(InternalSum.TYPE);
return InternalSum.TYPE.name();
} }
@Override @Override
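The three parser diffs above are the same mechanical move: the overridden type() method becomes a constructor argument to the renamed NumericValuesSourceMetricsAggregatorParser. A hedged sketch of what a complete subclass now looks like; MinParser, InternalMin and MinAggregator are stand-ins, and the createFactory hook is assumed from the shared superclass rather than shown in this commit:

public class MinParser extends NumericValuesSourceMetricsAggregatorParser<InternalMin> {

    public MinParser() {
        super(InternalMin.TYPE); // the agg type is fixed once, in the constructor
    }

    @Override
    protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config) {
        // the superclass has already run the shared ValuesSourceParser token loop
        return new MinAggregator.Factory(aggregationName, config);
    }
}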

View File

@ -40,13 +40,13 @@ import java.io.IOException;
*/ */
public class ValueCountAggregator extends MetricsAggregator.SingleValue { public class ValueCountAggregator extends MetricsAggregator.SingleValue {
private final ValuesSource.Bytes valuesSource; private final ValuesSource valuesSource;
private BytesValues values; private BytesValues values;
// a count per bucket // a count per bucket
LongArray counts; LongArray counts;
public ValueCountAggregator(String name, long expectedBucketsCount, ValuesSource.Bytes valuesSource, AggregationContext aggregationContext, Aggregator parent) { public ValueCountAggregator(String name, long expectedBucketsCount, ValuesSource valuesSource, AggregationContext aggregationContext, Aggregator parent) {
super(name, 0, aggregationContext, parent); super(name, 0, aggregationContext, parent);
this.valuesSource = valuesSource; this.valuesSource = valuesSource;
if (valuesSource != null) { if (valuesSource != null) {
@ -96,10 +96,10 @@ public class ValueCountAggregator extends MetricsAggregator.SingleValue {
Releasables.release(counts); Releasables.release(counts);
} }
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Bytes> { public static class Factory<VS extends ValuesSource> extends ValuesSourceAggregatorFactory.LeafOnly<VS> {
public Factory(String name, ValuesSourceConfig<ValuesSource.Bytes> valuesSourceBuilder) { public Factory(String name, ValuesSourceConfig<VS> config) {
super(name, InternalValueCount.TYPE.name(), valuesSourceBuilder); super(name, InternalValueCount.TYPE.name(), config);
} }
@Override @Override
@ -108,7 +108,7 @@ public class ValueCountAggregator extends MetricsAggregator.SingleValue {
} }
@Override @Override
protected Aggregator create(ValuesSource.Bytes valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) { protected Aggregator create(ValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
return new ValueCountAggregator(name, expectedBucketsCount, valuesSource, aggregationContext, parent); return new ValueCountAggregator(name, expectedBucketsCount, valuesSource, aggregationContext, parent);
} }
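Genericizing the factory unpins value_count from bytes sources: counting only needs a per-document value count, so any resolved source type will do. An illustrative use of the loosened contract ("num_values" is an arbitrary name):

ValuesSourceConfig<ValuesSource.Numeric> config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
AggregatorFactory factory = new ValueCountAggregator.Factory<>("num_values", config);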

View File

@ -19,18 +19,13 @@
package org.elasticsearch.search.aggregations.metrics.valuecount; package org.elasticsearch.search.aggregations.metrics.valuecount;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.Map;
/** /**
* *
@ -45,64 +40,20 @@ public class ValueCountParser implements Aggregator.Parser {
@Override @Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<ValuesSource.Bytes> config = new ValuesSourceConfig<>(ValuesSource.Bytes.class); ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalValueCount.TYPE, context)
.requiresUniqueValues(true)
String field = null; .build();
String script = null;
String scriptLang = null;
Map<String, Object> scriptParams = null;
boolean assumeUnique = false;
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) { } else if (!vsParser.token(currentFieldName, token, parser)) {
if ("field".equals(currentFieldName)) {
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("script_values_unique".equals(currentFieldName) || "scriptValuesUnique".equals(currentFieldName)) {
assumeUnique = parser.booleanValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) {
scriptParams = parser.map();
}
} else {
throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "]."); throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].");
} }
} }
if (script != null) { return new ValueCountAggregator.Factory(aggregationName, vsParser.config());
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
if (!assumeUnique) {
config.ensureUnique(true);
}
if (field == null) {
return new ValueCountAggregator.Factory(aggregationName, config);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new ValueCountAggregator.Factory(aggregationName, config);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
return new ValueCountAggregator.Factory(aggregationName, config);
} }
} }
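This parser is the cleanest illustration of the new division of labour: it owns no keys of its own, so the loop is pure delegation, and the "any" builder defers the source type decision to config(). Condensed from the config() method later in this commit, the resolution for a mapped field looks like this:

// How ValuesSourceParser.any(..) picks the concrete source for a mapped field:
if (indexFieldData instanceof IndexNumericFieldData) {
    config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);   // numbers become a numeric source
} else if (indexFieldData instanceof IndexGeoPointFieldData) {
    config = new ValuesSourceConfig<>(ValuesSource.GeoPoint.class);  // geo points keep their own source
} else {
    config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);     // everything else falls back to bytes
}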

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.support; package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
/** /**
* Used by all field data based aggregators. This determine the context of the field data the aggregators are operating * Used by all field data based aggregators. This determine the context of the field data the aggregators are operating
@ -28,6 +29,7 @@ public class FieldContext {
private final String field; private final String field;
private final IndexFieldData<?> indexFieldData; private final IndexFieldData<?> indexFieldData;
private final FieldMapper<?> mapper;
/** /**
* Constructs a field data context for the given field and its index field data * Constructs a field data context for the given field and its index field data
@ -35,9 +37,10 @@ public class FieldContext {
* @param field The name of the field * @param field The name of the field
* @param indexFieldData The index field data of the field * @param indexFieldData The index field data of the field
*/ */
public FieldContext(String field, IndexFieldData<?> indexFieldData) { public FieldContext(String field, IndexFieldData<?> indexFieldData, FieldMapper mapper) {
this.field = field; this.field = field;
this.indexFieldData = indexFieldData; this.indexFieldData = indexFieldData;
this.mapper = mapper;
} }
public String field() { public String field() {
@ -51,4 +54,8 @@ public class FieldContext {
return indexFieldData; return indexFieldData;
} }
public FieldMapper mapper() {
return mapper;
}
} }
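Threading the mapper through FieldContext lets format resolution ask the mapping directly (see resolveFormat(format, mapper) in ValuesSourceParser below) instead of each consumer re-resolving it. A sketch of the new construction path, mirroring the field branch of config():

FieldMapper<?> mapper = context.smartNameFieldMapper(field);
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
FieldContext fieldContext = new FieldContext(field, indexFieldData, mapper);
fieldContext.mapper(); // e.g. a DateFieldMapper, from which a date pattern can be derived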

View File

@ -0,0 +1,103 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
*
*/
public class GeoPointParser {
private final String aggName;
private final InternalAggregation.Type aggType;
private final SearchContext context;
private final ParseField field;
GeoPoint point;
public GeoPointParser(String aggName, InternalAggregation.Type aggType, SearchContext context, ParseField field) {
this.aggName = aggName;
this.aggType = aggType;
this.context = context;
this.field = field;
}
public boolean token(String currentFieldName, XContentParser.Token token, XContentParser parser) throws IOException {
if (!field.match(currentFieldName)) {
return false;
}
if (token == XContentParser.Token.VALUE_STRING) {
point = new GeoPoint();
point.resetFromString(parser.text());
return true;
}
if (token == XContentParser.Token.START_ARRAY) {
double lat = Double.NaN;
double lon = Double.NaN;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (Double.isNaN(lon)) {
lon = parser.doubleValue();
} else if (Double.isNaN(lat)) {
lat = parser.doubleValue();
} else {
throw new SearchParseException(context, "malformed [" + currentFieldName + "] geo point array in [" +
aggName + "] " + aggType + " aggregation. a geo point array must be of the form [lon, lat]");
}
}
point = new GeoPoint(lat, lon);
return true;
}
if (token == XContentParser.Token.START_OBJECT) {
double lat = Double.NaN;
double lon = Double.NaN;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("lat".equals(currentFieldName)) {
lat = parser.doubleValue();
} else if ("lon".equals(currentFieldName)) {
lon = parser.doubleValue();
}
}
}
if (Double.isNaN(lat) || Double.isNaN(lon)) {
throw new SearchParseException(context, "malformed [" + currentFieldName + "] geo point object. either [lat] or [lon] (or both) are " +
"missing in [" + aggName + "] " + aggType + " aggregation");
}
point = new GeoPoint(lat, lon);
return true;
}
return false;
}
public GeoPoint geoPoint() {
return point;
}
}
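Usage sketch for the new helper, as a geo-aware agg parser might embed it inside its parse(..) method; the ParseField name "origin" and the geo_distance-style setting are illustrative, not part of this diff:

GeoPointParser geoPointParser = new GeoPointParser(aggregationName, InternalGeoDistance.TYPE, context, new ParseField("origin"));
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
        currentFieldName = parser.currentName();
    } else if (geoPointParser.token(currentFieldName, token, parser)) {
        continue; // accepts "origin" as a "lat,lon" string, a [lon, lat] array, or a {lat, lon} object
    } else {
        throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].");
    }
}
GeoPoint origin = geoPointParser.geoPoint(); // null if the request never supplied one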

View File

@ -1,48 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support;
/**
*
*/
public enum ScriptValueType {
STRING(ValuesSource.Bytes.class),
LONG(ValuesSource.Numeric.class),
DOUBLE(ValuesSource.Numeric.class);
final Class<? extends ValuesSource> valuesSourceType;
private ScriptValueType(Class<? extends ValuesSource> valuesSourceType) {
this.valuesSourceType = valuesSourceType;
}
public Class<? extends ValuesSource> getValuesSourceType() {
return valuesSourceType;
}
public boolean isNumeric() {
return this != STRING;
}
public boolean isFloatingPoint() {
return this == DOUBLE;
}
}

View File

@ -0,0 +1,151 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
/**
*
*/
public enum ValueType {
@Deprecated ANY("any", ValuesSource.class, IndexFieldData.class, null),
STRING("string", ValuesSource.Bytes.class, IndexFieldData.class, null),
LONG("byte|short|integer|long", ValuesSource.Numeric.class, IndexNumericFieldData.class, ValueFormat.RAW) {
@Override
public boolean isNumeric() {
return true;
}
},
DOUBLE("float|double", ValuesSource.Numeric.class, IndexNumericFieldData.class, ValueFormat.RAW) {
@Override
public boolean isNumeric() {
return true;
}
@Override
public boolean isFloatingPoint() {
return true;
}
},
NUMBER("number", ValuesSource.Numeric.class, IndexNumericFieldData.class, ValueFormat.RAW) {
@Override
public boolean isNumeric() {
return true;
}
},
DATE("date", ValuesSource.Numeric.class, IndexNumericFieldData.class, ValueFormat.DateTime.DEFAULT) {
@Override
public boolean isNumeric() {
return true;
}
},
IP("ip", ValuesSource.Numeric.class, IndexNumericFieldData.class, ValueFormat.IPv4) {
@Override
public boolean isNumeric() {
return true;
}
},
NUMERIC("numeric", ValuesSource.Numeric.class, IndexNumericFieldData.class, ValueFormat.RAW) {
@Override
public boolean isNumeric() {
return true;
}
},
GEOPOINT("geo_point", ValuesSource.GeoPoint.class, IndexGeoPointFieldData.class, null) {
@Override
public boolean isGeoPoint() {
return true;
}
};
final String description;
final Class<? extends ValuesSource> valuesSourceType;
final Class<? extends IndexFieldData> fieldDataType;
final ValueFormat defaultFormat;
private ValueType(String description, Class<? extends ValuesSource> valuesSourceType, Class<? extends IndexFieldData> fieldDataType, ValueFormat defaultFormat) {
this.description = description;
this.valuesSourceType = valuesSourceType;
this.fieldDataType = fieldDataType;
this.defaultFormat = defaultFormat;
}
public String description() {
return description;
}
public Class<? extends ValuesSource> getValuesSourceType() {
return valuesSourceType;
}
public boolean compatibleWith(IndexFieldData fieldData) {
return fieldDataType.isInstance(fieldData);
}
public boolean isA(ValueType valueType) {
return valueType.valuesSourceType.isAssignableFrom(valuesSourceType) &&
valueType.fieldDataType.isAssignableFrom(fieldDataType);
}
public boolean isNotA(ValueType valueType) {
return !isA(valueType);
}
public ValueFormat defaultFormat() {
return defaultFormat;
}
public boolean isNumeric() {
return false;
}
public boolean isFloatingPoint() {
return false;
}
public boolean isGeoPoint() {
return false;
}
public static ValueType resolveForScript(String type) {
switch (type) {
case "string": return STRING;
case "double":
case "float": return DOUBLE;
case "long":
case "integer":
case "short":
case "byte": return LONG;
case "date": return DATE;
case "ip": return IP;
default:
return null;
}
}
@Override
public String toString() {
return description;
}
}
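The enum folds the old ScriptValueType (deleted above) together with field data compatibility checks and default value formats. A few invariants, readable straight off the definitions above:

ValueType vt = ValueType.resolveForScript("float");   // "float" and "double" both map to DOUBLE
assert vt == ValueType.DOUBLE && vt.isNumeric() && vt.isFloatingPoint();
assert vt.isA(ValueType.NUMERIC);                     // doubles are numerics...
assert ValueType.STRING.isNotA(ValueType.NUMERIC);    // ...strings are not
assert ValueType.DATE.defaultFormat() == ValueFormat.DateTime.DEFAULT;
assert ValueType.resolveForScript("boolean") == null; // unknown value_type strings resolve to null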

View File

@ -545,13 +545,13 @@ public abstract class ValuesSource {
} }
public static class Script extends Numeric { public static class Script extends Numeric {
private final ScriptValueType scriptValueType; private final ValueType scriptValueType;
private final ScriptDoubleValues doubleValues; private final ScriptDoubleValues doubleValues;
private final ScriptLongValues longValues; private final ScriptLongValues longValues;
private final ScriptBytesValues bytesValues; private final ScriptBytesValues bytesValues;
public Script(SearchScript script, ScriptValueType scriptValueType) { public Script(SearchScript script, ValueType scriptValueType) {
this.scriptValueType = scriptValueType; this.scriptValueType = scriptValueType;
longValues = new ScriptLongValues(script); longValues = new ScriptLongValues(script);
doubleValues = new ScriptDoubleValues(script); doubleValues = new ScriptDoubleValues(script);

View File

@ -19,8 +19,7 @@
package org.elasticsearch.search.aggregations.support; package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.search.aggregations.*; import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
/** /**
* *
@ -33,10 +32,6 @@ public abstract class ValuesSourceAggregatorFactory<VS extends ValuesSource> ext
super(name, type, valuesSourceConfig); super(name, type, valuesSourceConfig);
} }
protected LeafOnly(String name, String type, ValuesSourceConfig<VS> valuesSourceConfig, ValueFormatter formatter, ValueParser parser) {
super(name, type, valuesSourceConfig, formatter, parser);
}
@Override @Override
public AggregatorFactory subFactories(AggregatorFactories subFactories) { public AggregatorFactory subFactories(AggregatorFactories subFactories) {
throw new AggregationInitializationException("Aggregator [" + name + "] of type [" + type + "] cannot accept sub-aggregations"); throw new AggregationInitializationException("Aggregator [" + name + "] of type [" + type + "] cannot accept sub-aggregations");
@ -44,18 +39,10 @@ public abstract class ValuesSourceAggregatorFactory<VS extends ValuesSource> ext
} }
protected ValuesSourceConfig<VS> config; protected ValuesSourceConfig<VS> config;
protected ValueFormatter formatter;
protected ValueParser parser;
protected ValuesSourceAggregatorFactory(String name, String type, ValuesSourceConfig<VS> config) { protected ValuesSourceAggregatorFactory(String name, String type, ValuesSourceConfig<VS> config) {
this(name, type, config, null, null);
}
protected ValuesSourceAggregatorFactory(String name, String type, ValuesSourceConfig<VS> config, ValueFormatter formatter, ValueParser parser) {
super(name, type); super(name, type);
this.config = config; this.config = config;
this.formatter = formatter;
this.parser = parser;
} }
@Override @Override
@ -85,9 +72,13 @@ public abstract class ValuesSourceAggregatorFactory<VS extends ValuesSource> ext
config = ((ValuesSourceAggregatorFactory) parent).config; config = ((ValuesSourceAggregatorFactory) parent).config;
if (config != null && config.valid()) { if (config != null && config.valid()) {
if (requiredValuesSourceType == null || requiredValuesSourceType.isAssignableFrom(config.valueSourceType)) { if (requiredValuesSourceType == null || requiredValuesSourceType.isAssignableFrom(config.valueSourceType)) {
ValueFormat format = config.format;
this.config = config; this.config = config;
this.formatter = ((ValuesSourceAggregatorFactory) parent).formatter; // if the user explicitly defined a format pattern, we'll do our best to keep it even when we inherit the
this.parser = ((ValuesSourceAggregatorFactory) parent).parser; // value source from one of the ancestor aggregations
if (this.config.formatPattern != null && format != null && format instanceof ValueFormat.Patternable) {
this.config.format = ((ValueFormat.Patternable) format).create(this.config.formatPattern);
}
return; return;
} }
} }

View File

@ -19,6 +19,9 @@
package org.elasticsearch.search.aggregations.support; package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.script.SearchScript; import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
/** /**
* *
@ -28,11 +31,13 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
final Class<VS> valueSourceType; final Class<VS> valueSourceType;
FieldContext fieldContext; FieldContext fieldContext;
SearchScript script; SearchScript script;
ScriptValueType scriptValueType; ValueType scriptValueType;
boolean unmapped = false; boolean unmapped = false;
boolean needsHashes = false; boolean needsHashes = false;
boolean ensureUnique = false; boolean ensureUnique = false;
boolean ensureSorted = false; boolean ensureSorted = false;
String formatPattern;
ValueFormat format;
public ValuesSourceConfig(Class<VS> valueSourceType) { public ValuesSourceConfig(Class<VS> valueSourceType) {
this.valueSourceType = valueSourceType; this.valueSourceType = valueSourceType;
@ -46,6 +51,10 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
return fieldContext; return fieldContext;
} }
public SearchScript script() {
return script;
}
public boolean unmapped() { public boolean unmapped() {
return unmapped; return unmapped;
} }
@ -64,15 +73,6 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
return this; return this;
} }
public ValuesSourceConfig<VS> scriptValueType(ScriptValueType scriptValueType) {
this.scriptValueType = scriptValueType;
return this;
}
public ScriptValueType scriptValueType() {
return scriptValueType;
}
public ValuesSourceConfig<VS> unmapped(boolean unmapped) { public ValuesSourceConfig<VS> unmapped(boolean unmapped) {
this.unmapped = unmapped; this.unmapped = unmapped;
return this; return this;
@ -83,13 +83,15 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
return this; return this;
} }
public ValuesSourceConfig<VS> ensureUnique(boolean unique) { public ValueFormat format() {
this.ensureUnique = unique; return format;
return this;
} }
public ValuesSourceConfig<VS> ensureSorted(boolean sorted) { public ValueFormatter formatter() {
this.ensureSorted = sorted; return format != null ? format.formatter() : null;
return this; }
public ValueParser parser() {
return format != null ? format.parser() : null;
} }
} }

View File

@ -0,0 +1,266 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
*
*/
public class ValuesSourceParser<VS extends ValuesSource> {
public static Builder any(String aggName, InternalAggregation.Type aggType, SearchContext context) {
return new Builder<>(aggName, aggType, context, ValuesSource.class);
}
public static Builder<ValuesSource.Numeric> numeric(String aggName, InternalAggregation.Type aggType, SearchContext context) {
return new Builder<>(aggName, aggType, context, ValuesSource.Numeric.class).targetValueType(ValueType.NUMERIC);
}
public static Builder<ValuesSource.Bytes> bytes(String aggName, InternalAggregation.Type aggType, SearchContext context) {
return new Builder<>(aggName, aggType, context, ValuesSource.Bytes.class).targetValueType(ValueType.STRING);
}
public static Builder<ValuesSource.GeoPoint> geoPoint(String aggName, InternalAggregation.Type aggType, SearchContext context) {
return new Builder<>(aggName, aggType, context, ValuesSource.GeoPoint.class).targetValueType(ValueType.GEOPOINT).scriptable(false);
}
private static class Input {
String field = null;
String script = null;
String lang = null;
Map<String, Object> params = null;
ValueType valueType = null;
boolean assumeUnique = false;
boolean assumeSorted = false;
String format = null;
}
private final String aggName;
private final InternalAggregation.Type aggType;
private final SearchContext context;
private final Class<VS> valuesSourceType;
private boolean scriptable = true;
private boolean formattable = false;
private ValueType targetValueType = null;
private boolean requiresSortedValues = false;
private boolean requiresUniqueValues = false;
private Input input = new Input();
private ValuesSourceParser(String aggName, InternalAggregation.Type aggType, SearchContext context, Class<VS> valuesSourceType) {
this.aggName = aggName;
this.aggType = aggType;
this.context = context;
this.valuesSourceType = valuesSourceType;
}
public boolean token(String currentFieldName, XContentParser.Token token, XContentParser parser) throws IOException {
if (token == XContentParser.Token.VALUE_STRING) {
if ("field".equals(currentFieldName)) {
input.field = parser.text();
} else if (formattable && "format".equals(currentFieldName)) {
input.format = parser.text();
} else if (scriptable) {
if ("script".equals(currentFieldName)) {
input.script = parser.text();
} else if ("lang".equals(currentFieldName)) {
input.lang = parser.text();
} else if ("value_type".equals(currentFieldName) || "valueType".equals(currentFieldName)) {
input.valueType = ValueType.resolveForScript(parser.text());
if (targetValueType != null && input.valueType.isNotA(targetValueType)) {
throw new SearchParseException(context, aggType.name() + " aggregation [" + aggName +
"] was configured with an incompatible value type [" + input.valueType + "]. [" + aggType +
"] aggregation can only work on value of type [" + targetValueType + "]");
}
} else {
return false;
}
return true;
} else {
return false;
}
return true;
}
if (scriptable && token == XContentParser.Token.VALUE_BOOLEAN) {
if ("script_values_unique".equals(currentFieldName) || "scriptValuesUnique".equals(currentFieldName)) {
input.assumeUnique = parser.booleanValue();
} else if ("script_values_sorted".equals(currentFieldName) || "scriptValuesSorted".equals(currentFieldName)) {
input.assumeSorted = parser.booleanValue();
} else {
return false;
}
return true;
}
if (scriptable && token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) {
input.params = parser.map();
return true;
}
return false;
}
return false;
}
public ValuesSourceConfig<VS> config() {
ValueType valueType = input.valueType != null ? input.valueType : targetValueType;
if (input.field == null) {
if (input.script == null) {
return new ValuesSourceConfig(ValuesSource.class);
}
Class valuesSourceType = valueType != null ? (Class<VS>) valueType.getValuesSourceType() : this.valuesSourceType;
if (valuesSourceType == null || valuesSourceType == ValuesSource.class) {
// the specific value source type is undefined, but for scripts, we need to have a specific value source
// type to know how to handle the script values, so we fall back on Bytes
valuesSourceType = ValuesSource.Bytes.class;
}
ValuesSourceConfig<VS> config = new ValuesSourceConfig<VS>(valuesSourceType);
config.format = resolveFormat(input.format, valueType);
config.script = createScript();
config.scriptValueType = valueType;
config.ensureUnique = requiresUniqueValues && !input.assumeUnique;
config.ensureSorted = requiresSortedValues && !input.assumeSorted;
return config;
}
FieldMapper<?> mapper = context.smartNameFieldMapper(input.field);
if (mapper == null) {
Class<VS> valuesSourceType = valueType != null ? (Class<VS>) valueType.getValuesSourceType() : this.valuesSourceType;
ValuesSourceConfig<VS> config = new ValuesSourceConfig<>(valuesSourceType);
config.format = resolveFormat(input.format, valueType);
config.unmapped = true;
if (valueType != null) {
// todo do we really need this for unmapped?
config.scriptValueType = valueType;
}
return config;
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
ValuesSourceConfig config;
if (valuesSourceType == ValuesSource.class) {
if (indexFieldData instanceof IndexNumericFieldData) {
config = new ValuesSourceConfig<>(ValuesSource.Numeric.class);
} else if (indexFieldData instanceof IndexGeoPointFieldData) {
config = new ValuesSourceConfig<>(ValuesSource.GeoPoint.class);
} else {
config = new ValuesSourceConfig<>(ValuesSource.Bytes.class);
}
} else {
config = new ValuesSourceConfig(valuesSourceType);
}
config.fieldContext = new FieldContext(input.field, indexFieldData, mapper);
config.script = createScript();
if (config.script != null) {
config.ensureUnique = requiresUniqueValues && !input.assumeUnique;
config.ensureSorted = requiresSortedValues && !input.assumeSorted;
}
config.format = resolveFormat(input.format, mapper);
return config;
}
private SearchScript createScript() {
return input.script == null ? null : context.scriptService().search(context.lookup(), input.lang, input.script, input.params);
}
private static ValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType) {
if (valueType == null) {
return null; // we can't figure it out
}
ValueFormat valueFormat = valueType.defaultFormat;
if (valueFormat != null && valueFormat instanceof ValueFormat.Patternable && format != null) {
return ((ValueFormat.Patternable) valueFormat).create(format);
}
return valueFormat;
}
private static ValueFormat resolveFormat(@Nullable String format, FieldMapper mapper) {
if (mapper instanceof DateFieldMapper) {
return format != null ? ValueFormat.DateTime.format(format) : ValueFormat.DateTime.mapper((DateFieldMapper) mapper);
}
if (mapper instanceof IpFieldMapper) {
return ValueFormat.IPv4;
}
if (mapper instanceof NumberFieldMapper) {
return format != null ? ValueFormat.Number.format(format) : ValueFormat.RAW;
}
return null;
}
public static class Builder<VS extends ValuesSource> {
private final ValuesSourceParser<VS> parser;
private Builder(String aggName, InternalAggregation.Type aggType, SearchContext context, Class<VS> valuesSourceType) {
parser = new ValuesSourceParser<>(aggName, aggType, context, valuesSourceType);
}
public Builder<VS> scriptable(boolean scriptable) {
parser.scriptable = scriptable;
return this;
}
public Builder<VS> formattable(boolean formattable) {
parser.formattable = formattable;
return this;
}
public Builder<VS> targetValueType(ValueType valueType) {
parser.targetValueType = valueType;
return this;
}
public Builder<VS> requiresSortedValues(boolean requiresSortedValues) {
parser.requiresSortedValues = requiresSortedValues;
return this;
}
public Builder<VS> requiresUniqueValues(boolean requiresUniqueValues) {
parser.requiresUniqueValues = requiresUniqueValues;
return this;
}
public ValuesSourceParser<VS> build() {
return parser;
}
}
}
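Putting the pieces together: a hedged sketch of a complete parser built on this class. MedianParser, InternalMedian and MedianAggregator are hypothetical stand-ins (chosen so as not to restate any file in this commit); everything else is the API shown above.

package org.elasticsearch.search.aggregations.metrics.example; // illustrative location

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;

public class MedianParser implements Aggregator.Parser {

    @Override
    public String type() {
        return "median";
    }

    @Override
    public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
        ValuesSourceParser<ValuesSource.Numeric> vsParser =
                ValuesSourceParser.numeric(aggregationName, InternalMedian.TYPE, context)
                        .formattable(true)          // accept a "format" key in the request
                        .requiresSortedValues(true) // ensureSorted unless "script_values_sorted" vouches otherwise
                        .build();

        XContentParser.Token token;
        String currentFieldName = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (!vsParser.token(currentFieldName, token, parser)) {
                throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].");
            }
        }
        return new MedianAggregator.Factory(aggregationName, vsParser.config());
    }
}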

View File

@ -0,0 +1,103 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support.format;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
/**
*
*/
public class ValueFormat {
public static final ValueFormat RAW = new ValueFormat(ValueFormatter.RAW, ValueParser.RAW);
public static final ValueFormat IPv4 = new ValueFormat(ValueFormatter.IPv4, ValueParser.IPv4);
private final ValueFormatter formatter;
private final ValueParser parser;
public ValueFormat(ValueFormatter formatter, ValueParser parser) {
assert formatter != null && parser != null;
this.formatter = formatter;
this.parser = parser;
}
public ValueFormatter formatter() {
return formatter;
}
public ValueParser parser() {
return parser;
}
public abstract static class Patternable<VF extends Patternable<VF>> extends ValueFormat {
private final String pattern;
public Patternable(String pattern, ValueFormatter formatter, ValueParser parser) {
super(formatter, parser);
this.pattern = pattern;
}
public String pattern() {
return pattern;
}
public abstract VF create(String pattern);
}
public static class DateTime extends Patternable<DateTime> {
public static final DateTime DEFAULT = new DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER.format(), ValueFormatter.DateTime.DEFAULT, ValueParser.DateMath.DEFAULT);
public static DateTime format(String format) {
return new DateTime(format, new ValueFormatter.DateTime(format), new ValueParser.DateMath(format, DateFieldMapper.Defaults.TIME_UNIT));
}
public static DateTime mapper(DateFieldMapper mapper) {
return new DateTime(mapper.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(mapper), ValueParser.DateMath.mapper(mapper));
}
public DateTime(String pattern, ValueFormatter formatter, ValueParser parser) {
super(pattern, formatter, parser);
}
@Override
public DateTime create(String pattern) {
return format(pattern);
}
}
public static class Number extends Patternable<Number> {
public static Number format(String format) {
return new Number(format, new ValueFormatter.Number.Pattern(format), new ValueParser.Number.Pattern(format));
}
public Number(String pattern, ValueFormatter formatter, ValueParser parser) {
super(pattern, formatter, parser);
}
@Override
public Number create(String pattern) {
return format(pattern);
}
}
}
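An illustrative round trip through the new format API; the patterns are arbitrary:

ValueFormat.Number money = ValueFormat.Number.format("#,##0.00");
ValueFormat.Number repatterned = money.create("0.0%");  // Patternable: same kind of format, new pattern
ValueFormatter out = repatterned.formatter();           // used when rendering values in responses
ValueParser in = repatterned.parser();                  // used when parsing user input (e.g. extended bounds)
ValueFormat.DateTime date = ValueFormat.DateTime.format("yyyy-MM-dd");
assert "yyyy-MM-dd".equals(date.pattern());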

View File

@ -102,6 +102,10 @@ public interface ValueFormatter extends Streamable {
public static final ValueFormatter DEFAULT = new ValueFormatter.DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER); public static final ValueFormatter DEFAULT = new ValueFormatter.DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER);
public static DateTime mapper(DateFieldMapper mapper) {
return new DateTime(mapper.dateTimeFormatter());
}
static final byte ID = 2; static final byte ID = 2;
FormatDateTimeFormatter formatter; FormatDateTimeFormatter formatter;

View File

@ -51,6 +51,8 @@ public interface ValueParser {
*/ */
static class DateTime implements ValueParser { static class DateTime implements ValueParser {
public static final DateTime DEFAULT = new DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER);
private FormatDateTimeFormatter formatter; private FormatDateTimeFormatter formatter;
public DateTime(String format) { public DateTime(String format) {
@ -98,6 +100,10 @@ public interface ValueParser {
public double parseDouble(String value, SearchContext searchContext) { public double parseDouble(String value, SearchContext searchContext) {
return parseLong(value, searchContext); return parseLong(value, searchContext);
} }
public static DateMath mapper(DateFieldMapper mapper) {
return new DateMath(new DateMathParser(mapper.dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT));
}
} }
/** /**
@ -170,4 +176,5 @@ public interface ValueParser {
} }
} }
} }
} }

View File

@ -1166,7 +1166,8 @@ public class DateHistogramTests extends ElasticsearchIntegrationTest {
.field("date") .field("date")
.interval(DateHistogram.Interval.days(interval)) .interval(DateHistogram.Interval.days(interval))
.minDocCount(0) .minDocCount(0)
.extendedBounds(boundsMin, boundsMax) // when explicitly specifying a format, the extended bounds should be defined by the same format
.extendedBounds(format(boundsMin, pattern), format(boundsMax, pattern))
.format(pattern)) .format(pattern))
.execute().actionGet(); .execute().actionGet();
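The test change pins down a contract introduced above: once a histogram declares a format, its extended bounds are parsed with that format's ValueParser, so they must be supplied in the same pattern. A hedged sketch of the shape the fixed test takes, assuming the usual test imports (AggregationBuilders.dateHistogram and friends) and illustrative values:

SearchResponse response = client().prepareSearch("idx")
        .addAggregation(dateHistogram("date_histo")
                .field("date")
                .interval(DateHistogram.Interval.days(1))
                .minDocCount(0)
                .format("yyyy-MM-dd")
                .extendedBounds("2014-03-01", "2014-03-31")) // strings in the declared pattern
        .execute().actionGet();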

View File

@ -198,6 +198,4 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
assertEquals(4, kellyTerm.getSupersetDf()); assertEquals(4, kellyTerm.getSupersetDf());
} }
} }