Make MappedFieldType responsible for providing a parser/formatter. #17546
Aggregations need to perform instanceof calls on MappedFieldType instances in order to know how they should be parsed or formatted. Instead, we should let the field types provide a formatter/parser that can be used.
This commit is contained in:
parent
1d0239c125
commit
c33300c543
|
@ -37,6 +37,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
|||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
|
@ -790,6 +791,13 @@ public abstract class StreamInput extends InputStream {
|
|||
return readNamedWriteable(Task.Status.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a {@link DocValueFormat} from the current stream.
|
||||
*/
|
||||
public DocValueFormat readValueFormat() throws IOException {
|
||||
return readNamedWriteable(DocValueFormat.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a list of objects
|
||||
*/
|
||||
|
|
|
@ -36,6 +36,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
|||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
|
@ -789,4 +790,8 @@ public abstract class StreamOutput extends OutputStream {
|
|||
writeNamedWriteable(sort);
|
||||
}
|
||||
|
||||
/** Writes a {@link DocValueFormat}. */
|
||||
public void writeValueFormat(DocValueFormat format) throws IOException {
|
||||
writeNamedWriteable(format);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -47,6 +47,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
|
|||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.index.similarity.SimilarityProvider;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -458,4 +459,17 @@ public abstract class MappedFieldType extends FieldType {
|
|||
this.eagerGlobalOrdinals = eagerGlobalOrdinals;
|
||||
}
|
||||
|
||||
/** Return a {@link DocValueFormat} that can be used to display and parse
|
||||
* values as returned by the fielddata API.
|
||||
* The default implementation returns a {@link DocValueFormat#RAW}. */
|
||||
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
|
||||
if (format != null) {
|
||||
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
|
||||
}
|
||||
if (timeZone != null) {
|
||||
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones");
|
||||
}
|
||||
return DocValueFormat.RAW;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField;
|
|||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.Booleans;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -37,6 +38,8 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -198,6 +201,18 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
failIfNoDocValues();
|
||||
return new DocValuesIndexFieldData.Builder().numericType(NumericType.BOOLEAN);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
|
||||
if (format != null) {
|
||||
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
|
||||
}
|
||||
if (timeZone != null) {
|
||||
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
|
||||
+ "] does not support custom time zones");
|
||||
}
|
||||
return DocValueFormat.BOOLEAN;
|
||||
}
|
||||
}
|
||||
|
||||
protected BooleanFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
|
||||
|
|
|
@ -53,6 +53,7 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
|
@ -496,6 +497,18 @@ public class DateFieldMapper extends NumberFieldMapper {
|
|||
failIfNoDocValues();
|
||||
return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
|
||||
FormatDateTimeFormatter dateTimeFormatter = this.dateTimeFormatter;
|
||||
if (format != null) {
|
||||
dateTimeFormatter = Joda.forPattern(format);
|
||||
}
|
||||
if (timeZone == null) {
|
||||
timeZone = DateTimeZone.UTC;
|
||||
}
|
||||
return new DocValueFormat.DateTime(dateTimeFormatter, timeZone);
|
||||
}
|
||||
}
|
||||
|
||||
protected DateFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit<Boolean> ignoreMalformed,Explicit<Boolean> coerce,
|
||||
|
|
|
@ -33,6 +33,7 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -48,6 +49,8 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Reader;
|
||||
|
@ -178,6 +181,18 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
|
|||
public boolean isNumeric() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
|
||||
if (timeZone != null) {
|
||||
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones");
|
||||
}
|
||||
if (format == null) {
|
||||
return DocValueFormat.RAW;
|
||||
} else {
|
||||
return new DocValueFormat.Decimal(format);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected Boolean includeInAll;
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.apache.lucene.util.LegacyNumericUtils;
|
|||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.Iterators;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
|
@ -46,6 +47,8 @@ import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
|
|||
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -345,6 +348,18 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
|
|||
public IndexFieldData.Builder fielddataBuilder() {
|
||||
return new AbstractGeoPointDVIndexFieldData.Builder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
|
||||
if (format != null) {
|
||||
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
|
||||
}
|
||||
if (timeZone != null) {
|
||||
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
|
||||
+ "] does not support custom time zones");
|
||||
}
|
||||
return DocValueFormat.GEOHASH;
|
||||
}
|
||||
}
|
||||
|
||||
protected DoubleFieldMapper latMapper;
|
||||
|
|
|
@ -55,7 +55,9 @@ import org.elasticsearch.index.mapper.core.LongFieldMapper;
|
|||
import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.ipv4.InternalIPv4Range;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -285,6 +287,18 @@ public class IpFieldMapper extends NumberFieldMapper {
|
|||
failIfNoDocValues();
|
||||
return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
|
||||
if (format != null) {
|
||||
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
|
||||
}
|
||||
if (timeZone != null) {
|
||||
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
|
||||
+ "] does not support custom time zones");
|
||||
}
|
||||
return DocValueFormat.IP;
|
||||
}
|
||||
}
|
||||
|
||||
protected IpFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
|
||||
|
|
|
@ -44,6 +44,9 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent;
|
|||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.lease.Releasable;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
|
@ -141,6 +144,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
private final OldShardsStats oldShardsStats = new OldShardsStats();
|
||||
private final IndexStoreConfig indexStoreConfig;
|
||||
private final MapperRegistry mapperRegistry;
|
||||
private final NamedWriteableRegistry namedWriteableRegistry;
|
||||
private final IndexingMemoryController indexingMemoryController;
|
||||
private final TimeValue cleanInterval;
|
||||
private final IndicesRequestCache indicesRequestCache;
|
||||
|
@ -156,7 +160,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv,
|
||||
ClusterSettings clusterSettings, AnalysisRegistry analysisRegistry,
|
||||
IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver,
|
||||
ClusterService clusterService, MapperRegistry mapperRegistry, ThreadPool threadPool, IndexScopedSettings indexScopedSettings, CircuitBreakerService circuitBreakerService) {
|
||||
ClusterService clusterService, MapperRegistry mapperRegistry, NamedWriteableRegistry namedWriteableRegistry,
|
||||
ThreadPool threadPool, IndexScopedSettings indexScopedSettings, CircuitBreakerService circuitBreakerService) {
|
||||
super(settings);
|
||||
this.threadPool = threadPool;
|
||||
this.pluginsService = pluginsService;
|
||||
|
@ -170,6 +175,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
this.indicesRequestCache = new IndicesRequestCache(settings);
|
||||
this.indicesQueryCache = new IndicesQueryCache(settings);
|
||||
this.mapperRegistry = mapperRegistry;
|
||||
this.namedWriteableRegistry = namedWriteableRegistry;
|
||||
clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, indexStoreConfig::setRateLimitingType);
|
||||
clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, indexStoreConfig::setRateLimitingThrottle);
|
||||
indexingMemoryController = new IndexingMemoryController(settings, threadPool, Iterables.flatten(this));
|
||||
|
@ -981,7 +987,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
if (entity.loaded == false) { // if we have loaded this we don't need to do anything
|
||||
// restore the cached query result into the context
|
||||
final QuerySearchResult result = context.queryResult();
|
||||
result.readFromWithId(context.id(), bytesReference.streamInput());
|
||||
StreamInput in = new NamedWriteableAwareStreamInput(bytesReference.streamInput(), namedWriteableRegistry);
|
||||
result.readFromWithId(context.id(), in);
|
||||
result.shardTarget(context.shardTarget());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,340 @@
|
|||
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search;

import org.apache.lucene.index.Term;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.Locale;
import java.util.Objects;
import java.util.concurrent.Callable;

/** A formatter for values as returned by the fielddata/doc-values APIs. */
public interface DocValueFormat extends NamedWriteable<DocValueFormat> {

    /** Format a {@code long} doc value for display. */
    String format(long value);

    /** Format a {@code double} doc value for display. */
    String format(double value);

    /** Format a binary doc value for display. */
    String format(BytesRef value);

    /** Parse {@code value} back into a {@code long}. {@code roundUp} controls whether fractional
     *  results are rounded up or down, and {@code now} supplies the current time to formats that
     *  need it (e.g. date-math expressions). */
    long parseLong(String value, boolean roundUp, Callable<Long> now);

    /** Parse {@code value} back into a {@code double}. */
    double parseDouble(String value, boolean roundUp, Callable<Long> now);

    /** Default format: numbers printed with {@link Long#toString}/{@link Double#toString},
     *  binary values with {@link Term#toString}. Stateless singleton, so {@link #writeTo}
     *  serializes nothing beyond the writeable name. */
    DocValueFormat RAW = new DocValueFormat() {

        @Override
        public String getWriteableName() {
            return "raw";
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // stateless singleton: nothing to serialize
        }

        @Override
        public String format(long value) {
            return Long.toString(value);
        }

        @Override
        public String format(double value) {
            return Double.toString(value);
        }

        @Override
        public String format(BytesRef value) {
            return Term.toString(value);
        }

        @Override
        public long parseLong(String value, boolean roundUp, Callable<Long> now) {
            // accept fractional input and round in the direction the caller asked for
            double d = Double.parseDouble(value);
            if (roundUp) {
                d = Math.ceil(d);
            } else {
                d = Math.floor(d);
            }
            return Math.round(d);
        }

        @Override
        public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
            return Double.parseDouble(value);
        }
    };

    /** Date format: prints millisecond timestamps through a joda formatter in a fixed time zone,
     *  and parses them back through {@link DateMathParser} so date-math expressions are accepted. */
    final class DateTime implements DocValueFormat {

        public static final String NAME = "date_time";

        final FormatDateTimeFormatter formatter;
        final DateTimeZone timeZone;
        private final DateMathParser parser;

        public DateTime(FormatDateTimeFormatter formatter, DateTimeZone timeZone) {
            this.formatter = Objects.requireNonNull(formatter);
            this.timeZone = Objects.requireNonNull(timeZone);
            this.parser = new DateMathParser(formatter);
        }

        /** Read from a stream: the pattern string followed by the time-zone id,
         *  mirroring {@link #writeTo}. */
        public DateTime(StreamInput in) throws IOException {
            this(Joda.forPattern(in.readString()), DateTimeZone.forID(in.readString()));
        }

        @Override
        public String getWriteableName() {
            return NAME;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // pattern + zone id are sufficient to rebuild via DateTime(StreamInput)
            out.writeString(formatter.format());
            out.writeString(timeZone.getID());
        }

        @Override
        public String format(long value) {
            return formatter.printer().withZone(timeZone).print(value);
        }

        @Override
        public String format(double value) {
            // dates are millisecond longs; truncate any fractional part
            return format((long) value);
        }

        @Override
        public String format(BytesRef value) {
            throw new UnsupportedOperationException();
        }

        @Override
        public long parseLong(String value, boolean roundUp, Callable<Long> now) {
            return parser.parse(value, now, roundUp, timeZone);
        }

        @Override
        public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
            return parseLong(value, roundUp, now);
        }
    }

    /** Geo format: prints encoded geo-point longs as geohash strings via
     *  {@link GeoHashUtils#stringEncode}. Parsing is not supported. */
    DocValueFormat GEOHASH = new DocValueFormat() {

        @Override
        public String getWriteableName() {
            return "geo_hash";
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // stateless singleton: nothing to serialize
        }

        @Override
        public String format(long value) {
            return GeoHashUtils.stringEncode(value);
        }

        @Override
        public String format(double value) {
            return format((long) value);
        }

        @Override
        public String format(BytesRef value) {
            throw new UnsupportedOperationException();
        }

        @Override
        public long parseLong(String value, boolean roundUp, Callable<Long> now) {
            throw new UnsupportedOperationException();
        }

        @Override
        public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
            throw new UnsupportedOperationException();
        }
    };

    /** Boolean format: non-zero numeric doc values print as {@code "true"}, zero as
     *  {@code "false"}. Parsing and binary values are not supported. */
    DocValueFormat BOOLEAN = new DocValueFormat() {

        @Override
        public String getWriteableName() {
            return "bool";
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // stateless singleton: nothing to serialize
        }

        @Override
        public String format(long value) {
            return Boolean.toString(value != 0);
        }

        @Override
        public String format(double value) {
            return Boolean.toString(value != 0);
        }

        @Override
        public String format(BytesRef value) {
            throw new UnsupportedOperationException();
        }

        @Override
        public long parseLong(String value, boolean roundUp, Callable<Long> now) {
            throw new UnsupportedOperationException();
        }

        @Override
        public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
            throw new UnsupportedOperationException();
        }
    };

    /** IP format: converts between the numeric doc-value representation and dotted notation
     *  through {@link IpFieldMapper}. Binary values are not supported. */
    DocValueFormat IP = new DocValueFormat() {

        @Override
        public String getWriteableName() {
            return "ip";
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // stateless singleton: nothing to serialize
        }

        @Override
        public String format(long value) {
            return IpFieldMapper.longToIp(value);
        }

        @Override
        public String format(double value) {
            return format((long) value);
        }

        @Override
        public String format(BytesRef value) {
            throw new UnsupportedOperationException();
        }

        @Override
        public long parseLong(String value, boolean roundUp, Callable<Long> now) {
            return IpFieldMapper.ipToLong(value);
        }

        @Override
        public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
            return parseLong(value, roundUp, now);
        }
    };

    /** Numeric format backed by a {@link DecimalFormat} pattern.
     *  NOTE(review): {@link DecimalFormat} is not thread-safe; this assumes a {@code Decimal}
     *  instance is only used from one thread at a time — confirm against callers. */
    final class Decimal implements DocValueFormat {

        public static final String NAME = "decimal";
        // ROOT locale so output does not depend on the JVM's default locale
        private static final DecimalFormatSymbols SYMBOLS = new DecimalFormatSymbols(Locale.ROOT);

        final String pattern;
        private final NumberFormat format;

        public Decimal(String pattern) {
            this.pattern = pattern;
            this.format = new DecimalFormat(pattern, SYMBOLS);
        }

        /** Read from a stream: just the pattern string, mirroring {@link #writeTo}. */
        public Decimal(StreamInput in) throws IOException {
            this(in.readString());
        }

        @Override
        public String getWriteableName() {
            return NAME;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeString(pattern);
        }

        @Override
        public String format(long value) {
            return format.format(value);
        }

        @Override
        public String format(double value) {
            return format.format(value);
        }

        @Override
        public String format(BytesRef value) {
            throw new UnsupportedOperationException();
        }

        @Override
        public long parseLong(String value, boolean roundUp, Callable<Long> now) {
            Number n;
            try {
                n = format.parse(value);
            } catch (ParseException e) {
                // values produced by format(...) should always parse back; anything else is a caller bug
                throw new RuntimeException(e);
            }
            if (format.isParseIntegerOnly()) {
                return n.longValue();
            } else {
                // fractional pattern: round in the direction the caller asked for
                double d = n.doubleValue();
                if (roundUp) {
                    d = Math.ceil(d);
                } else {
                    d = Math.floor(d);
                }
                return Math.round(d);
            }
        }

        @Override
        public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
            Number n;
            try {
                n = format.parse(value);
            } catch (ParseException e) {
                throw new RuntimeException(e);
            }
            return n.doubleValue();
        }

    }
}
|
|
@ -276,6 +276,7 @@ public class SearchModule extends AbstractModule {
|
|||
registerBuiltinQueryParsers();
|
||||
registerBuiltinRescorers();
|
||||
registerBuiltinSorts();
|
||||
registerBuiltinValueFormats();
|
||||
}
|
||||
|
||||
public void registerHighlighter(String key, Class<? extends Highlighter> clazz) {
|
||||
|
@ -314,6 +315,15 @@ public class SearchModule extends AbstractModule {
|
|||
return scoreFunctionsRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a new ValueFormat.
|
||||
*/
|
||||
// private for now, we can consider making it public if there are actual use cases for plugins
|
||||
// to register custom value formats
|
||||
private void registerValueFormat(String name, Writeable.Reader<? extends DocValueFormat> reader) {
|
||||
namedWriteableRegistry.register(DocValueFormat.class, name, reader);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a query.
|
||||
*
|
||||
|
@ -528,6 +538,15 @@ public class SearchModule extends AbstractModule {
|
|||
namedWriteableRegistry.register(ScoreFunctionBuilder.class, WeightBuilder.NAME, WeightBuilder::new);
|
||||
}
|
||||
|
||||
private void registerBuiltinValueFormats() {
|
||||
registerValueFormat(DocValueFormat.BOOLEAN.getWriteableName(), in -> DocValueFormat.BOOLEAN);
|
||||
registerValueFormat(DocValueFormat.DateTime.NAME, DocValueFormat.DateTime::new);
|
||||
registerValueFormat(DocValueFormat.Decimal.NAME, DocValueFormat.Decimal::new);
|
||||
registerValueFormat(DocValueFormat.GEOHASH.getWriteableName(), in -> DocValueFormat.GEOHASH);
|
||||
registerValueFormat(DocValueFormat.IP.getWriteableName(), in -> DocValueFormat.IP);
|
||||
registerValueFormat(DocValueFormat.RAW.getWriteableName(), in -> DocValueFormat.RAW);
|
||||
}
|
||||
|
||||
private void registerBuiltinQueryParsers() {
|
||||
registerQuery(MatchQueryBuilder.PROTOTYPE::readFrom, MatchQueryBuilder::fromXContent, MatchQueryBuilder.QUERY_NAME_FIELD);
|
||||
registerQuery(MatchPhraseQueryBuilder.PROTOTYPE::readFrom, MatchPhraseQueryBuilder::fromXContent,
|
||||
|
|
|
@ -22,27 +22,26 @@ package org.elasticsearch.search.aggregations.bucket;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class BucketStreamContext implements Streamable {
|
||||
|
||||
private ValueFormatter formatter;
|
||||
private DocValueFormat format;
|
||||
private boolean keyed;
|
||||
private Map<String, Object> attributes;
|
||||
|
||||
public BucketStreamContext() {
|
||||
}
|
||||
|
||||
public void formatter(ValueFormatter formatter) {
|
||||
this.formatter = formatter;
|
||||
public void format(DocValueFormat format) {
|
||||
this.format = format;
|
||||
}
|
||||
|
||||
public ValueFormatter formatter() {
|
||||
return formatter;
|
||||
public DocValueFormat format() {
|
||||
return format;
|
||||
}
|
||||
|
||||
public void keyed(boolean keyed) {
|
||||
|
@ -63,14 +62,14 @@ public class BucketStreamContext implements Streamable {
|
|||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
formatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
keyed = in.readBoolean();
|
||||
attributes = in.readMap();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeBoolean(keyed);
|
||||
out.writeMap(attributes);
|
||||
}
|
||||
|
|
|
@ -70,7 +70,7 @@ public abstract class AbstractHistogramAggregatorFactory<AF extends AbstractHist
|
|||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
Rounding rounding = createRounding();
|
||||
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, extendedBounds, null, config.formatter(),
|
||||
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, extendedBounds, null, config.format(),
|
||||
histogramFactory, context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
|
@ -101,11 +101,11 @@ public abstract class AbstractHistogramAggregatorFactory<AF extends AbstractHist
|
|||
ExtendedBounds roundedBounds = null;
|
||||
if (extendedBounds != null) {
|
||||
// we need to process & validate here using the parser
|
||||
extendedBounds.processAndValidate(name, context.searchContext(), config.parser());
|
||||
extendedBounds.processAndValidate(name, context.searchContext(), config.format());
|
||||
roundedBounds = extendedBounds.round(rounding);
|
||||
}
|
||||
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, roundedBounds, valuesSource,
|
||||
config.formatter(), histogramFactory, context, parent, pipelineAggregators, metaData);
|
||||
config.format(), histogramFactory, context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.SearchParseException;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueParser;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -64,13 +64,14 @@ public class ExtendedBounds implements ToXContent {
|
|||
this.maxAsStr = maxAsStr;
|
||||
}
|
||||
|
||||
void processAndValidate(String aggName, SearchContext context, ValueParser parser) {
|
||||
assert parser != null;
|
||||
void processAndValidate(String aggName, SearchContext context, DocValueFormat format) {
|
||||
assert format != null;
|
||||
if (minAsStr != null) {
|
||||
min = parser.parseLong(minAsStr, context);
|
||||
min = format.parseLong(minAsStr, false, context.nowCallable());
|
||||
}
|
||||
if (maxAsStr != null) {
|
||||
max = parser.parseLong(maxAsStr, context);
|
||||
// TODO: Should we rather pass roundUp=true?
|
||||
max = format.parseLong(maxAsStr, false, context.nowCallable());
|
||||
}
|
||||
if (min != null && max != null && min.compareTo(max) > 0) {
|
||||
throw new SearchParseException(context, "[extended_bounds.min][" + min + "] cannot be greater than " +
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.elasticsearch.common.inject.internal.Nullable;
|
|||
import org.elasticsearch.common.lease.Releasables;
|
||||
import org.elasticsearch.common.rounding.Rounding;
|
||||
import org.elasticsearch.common.util.LongHash;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
|
@ -35,7 +36,6 @@ import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -50,7 +50,7 @@ public class HistogramAggregator extends BucketsAggregator {
|
|||
public static final ParseField MIN_DOC_COUNT_FIELD = new ParseField("min_doc_count");
|
||||
|
||||
private final ValuesSource.Numeric valuesSource;
|
||||
private final ValueFormatter formatter;
|
||||
private final DocValueFormat formatter;
|
||||
private final Rounding rounding;
|
||||
private final InternalOrder order;
|
||||
private final boolean keyed;
|
||||
|
@ -63,7 +63,7 @@ public class HistogramAggregator extends BucketsAggregator {
|
|||
|
||||
public HistogramAggregator(String name, AggregatorFactories factories, Rounding rounding, InternalOrder order, boolean keyed,
|
||||
long minDocCount, @Nullable ExtendedBounds extendedBounds, @Nullable ValuesSource.Numeric valuesSource,
|
||||
ValueFormatter formatter, InternalHistogram.Factory<?> histogramFactory, AggregationContext aggregationContext,
|
||||
DocValueFormat formatter, InternalHistogram.Factory<?> histogramFactory, AggregationContext aggregationContext,
|
||||
Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
|
||||
super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
|
||||
|
|
|
@ -18,11 +18,11 @@
|
|||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.histogram;
|
||||
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationExecutionException;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregations;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
|
@ -36,18 +36,18 @@ public class InternalDateHistogram {
|
|||
|
||||
static class Bucket extends InternalHistogram.Bucket {
|
||||
|
||||
Bucket(boolean keyed, ValueFormatter formatter, InternalHistogram.Factory<Bucket> factory) {
|
||||
Bucket(boolean keyed, DocValueFormat formatter, InternalHistogram.Factory<Bucket> factory) {
|
||||
super(keyed, formatter, factory);
|
||||
}
|
||||
|
||||
Bucket(long key, long docCount, InternalAggregations aggregations, boolean keyed, ValueFormatter formatter,
|
||||
Bucket(long key, long docCount, InternalAggregations aggregations, boolean keyed, DocValueFormat formatter,
|
||||
InternalHistogram.Factory<Bucket> factory) {
|
||||
super(key, docCount, keyed, formatter, factory, aggregations);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getKeyAsString() {
|
||||
return formatter != null ? formatter.format(key) : ValueFormatter.DateTime.DEFAULT.format(key);
|
||||
return format.format(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -78,12 +78,12 @@ public class InternalDateHistogram {
|
|||
|
||||
@Override
|
||||
public InternalDateHistogram.Bucket createBucket(InternalAggregations aggregations, InternalDateHistogram.Bucket prototype) {
|
||||
return new Bucket(prototype.key, prototype.docCount, aggregations, prototype.getKeyed(), prototype.formatter, this);
|
||||
return new Bucket(prototype.key, prototype.docCount, aggregations, prototype.getKeyed(), prototype.format, this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalDateHistogram.Bucket createBucket(Object key, long docCount, InternalAggregations aggregations, boolean keyed,
|
||||
ValueFormatter formatter) {
|
||||
DocValueFormat formatter) {
|
||||
if (key instanceof Number) {
|
||||
return new Bucket(((Number) key).longValue(), docCount, aggregations, keyed, formatter, this);
|
||||
} else if (key instanceof DateTime) {
|
||||
|
@ -94,7 +94,7 @@ public class InternalDateHistogram {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected InternalDateHistogram.Bucket createEmptyBucket(boolean keyed, ValueFormatter formatter) {
|
||||
protected InternalDateHistogram.Bucket createEmptyBucket(boolean keyed, DocValueFormat formatter) {
|
||||
return new Bucket(keyed, formatter, this);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,8 +23,8 @@ import org.apache.lucene.util.PriorityQueue;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.rounding.Rounding;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationExecutionException;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.Aggregations;
|
||||
|
@ -35,8 +35,6 @@ import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
|
|||
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -73,7 +71,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
if (factory == null) {
|
||||
throw new IllegalStateException("No factory found for histogram buckets");
|
||||
}
|
||||
Bucket histogram = new Bucket(context.keyed(), context.formatter(), factory);
|
||||
Bucket histogram = new Bucket(context.keyed(), context.format(), factory);
|
||||
histogram.readFrom(in);
|
||||
return histogram;
|
||||
}
|
||||
|
@ -81,7 +79,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
@Override
|
||||
public BucketStreamContext getBucketStreamContext(Bucket bucket) {
|
||||
BucketStreamContext context = new BucketStreamContext();
|
||||
context.formatter(bucket.formatter);
|
||||
context.format(bucket.format);
|
||||
context.keyed(bucket.keyed);
|
||||
return context;
|
||||
}
|
||||
|
@ -99,16 +97,16 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
long docCount;
|
||||
InternalAggregations aggregations;
|
||||
private transient final boolean keyed;
|
||||
protected transient final ValueFormatter formatter;
|
||||
protected transient final DocValueFormat format;
|
||||
private Factory<?> factory;
|
||||
|
||||
public Bucket(boolean keyed, ValueFormatter formatter, Factory<?> factory) {
|
||||
this.formatter = formatter;
|
||||
public Bucket(boolean keyed, DocValueFormat formatter, Factory<?> factory) {
|
||||
this.format = formatter;
|
||||
this.keyed = keyed;
|
||||
this.factory = factory;
|
||||
}
|
||||
|
||||
public Bucket(long key, long docCount, boolean keyed, ValueFormatter formatter, Factory<?> factory,
|
||||
public Bucket(long key, long docCount, boolean keyed, DocValueFormat formatter, Factory<?> factory,
|
||||
InternalAggregations aggregations) {
|
||||
this(keyed, formatter, factory);
|
||||
this.key = key;
|
||||
|
@ -122,7 +120,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
|
||||
@Override
|
||||
public String getKeyAsString() {
|
||||
return formatter != null ? formatter.format(key) : ValueFormatter.RAW.format(key);
|
||||
return format.format(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -149,25 +147,19 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
aggregations.add((InternalAggregations) bucket.getAggregations());
|
||||
}
|
||||
InternalAggregations aggs = InternalAggregations.reduce(aggregations, context);
|
||||
return (B) getFactory().createBucket(key, docCount, aggs, keyed, formatter);
|
||||
return (B) getFactory().createBucket(key, docCount, aggs, keyed, format);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (formatter != ValueFormatter.RAW) {
|
||||
Text keyTxt = new Text(formatter.format(key));
|
||||
if (keyed) {
|
||||
builder.startObject(keyTxt.string());
|
||||
} else {
|
||||
builder.startObject();
|
||||
}
|
||||
builder.field(CommonFields.KEY_AS_STRING, keyTxt);
|
||||
String keyAsString = format.format(key);
|
||||
if (keyed) {
|
||||
builder.startObject(keyAsString);
|
||||
} else {
|
||||
if (keyed) {
|
||||
builder.startObject(String.valueOf(getKey()));
|
||||
} else {
|
||||
builder.startObject();
|
||||
}
|
||||
builder.startObject();
|
||||
}
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.KEY_AS_STRING, keyAsString);
|
||||
}
|
||||
builder.field(CommonFields.KEY, key);
|
||||
builder.field(CommonFields.DOC_COUNT, docCount);
|
||||
|
@ -190,8 +182,8 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
aggregations.writeTo(out);
|
||||
}
|
||||
|
||||
public ValueFormatter getFormatter() {
|
||||
return formatter;
|
||||
public DocValueFormat getFormatter() {
|
||||
return format;
|
||||
}
|
||||
|
||||
public boolean getKeyed() {
|
||||
|
@ -249,7 +241,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
}
|
||||
|
||||
public InternalHistogram<B> create(String name, List<B> buckets, InternalOrder order, long minDocCount,
|
||||
EmptyBucketInfo emptyBucketInfo, ValueFormatter formatter, boolean keyed,
|
||||
EmptyBucketInfo emptyBucketInfo, DocValueFormat formatter, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
return new InternalHistogram<>(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, this, pipelineAggregators,
|
||||
|
@ -258,16 +250,16 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
|
||||
public InternalHistogram<B> create(List<B> buckets, InternalHistogram<B> prototype) {
|
||||
return new InternalHistogram<>(prototype.name, buckets, prototype.order, prototype.minDocCount, prototype.emptyBucketInfo,
|
||||
prototype.formatter, prototype.keyed, this, prototype.pipelineAggregators(), prototype.metaData);
|
||||
prototype.format, prototype.keyed, this, prototype.pipelineAggregators(), prototype.metaData);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public B createBucket(InternalAggregations aggregations, B prototype) {
|
||||
return (B) new Bucket(prototype.key, prototype.docCount, prototype.getKeyed(), prototype.formatter, this, aggregations);
|
||||
return (B) new Bucket(prototype.key, prototype.docCount, prototype.getKeyed(), prototype.format, this, aggregations);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public B createBucket(Object key, long docCount, InternalAggregations aggregations, boolean keyed, ValueFormatter formatter) {
|
||||
public B createBucket(Object key, long docCount, InternalAggregations aggregations, boolean keyed, DocValueFormat formatter) {
|
||||
if (key instanceof Number) {
|
||||
return (B) new Bucket(((Number) key).longValue(), docCount, keyed, formatter, this, aggregations);
|
||||
} else {
|
||||
|
@ -276,7 +268,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected B createEmptyBucket(boolean keyed, ValueFormatter formatter) {
|
||||
protected B createEmptyBucket(boolean keyed, DocValueFormat formatter) {
|
||||
return (B) new Bucket(keyed, formatter, this);
|
||||
}
|
||||
|
||||
|
@ -284,7 +276,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
|
||||
protected List<B> buckets;
|
||||
private InternalOrder order;
|
||||
private ValueFormatter formatter;
|
||||
private DocValueFormat format;
|
||||
private boolean keyed;
|
||||
private long minDocCount;
|
||||
private EmptyBucketInfo emptyBucketInfo;
|
||||
|
@ -293,7 +285,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
InternalHistogram() {} // for serialization
|
||||
|
||||
InternalHistogram(String name, List<B> buckets, InternalOrder order, long minDocCount, EmptyBucketInfo emptyBucketInfo,
|
||||
ValueFormatter formatter, boolean keyed, Factory<B> factory, List<PipelineAggregator> pipelineAggregators,
|
||||
DocValueFormat formatter, boolean keyed, Factory<B> factory, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.buckets = buckets;
|
||||
|
@ -301,7 +293,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
assert (minDocCount == 0) == (emptyBucketInfo != null);
|
||||
this.minDocCount = minDocCount;
|
||||
this.emptyBucketInfo = emptyBucketInfo;
|
||||
this.formatter = formatter;
|
||||
this.format = formatter;
|
||||
this.keyed = keyed;
|
||||
this.factory = factory;
|
||||
}
|
||||
|
@ -421,7 +413,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
while (key <= max) {
|
||||
iter.add(getFactory().createBucket(key, 0,
|
||||
reducedEmptySubAggs,
|
||||
keyed, formatter));
|
||||
keyed, format));
|
||||
key = emptyBucketInfo.rounding.nextRoundingValue(key);
|
||||
}
|
||||
}
|
||||
|
@ -432,7 +424,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
while (key < firstBucket.key) {
|
||||
iter.add(getFactory().createBucket(key, 0,
|
||||
reducedEmptySubAggs,
|
||||
keyed, formatter));
|
||||
keyed, format));
|
||||
key = emptyBucketInfo.rounding.nextRoundingValue(key);
|
||||
}
|
||||
}
|
||||
|
@ -449,7 +441,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
while (key < nextBucket.key) {
|
||||
iter.add(getFactory().createBucket(key, 0,
|
||||
reducedEmptySubAggs, keyed,
|
||||
formatter));
|
||||
format));
|
||||
key = emptyBucketInfo.rounding.nextRoundingValue(key);
|
||||
}
|
||||
assert key == nextBucket.key;
|
||||
|
@ -464,7 +456,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
while (key <= max) {
|
||||
iter.add(getFactory().createBucket(key, 0,
|
||||
reducedEmptySubAggs, keyed,
|
||||
formatter));
|
||||
format));
|
||||
key = emptyBucketInfo.rounding.nextRoundingValue(key);
|
||||
}
|
||||
}
|
||||
|
@ -493,7 +485,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
CollectionUtil.introSort(reducedBuckets, order.comparator());
|
||||
}
|
||||
|
||||
return getFactory().create(getName(), reducedBuckets, order, minDocCount, emptyBucketInfo, formatter, keyed, pipelineAggregators(),
|
||||
return getFactory().create(getName(), reducedBuckets, order, minDocCount, emptyBucketInfo, format, keyed, pipelineAggregators(),
|
||||
getMetaData());
|
||||
}
|
||||
|
||||
|
@ -505,12 +497,12 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
if (minDocCount == 0) {
|
||||
emptyBucketInfo = EmptyBucketInfo.readFrom(in);
|
||||
}
|
||||
formatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
keyed = in.readBoolean();
|
||||
int size = in.readVInt();
|
||||
List<B> buckets = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
B bucket = getFactory().createEmptyBucket(keyed, formatter);
|
||||
B bucket = getFactory().createEmptyBucket(keyed, format);
|
||||
bucket.readFrom(in);
|
||||
buckets.add(bucket);
|
||||
}
|
||||
|
@ -536,7 +528,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
|
|||
if (minDocCount == 0) {
|
||||
EmptyBucketInfo.writeTo(emptyBucketInfo, out);
|
||||
}
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeBoolean(keyed);
|
||||
out.writeVInt(buckets.size());
|
||||
for (B bucket : buckets) {
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.range;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.Aggregations;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
|
@ -31,8 +32,6 @@ import org.elasticsearch.search.aggregations.bucket.BucketStreams;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -61,7 +60,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
|
||||
@Override
|
||||
public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
|
||||
Bucket buckets = new Bucket(context.keyed(), context.formatter());
|
||||
Bucket buckets = new Bucket(context.keyed(), context.format());
|
||||
buckets.readFrom(in);
|
||||
return buckets;
|
||||
}
|
||||
|
@ -69,7 +68,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
@Override
|
||||
public BucketStreamContext getBucketStreamContext(Bucket bucket) {
|
||||
BucketStreamContext context = new BucketStreamContext();
|
||||
context.formatter(bucket.formatter);
|
||||
context.format(bucket.format);
|
||||
context.keyed(bucket.keyed);
|
||||
return context;
|
||||
}
|
||||
|
@ -83,20 +82,20 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Range.Bucket {
|
||||
|
||||
protected transient final boolean keyed;
|
||||
protected transient final ValueFormatter formatter;
|
||||
protected transient final DocValueFormat format;
|
||||
protected double from;
|
||||
protected double to;
|
||||
private long docCount;
|
||||
InternalAggregations aggregations;
|
||||
private String key;
|
||||
|
||||
public Bucket(boolean keyed, ValueFormatter formatter) {
|
||||
public Bucket(boolean keyed, DocValueFormat formatter) {
|
||||
this.keyed = keyed;
|
||||
this.formatter = formatter;
|
||||
this.format = formatter;
|
||||
}
|
||||
|
||||
public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed,
|
||||
ValueFormatter formatter) {
|
||||
DocValueFormat formatter) {
|
||||
this(keyed, formatter);
|
||||
this.key = key != null ? key : generateKey(from, to, formatter);
|
||||
this.from = from;
|
||||
|
@ -129,8 +128,8 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
return keyed;
|
||||
}
|
||||
|
||||
public ValueFormatter getFormatter() {
|
||||
return formatter;
|
||||
public DocValueFormat getFormat() {
|
||||
return format;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -138,7 +137,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
if (Double.isInfinite(from)) {
|
||||
return null;
|
||||
} else {
|
||||
return formatter.format(from);
|
||||
return format.format(from);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -147,7 +146,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
if (Double.isInfinite(to)) {
|
||||
return null;
|
||||
} else {
|
||||
return formatter.format(to);
|
||||
return format.format(to);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -173,7 +172,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
aggregationsList.add(range.aggregations);
|
||||
}
|
||||
final InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context);
|
||||
return getFactory().createBucket(key, from, to, docCount, aggs, keyed, formatter);
|
||||
return getFactory().createBucket(key, from, to, docCount, aggs, keyed, format);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -186,14 +185,14 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
}
|
||||
if (!Double.isInfinite(from)) {
|
||||
builder.field(CommonFields.FROM, from);
|
||||
if (formatter != null) {
|
||||
builder.field(CommonFields.FROM_AS_STRING, formatter.format(from));
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.FROM_AS_STRING, format.format(from));
|
||||
}
|
||||
}
|
||||
if (!Double.isInfinite(to)) {
|
||||
builder.field(CommonFields.TO, to);
|
||||
if (formatter != null) {
|
||||
builder.field(CommonFields.TO_AS_STRING, formatter.format(to));
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.TO_AS_STRING, format.format(to));
|
||||
}
|
||||
}
|
||||
builder.field(CommonFields.DOC_COUNT, docCount);
|
||||
|
@ -202,7 +201,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
return builder;
|
||||
}
|
||||
|
||||
protected String generateKey(double from, double to, ValueFormatter formatter) {
|
||||
protected String generateKey(double from, double to, DocValueFormat formatter) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(Double.isInfinite(from) ? "*" : formatter.format(from));
|
||||
sb.append("-");
|
||||
|
@ -236,43 +235,43 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public R create(String name, List<B> ranges, ValueFormatter formatter, boolean keyed, List<PipelineAggregator> pipelineAggregators,
|
||||
public R create(String name, List<B> ranges, DocValueFormat formatter, boolean keyed, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
return (R) new InternalRange<>(name, ranges, formatter, keyed, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public B createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed,
|
||||
ValueFormatter formatter) {
|
||||
DocValueFormat formatter) {
|
||||
return (B) new Bucket(key, from, to, docCount, aggregations, keyed, formatter);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public R create(List<B> ranges, R prototype) {
|
||||
return (R) new InternalRange<>(prototype.name, ranges, prototype.formatter, prototype.keyed, prototype.pipelineAggregators(),
|
||||
return (R) new InternalRange<>(prototype.name, ranges, prototype.format, prototype.keyed, prototype.pipelineAggregators(),
|
||||
prototype.metaData);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public B createBucket(InternalAggregations aggregations, B prototype) {
|
||||
return (B) new Bucket(prototype.getKey(), prototype.from, prototype.to, prototype.getDocCount(), aggregations, prototype.keyed,
|
||||
prototype.formatter);
|
||||
prototype.format);
|
||||
}
|
||||
}
|
||||
|
||||
private List<B> ranges;
|
||||
private Map<String, B> rangeMap;
|
||||
protected ValueFormatter formatter;
|
||||
protected DocValueFormat format;
|
||||
protected boolean keyed;
|
||||
|
||||
public InternalRange() {} // for serialization
|
||||
|
||||
public InternalRange(String name, List<B> ranges, ValueFormatter formatter, boolean keyed,
|
||||
public InternalRange(String name, List<B> ranges, DocValueFormat format, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.ranges = ranges;
|
||||
this.formatter = formatter;
|
||||
this.format = format;
|
||||
this.keyed = keyed;
|
||||
}
|
||||
|
||||
|
@ -320,18 +319,18 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
for (int i = 0; i < this.ranges.size(); ++i) {
|
||||
ranges.add((B) rangeList[i].get(0).reduce(rangeList[i], reduceContext));
|
||||
}
|
||||
return getFactory().create(name, ranges, formatter, keyed, pipelineAggregators(), getMetaData());
|
||||
return getFactory().create(name, ranges, format, keyed, pipelineAggregators(), getMetaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
formatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
keyed = in.readBoolean();
|
||||
int size = in.readVInt();
|
||||
List<B> ranges = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
String key = in.readOptionalString();
|
||||
ranges.add(getFactory().createBucket(key, in.readDouble(), in.readDouble(), in.readVLong(), InternalAggregations.readAggregations(in), keyed, formatter));
|
||||
ranges.add(getFactory().createBucket(key, in.readDouble(), in.readDouble(), in.readVLong(), InternalAggregations.readAggregations(in), keyed, format));
|
||||
}
|
||||
this.ranges = ranges;
|
||||
this.rangeMap = null;
|
||||
|
@ -339,7 +338,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
|
|||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeBoolean(keyed);
|
||||
out.writeVInt(ranges.size());
|
||||
for (B bucket : ranges) {
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
|
@ -40,9 +41,6 @@ import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueParser;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -97,15 +95,15 @@ public class RangeAggregator extends BucketsAggregator {
|
|||
return "[" + from + " to " + to + ")";
|
||||
}
|
||||
|
||||
public Range process(ValueParser parser, SearchContext context) {
|
||||
public Range process(DocValueFormat parser, SearchContext context) {
|
||||
assert parser != null;
|
||||
Double from = this.from;
|
||||
Double to = this.to;
|
||||
if (fromAsStr != null) {
|
||||
from = parser.parseDouble(fromAsStr, context);
|
||||
from = parser.parseDouble(fromAsStr, false, context.nowCallable());
|
||||
}
|
||||
if (toAsStr != null) {
|
||||
to = parser.parseDouble(toAsStr, context);
|
||||
to = parser.parseDouble(toAsStr, false, context.nowCallable());
|
||||
}
|
||||
return new Range(key, from, fromAsStr, to, toAsStr);
|
||||
}
|
||||
|
@ -205,28 +203,27 @@ public class RangeAggregator extends BucketsAggregator {
|
|||
}
|
||||
|
||||
final ValuesSource.Numeric valuesSource;
|
||||
final ValueFormatter formatter;
|
||||
final DocValueFormat format;
|
||||
final Range[] ranges;
|
||||
final boolean keyed;
|
||||
final InternalRange.Factory rangeFactory;
|
||||
|
||||
final double[] maxTo;
|
||||
|
||||
public RangeAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, ValueFormat format,
|
||||
public RangeAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, DocValueFormat format,
|
||||
InternalRange.Factory rangeFactory, List<? extends Range> ranges, boolean keyed, AggregationContext aggregationContext,
|
||||
Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
|
||||
super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
|
||||
assert valuesSource != null;
|
||||
this.valuesSource = valuesSource;
|
||||
this.formatter = format.formatter();
|
||||
this.format = format;
|
||||
this.keyed = keyed;
|
||||
this.rangeFactory = rangeFactory;
|
||||
|
||||
this.ranges = new Range[ranges.size()];
|
||||
ValueParser parser = format != null ? format.parser() : ValueParser.RAW;
|
||||
for (int i = 0; i < this.ranges.length; i++) {
|
||||
this.ranges[i] = ranges.get(i).process(parser, context.searchContext());
|
||||
this.ranges[i] = ranges.get(i).process(format, context.searchContext());
|
||||
}
|
||||
sortRanges(this.ranges);
|
||||
|
||||
|
@ -320,11 +317,11 @@ public class RangeAggregator extends BucketsAggregator {
|
|||
Range range = ranges[i];
|
||||
final long bucketOrd = subBucketOrdinal(owningBucketOrdinal, i);
|
||||
org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket =
|
||||
rangeFactory.createBucket(range.key, range.from, range.to, bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed, formatter);
|
||||
rangeFactory.createBucket(range.key, range.from, range.to, bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed, format);
|
||||
buckets.add(bucket);
|
||||
}
|
||||
// value source can be null in the case of unmapped fields
|
||||
return rangeFactory.create(name, buckets, formatter, keyed, pipelineAggregators(), metaData());
|
||||
return rangeFactory.create(name, buckets, format, keyed, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -334,11 +331,11 @@ public class RangeAggregator extends BucketsAggregator {
|
|||
for (int i = 0; i < ranges.length; i++) {
|
||||
Range range = ranges[i];
|
||||
org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket =
|
||||
rangeFactory.createBucket(range.key, range.from, range.to, 0, subAggs, keyed, formatter);
|
||||
rangeFactory.createBucket(range.key, range.from, range.to, 0, subAggs, keyed, format);
|
||||
buckets.add(bucket);
|
||||
}
|
||||
// value source can be null in the case of unmapped fields
|
||||
return rangeFactory.create(name, buckets, formatter, keyed, pipelineAggregators(), metaData());
|
||||
return rangeFactory.create(name, buckets, format, keyed, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
private static final void sortRanges(final Range[] ranges) {
|
||||
|
@ -367,22 +364,21 @@ public class RangeAggregator extends BucketsAggregator {
|
|||
private final List<R> ranges;
|
||||
private final boolean keyed;
|
||||
private final InternalRange.Factory factory;
|
||||
private final ValueFormatter formatter;
|
||||
private final DocValueFormat format;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public Unmapped(String name, List<R> ranges, boolean keyed, ValueFormat format,
|
||||
public Unmapped(String name, List<R> ranges, boolean keyed, DocValueFormat format,
|
||||
AggregationContext context,
|
||||
Aggregator parent, InternalRange.Factory factory, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
|
||||
super(name, context, parent, pipelineAggregators, metaData);
|
||||
this.ranges = new ArrayList<>();
|
||||
ValueParser parser = format != null ? format.parser() : ValueParser.RAW;
|
||||
for (R range : ranges) {
|
||||
this.ranges.add((R) range.process(parser, context.searchContext()));
|
||||
this.ranges.add((R) range.process(format, context.searchContext()));
|
||||
}
|
||||
this.keyed = keyed;
|
||||
this.formatter = format.formatter();
|
||||
this.format = format;
|
||||
this.factory = factory;
|
||||
}
|
||||
|
||||
|
@ -391,9 +387,9 @@ public class RangeAggregator extends BucketsAggregator {
|
|||
InternalAggregations subAggs = buildEmptySubAggregations();
|
||||
List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = new ArrayList<>(ranges.size());
|
||||
for (RangeAggregator.Range range : ranges) {
|
||||
buckets.add(factory.createBucket(range.key, range.from, range.to, 0, subAggs, keyed, formatter));
|
||||
buckets.add(factory.createBucket(range.key, range.from, range.to, 0, subAggs, keyed, format));
|
||||
}
|
||||
return factory.create(name, buckets, formatter, keyed, pipelineAggregators(), metaData());
|
||||
return factory.create(name, buckets, format, keyed, pipelineAggregators(), metaData());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
package org.elasticsearch.search.aggregations.bucket.range.date;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregations;
|
||||
|
@ -27,7 +28,6 @@ import org.elasticsearch.search.aggregations.bucket.BucketStreams;
|
|||
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
|
@ -54,7 +54,7 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket, I
|
|||
private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
|
||||
@Override
|
||||
public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
|
||||
Bucket buckets = new Bucket(context.keyed(), context.formatter());
|
||||
Bucket buckets = new Bucket(context.keyed(), context.format());
|
||||
buckets.readFrom(in);
|
||||
return buckets;
|
||||
}
|
||||
|
@ -62,7 +62,7 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket, I
|
|||
@Override
|
||||
public BucketStreamContext getBucketStreamContext(Bucket bucket) {
|
||||
BucketStreamContext context = new BucketStreamContext();
|
||||
context.formatter(bucket.formatter());
|
||||
context.format(bucket.format());
|
||||
context.keyed(bucket.keyed());
|
||||
return context;
|
||||
}
|
||||
|
@ -77,15 +77,15 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket, I
|
|||
|
||||
public static class Bucket extends InternalRange.Bucket {
|
||||
|
||||
public Bucket(boolean keyed, ValueFormatter formatter) {
|
||||
public Bucket(boolean keyed, DocValueFormat formatter) {
|
||||
super(keyed, formatter);
|
||||
}
|
||||
|
||||
public Bucket(String key, double from, double to, long docCount, List<InternalAggregation> aggregations, boolean keyed, ValueFormatter formatter) {
|
||||
public Bucket(String key, double from, double to, long docCount, List<InternalAggregation> aggregations, boolean keyed, DocValueFormat formatter) {
|
||||
super(key, from, to, docCount, new InternalAggregations(aggregations), keyed, formatter);
|
||||
}
|
||||
|
||||
public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed, ValueFormatter formatter) {
|
||||
public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed, DocValueFormat formatter) {
|
||||
super(key, from, to, docCount, aggregations, keyed, formatter);
|
||||
}
|
||||
|
||||
|
@ -108,8 +108,8 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket, I
|
|||
return keyed;
|
||||
}
|
||||
|
||||
ValueFormatter formatter() {
|
||||
return formatter;
|
||||
DocValueFormat format() {
|
||||
return format;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -126,32 +126,32 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket, I
|
|||
}
|
||||
|
||||
@Override
|
||||
public InternalDateRange create(String name, List<InternalDateRange.Bucket> ranges, ValueFormatter formatter, boolean keyed,
|
||||
public InternalDateRange create(String name, List<InternalDateRange.Bucket> ranges, DocValueFormat formatter, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
return new InternalDateRange(name, ranges, formatter, keyed, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalDateRange create(List<Bucket> ranges, InternalDateRange prototype) {
|
||||
return new InternalDateRange(prototype.name, ranges, prototype.formatter, prototype.keyed, prototype.pipelineAggregators(),
|
||||
return new InternalDateRange(prototype.name, ranges, prototype.format, prototype.keyed, prototype.pipelineAggregators(),
|
||||
prototype.metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed, ValueFormatter formatter) {
|
||||
public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed, DocValueFormat formatter) {
|
||||
return new Bucket(key, from, to, docCount, aggregations, keyed, formatter);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
|
||||
return new Bucket(prototype.getKey(), ((Number) prototype.getFrom()).doubleValue(), ((Number) prototype.getTo()).doubleValue(),
|
||||
prototype.getDocCount(), aggregations, prototype.getKeyed(), prototype.getFormatter());
|
||||
prototype.getDocCount(), aggregations, prototype.getKeyed(), prototype.getFormat());
|
||||
}
|
||||
}
|
||||
|
||||
InternalDateRange() {} // for serialization
|
||||
|
||||
InternalDateRange(String name, List<InternalDateRange.Bucket> ranges, ValueFormatter formatter, boolean keyed,
|
||||
InternalDateRange(String name, List<InternalDateRange.Bucket> ranges, DocValueFormat formatter, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
super(name, ranges, formatter, keyed, pipelineAggregators, metaData);
|
||||
}
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
package org.elasticsearch.search.aggregations.bucket.range.geodistance;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregations;
|
||||
|
@ -28,7 +29,6 @@ import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -53,7 +53,7 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
|
|||
private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
|
||||
@Override
|
||||
public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
|
||||
Bucket buckets = new Bucket(context.keyed(), context.formatter());
|
||||
Bucket buckets = new Bucket(context.keyed());
|
||||
buckets.readFrom(in);
|
||||
return buckets;
|
||||
}
|
||||
|
@ -61,7 +61,7 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
|
|||
@Override
|
||||
public BucketStreamContext getBucketStreamContext(Bucket bucket) {
|
||||
BucketStreamContext context = new BucketStreamContext();
|
||||
context.formatter(bucket.formatter());
|
||||
context.format(DocValueFormat.RAW);
|
||||
context.keyed(bucket.keyed());
|
||||
return context;
|
||||
}
|
||||
|
@ -76,17 +76,16 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
|
|||
|
||||
static class Bucket extends InternalRange.Bucket {
|
||||
|
||||
Bucket(boolean keyed, ValueFormatter formatter) {
|
||||
super(keyed, formatter);
|
||||
Bucket(boolean keyed) {
|
||||
super(keyed, DocValueFormat.RAW);
|
||||
}
|
||||
|
||||
Bucket(String key, double from, double to, long docCount, List<InternalAggregation> aggregations, boolean keyed,
|
||||
ValueFormatter formatter) {
|
||||
this(key, from, to, docCount, new InternalAggregations(aggregations), keyed, formatter);
|
||||
Bucket(String key, double from, double to, long docCount, List<InternalAggregation> aggregations, boolean keyed) {
|
||||
this(key, from, to, docCount, new InternalAggregations(aggregations), keyed);
|
||||
}
|
||||
|
||||
Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed, ValueFormatter formatter) {
|
||||
super(key, from, to, docCount, aggregations, keyed, formatter);
|
||||
Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed) {
|
||||
super(key, from, to, docCount, aggregations, keyed, DocValueFormat.RAW);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -97,10 +96,6 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
|
|||
boolean keyed() {
|
||||
return keyed;
|
||||
}
|
||||
|
||||
ValueFormatter formatter() {
|
||||
return formatter;
|
||||
}
|
||||
}
|
||||
|
||||
public static class Factory extends InternalRange.Factory<InternalGeoDistance.Bucket, InternalGeoDistance> {
|
||||
|
@ -121,36 +116,36 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
|
|||
}
|
||||
|
||||
@Override
|
||||
public InternalGeoDistance create(String name, List<Bucket> ranges, ValueFormatter formatter, boolean keyed,
|
||||
public InternalGeoDistance create(String name, List<Bucket> ranges, DocValueFormat format, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
return new InternalGeoDistance(name, ranges, formatter, keyed, pipelineAggregators, metaData);
|
||||
return new InternalGeoDistance(name, ranges, keyed, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalGeoDistance create(List<Bucket> ranges, InternalGeoDistance prototype) {
|
||||
return new InternalGeoDistance(prototype.name, ranges, prototype.formatter, prototype.keyed, prototype.pipelineAggregators(),
|
||||
return new InternalGeoDistance(prototype.name, ranges, prototype.keyed, prototype.pipelineAggregators(),
|
||||
prototype.metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed,
|
||||
ValueFormatter formatter) {
|
||||
return new Bucket(key, from, to, docCount, aggregations, keyed, formatter);
|
||||
DocValueFormat format) {
|
||||
return new Bucket(key, from, to, docCount, aggregations, keyed);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
|
||||
return new Bucket(prototype.getKey(), ((Number) prototype.getFrom()).doubleValue(), ((Number) prototype.getTo()).doubleValue(),
|
||||
prototype.getDocCount(), aggregations, prototype.getKeyed(), prototype.getFormatter());
|
||||
prototype.getDocCount(), aggregations, prototype.getKeyed());
|
||||
}
|
||||
}
|
||||
|
||||
InternalGeoDistance() {} // for serialization
|
||||
|
||||
public InternalGeoDistance(String name, List<Bucket> ranges, ValueFormatter formatter, boolean keyed,
|
||||
public InternalGeoDistance(String name, List<Bucket> ranges, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, ranges, formatter, keyed, pipelineAggregators, metaData);
|
||||
super(name, ranges, DocValueFormat.RAW, keyed, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -27,13 +27,13 @@ import org.elasticsearch.common.network.Cidrs;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueParser;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -171,16 +171,16 @@ public class IPv4RangeAggregatorBuilder extends AbstractRangeBuilder<IPv4RangeAg
|
|||
}
|
||||
|
||||
@Override
|
||||
public Range process(ValueParser parser, SearchContext context) {
|
||||
public Range process(DocValueFormat parser, SearchContext context) {
|
||||
assert parser != null;
|
||||
Double from = this.from;
|
||||
Double to = this.to;
|
||||
String key = this.key;
|
||||
if (fromAsStr != null) {
|
||||
from = parser.parseDouble(fromAsStr, context);
|
||||
from = parser.parseDouble(fromAsStr, false, context.nowCallable());
|
||||
}
|
||||
if (toAsStr != null) {
|
||||
to = parser.parseDouble(toAsStr, context);
|
||||
to = parser.parseDouble(toAsStr, false, context.nowCallable());
|
||||
}
|
||||
if (cidr != null) {
|
||||
long[] fromTo = Cidrs.cidrMaskToMinMax(cidr);
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
package org.elasticsearch.search.aggregations.bucket.range.ipv4;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregations;
|
||||
|
@ -27,7 +28,6 @@ import org.elasticsearch.search.aggregations.bucket.BucketStreams;
|
|||
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -76,27 +76,27 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket, I
|
|||
public static class Bucket extends InternalRange.Bucket {
|
||||
|
||||
public Bucket(boolean keyed) {
|
||||
super(keyed, ValueFormatter.IPv4);
|
||||
super(keyed, DocValueFormat.IP);
|
||||
}
|
||||
|
||||
public Bucket(String key, double from, double to, long docCount, List<InternalAggregation> aggregations, boolean keyed) {
|
||||
super(key, from, to, docCount, new InternalAggregations(aggregations), keyed, ValueFormatter.IPv4);
|
||||
super(key, from, to, docCount, new InternalAggregations(aggregations), keyed, DocValueFormat.IP);
|
||||
}
|
||||
|
||||
public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed) {
|
||||
super(key, from, to, docCount, aggregations, keyed, ValueFormatter.IPv4);
|
||||
super(key, from, to, docCount, aggregations, keyed, DocValueFormat.IP);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getFromAsString() {
|
||||
double from = ((Number) this.from).doubleValue();
|
||||
return Double.isInfinite(from) ? null : from == 0 ? null : ValueFormatter.IPv4.format(from);
|
||||
return Double.isInfinite(from) ? null : from == 0 ? null : DocValueFormat.IP.format(from);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getToAsString() {
|
||||
double to = ((Number) this.to).doubleValue();
|
||||
return Double.isInfinite(to) ? null : MAX_IP == to ? null : ValueFormatter.IPv4.format(to);
|
||||
return Double.isInfinite(to) ? null : MAX_IP == to ? null : DocValueFormat.IP.format(to);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -122,7 +122,7 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket, I
|
|||
}
|
||||
|
||||
@Override
|
||||
public InternalIPv4Range create(String name, List<Bucket> ranges, ValueFormatter formatter, boolean keyed,
|
||||
public InternalIPv4Range create(String name, List<Bucket> ranges, DocValueFormat formatter, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
return new InternalIPv4Range(name, ranges, keyed, pipelineAggregators, metaData);
|
||||
}
|
||||
|
@ -134,7 +134,7 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket, I
|
|||
|
||||
@Override
|
||||
public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed,
|
||||
ValueFormatter formatter) {
|
||||
DocValueFormat formatter) {
|
||||
return new Bucket(key, from, to, docCount, aggregations, keyed);
|
||||
}
|
||||
|
||||
|
@ -149,7 +149,7 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket, I
|
|||
|
||||
public InternalIPv4Range(String name, List<InternalIPv4Range.Bucket> ranges, boolean keyed, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, ranges, ValueFormatter.IPv4, keyed, pipelineAggregators, metaData);
|
||||
super(name, ranges, DocValueFormat.IP, keyed, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.significant;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregations;
|
||||
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
|
||||
|
@ -28,14 +29,13 @@ import org.elasticsearch.search.aggregations.bucket.BucketStreams;
|
|||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -57,7 +57,7 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
|
|||
@Override
|
||||
public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
|
||||
Bucket buckets = new Bucket((long) context.attributes().get("subsetSize"), (long) context.attributes().get("supersetSize"),
|
||||
context.formatter());
|
||||
context.format());
|
||||
buckets.readFrom(in);
|
||||
return buckets;
|
||||
}
|
||||
|
@ -69,7 +69,7 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
|
|||
attributes.put("subsetSize", bucket.subsetSize);
|
||||
attributes.put("supersetSize", bucket.supersetSize);
|
||||
context.attributes(attributes);
|
||||
context.formatter(bucket.formatter);
|
||||
context.format(bucket.format);
|
||||
return context;
|
||||
}
|
||||
};
|
||||
|
@ -82,18 +82,18 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
|
|||
static class Bucket extends InternalSignificantTerms.Bucket {
|
||||
|
||||
long term;
|
||||
private transient final ValueFormatter formatter;
|
||||
private transient final DocValueFormat format;
|
||||
|
||||
public Bucket(long subsetSize, long supersetSize, ValueFormatter formatter) {
|
||||
public Bucket(long subsetSize, long supersetSize, DocValueFormat formatter) {
|
||||
super(subsetSize, supersetSize);
|
||||
this.formatter = formatter;
|
||||
this.format = formatter;
|
||||
// for serialization
|
||||
}
|
||||
|
||||
public Bucket(long subsetDf, long subsetSize, long supersetDf, long supersetSize, long term, InternalAggregations aggregations,
|
||||
ValueFormatter formatter) {
|
||||
DocValueFormat format) {
|
||||
super(subsetDf, subsetSize, supersetDf, supersetSize, aggregations);
|
||||
this.formatter = formatter;
|
||||
this.format = format;
|
||||
this.term = term;
|
||||
}
|
||||
|
||||
|
@ -124,7 +124,7 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
|
|||
|
||||
@Override
|
||||
Bucket newBucket(long subsetDf, long subsetSize, long supersetDf, long supersetSize, InternalAggregations aggregations) {
|
||||
return new Bucket(subsetDf, subsetSize, supersetDf, supersetSize, term, aggregations, formatter);
|
||||
return new Bucket(subsetDf, subsetSize, supersetDf, supersetSize, term, aggregations, format);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -150,8 +150,8 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
|
|||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(CommonFields.KEY, term);
|
||||
if (formatter != null) {
|
||||
builder.field(CommonFields.KEY_AS_STRING, formatter.format(term));
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.KEY_AS_STRING, format.format(term));
|
||||
}
|
||||
builder.field(CommonFields.DOC_COUNT, getDocCount());
|
||||
builder.field("score", score);
|
||||
|
@ -161,17 +161,17 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
|
|||
return builder;
|
||||
}
|
||||
}
|
||||
private ValueFormatter formatter;
|
||||
private DocValueFormat format;
|
||||
|
||||
SignificantLongTerms() {
|
||||
} // for serialization
|
||||
|
||||
public SignificantLongTerms(long subsetSize, long supersetSize, String name, ValueFormatter formatter, int requiredSize,
|
||||
public SignificantLongTerms(long subsetSize, long supersetSize, String name, DocValueFormat format, int requiredSize,
|
||||
long minDocCount, SignificanceHeuristic significanceHeuristic, List<? extends InternalSignificantTerms.Bucket> buckets,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
|
||||
super(subsetSize, supersetSize, name, requiredSize, minDocCount, significanceHeuristic, buckets, pipelineAggregators, metaData);
|
||||
this.formatter = formatter;
|
||||
this.format = Objects.requireNonNull(format);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -181,28 +181,28 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
|
|||
|
||||
@Override
|
||||
public SignificantLongTerms create(List<SignificantLongTerms.Bucket> buckets) {
|
||||
return new SignificantLongTerms(this.subsetSize, this.supersetSize, this.name, this.formatter, this.requiredSize, this.minDocCount,
|
||||
return new SignificantLongTerms(this.subsetSize, this.supersetSize, this.name, this.format, this.requiredSize, this.minDocCount,
|
||||
this.significanceHeuristic, buckets, this.pipelineAggregators(), this.metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bucket createBucket(InternalAggregations aggregations, SignificantLongTerms.Bucket prototype) {
|
||||
return new Bucket(prototype.subsetDf, prototype.subsetSize, prototype.supersetDf, prototype.supersetSize, prototype.term,
|
||||
aggregations, prototype.formatter);
|
||||
aggregations, prototype.format);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SignificantLongTerms create(long subsetSize, long supersetSize,
|
||||
List<org.elasticsearch.search.aggregations.bucket.significant.InternalSignificantTerms.Bucket> buckets,
|
||||
InternalSignificantTerms prototype) {
|
||||
return new SignificantLongTerms(subsetSize, supersetSize, prototype.getName(), ((SignificantLongTerms) prototype).formatter,
|
||||
return new SignificantLongTerms(subsetSize, supersetSize, prototype.getName(), ((SignificantLongTerms) prototype).format,
|
||||
prototype.requiredSize, prototype.minDocCount, prototype.significanceHeuristic, buckets, prototype.pipelineAggregators(),
|
||||
prototype.getMetaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
this.formatter = ValueFormatterStreams.readOptional(in);
|
||||
this.format = in.readValueFormat();
|
||||
this.requiredSize = readSize(in);
|
||||
this.minDocCount = in.readVLong();
|
||||
this.subsetSize = in.readVLong();
|
||||
|
@ -212,7 +212,7 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
|
|||
int size = in.readVInt();
|
||||
List<InternalSignificantTerms.Bucket> buckets = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
Bucket bucket = new Bucket(subsetSize, supersetSize, formatter);
|
||||
Bucket bucket = new Bucket(subsetSize, supersetSize, format);
|
||||
bucket.readFrom(in);
|
||||
buckets.add(bucket);
|
||||
|
||||
|
@ -223,7 +223,7 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
|
|||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(format);
|
||||
writeSize(requiredSize, out);
|
||||
out.writeVLong(minDocCount);
|
||||
out.writeVLong(subsetSize);
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.significant;
|
|||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.elasticsearch.common.lease.Releasables;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollector;
|
||||
|
@ -31,7 +32,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.internal.ContextIndexSearcher;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -46,7 +46,7 @@ import java.util.Map;
|
|||
public class SignificantLongTermsAggregator extends LongTermsAggregator {
|
||||
|
||||
public SignificantLongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource,
|
||||
ValueFormat format, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent,
|
||||
DocValueFormat format, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent,
|
||||
SignificanceHeuristic significanceHeuristic, SignificantTermsAggregatorFactory termsAggFactory,
|
||||
IncludeExclude.LongFilter includeExclude,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
|
@ -90,7 +90,7 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator {
|
|||
continue;
|
||||
}
|
||||
if (spare == null) {
|
||||
spare = new SignificantLongTerms.Bucket(0, 0, 0, 0, 0, null, formatter);
|
||||
spare = new SignificantLongTerms.Bucket(0, 0, 0, 0, 0, null, format);
|
||||
}
|
||||
spare.term = bucketOrds.get(i);
|
||||
spare.subsetDf = docCount;
|
||||
|
@ -111,7 +111,7 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator {
|
|||
bucket.aggregations = bucketAggregations(bucket.bucketOrd);
|
||||
list[i] = bucket;
|
||||
}
|
||||
return new SignificantLongTerms(subsetSize, supersetSize, name, formatter, bucketCountThresholds.getRequiredSize(),
|
||||
return new SignificantLongTerms(subsetSize, supersetSize, name, format, bucketCountThresholds.getRequiredSize(),
|
||||
bucketCountThresholds.getMinDocCount(), significanceHeuristic, Arrays.asList(list), pipelineAggregators(),
|
||||
metaData());
|
||||
}
|
||||
|
@ -122,7 +122,7 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator {
|
|||
ContextIndexSearcher searcher = context.searchContext().searcher();
|
||||
IndexReader topReader = searcher.getIndexReader();
|
||||
int supersetSize = topReader.numDocs();
|
||||
return new SignificantLongTerms(0, supersetSize, name, formatter, bucketCountThresholds.getRequiredSize(),
|
||||
return new SignificantLongTerms(0, supersetSize, name, format, bucketCountThresholds.getRequiredSize(),
|
||||
bucketCountThresholds.getMinDocCount(), significanceHeuristic,
|
||||
Collections.<InternalSignificantTerms.Bucket> emptyList(), pipelineAggregators(), metaData());
|
||||
}
|
||||
|
|
|
@ -22,13 +22,12 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregations;
|
||||
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
|
||||
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -55,7 +54,7 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
|
|||
private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
|
||||
@Override
|
||||
public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
|
||||
Bucket buckets = new Bucket(context.formatter(), (boolean) context.attributes().get("showDocCountError"));
|
||||
Bucket buckets = new Bucket(context.format(), (boolean) context.attributes().get("showDocCountError"));
|
||||
buckets.readFrom(in);
|
||||
return buckets;
|
||||
}
|
||||
|
@ -66,7 +65,7 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
|
|||
Map<String, Object> attributes = new HashMap<>();
|
||||
attributes.put("showDocCountError", bucket.showDocCountError);
|
||||
context.attributes(attributes);
|
||||
context.formatter(bucket.formatter);
|
||||
context.format(bucket.format);
|
||||
return context;
|
||||
}
|
||||
};
|
||||
|
@ -80,13 +79,13 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
|
|||
|
||||
double term;
|
||||
|
||||
public Bucket(ValueFormatter formatter, boolean showDocCountError) {
|
||||
super(formatter, showDocCountError);
|
||||
public Bucket(DocValueFormat format, boolean showDocCountError) {
|
||||
super(format, showDocCountError);
|
||||
}
|
||||
|
||||
public Bucket(double term, long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError,
|
||||
ValueFormatter formatter) {
|
||||
super(docCount, aggregations, showDocCountError, docCountError, formatter);
|
||||
DocValueFormat format) {
|
||||
super(docCount, aggregations, showDocCountError, docCountError, format);
|
||||
this.term = term;
|
||||
}
|
||||
|
||||
|
@ -112,7 +111,7 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
|
|||
|
||||
@Override
|
||||
Bucket newBucket(long docCount, InternalAggregations aggs, long docCountError) {
|
||||
return new Bucket(term, docCount, aggs, showDocCountError, docCountError, formatter);
|
||||
return new Bucket(term, docCount, aggs, showDocCountError, docCountError, format);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -140,8 +139,8 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
|
|||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(CommonFields.KEY, term);
|
||||
if (formatter != ValueFormatter.RAW) {
|
||||
builder.field(CommonFields.KEY_AS_STRING, formatter.format(term));
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.KEY_AS_STRING, format.format(term));
|
||||
}
|
||||
builder.field(CommonFields.DOC_COUNT, getDocCount());
|
||||
if (showDocCountError) {
|
||||
|
@ -153,17 +152,17 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
|
|||
}
|
||||
}
|
||||
|
||||
private ValueFormatter formatter;
|
||||
private DocValueFormat format;
|
||||
|
||||
DoubleTerms() {
|
||||
} // for serialization
|
||||
|
||||
public DoubleTerms(String name, Terms.Order order, ValueFormatter formatter, int requiredSize, int shardSize,
|
||||
public DoubleTerms(String name, Terms.Order order, DocValueFormat format, int requiredSize, int shardSize,
|
||||
long minDocCount, List<? extends InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError,
|
||||
long otherDocCount, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, pipelineAggregators,
|
||||
metaData);
|
||||
this.formatter = formatter;
|
||||
this.format = format;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -173,20 +172,20 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
|
|||
|
||||
@Override
|
||||
public DoubleTerms create(List<Bucket> buckets) {
|
||||
return new DoubleTerms(this.name, this.order, this.formatter, this.requiredSize, this.shardSize, this.minDocCount, buckets,
|
||||
return new DoubleTerms(this.name, this.order, this.format, this.requiredSize, this.shardSize, this.minDocCount, buckets,
|
||||
this.showTermDocCountError, this.docCountError, this.otherDocCount, this.pipelineAggregators(), this.metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
|
||||
return new Bucket(prototype.term, prototype.docCount, aggregations, prototype.showDocCountError, prototype.docCountError,
|
||||
prototype.formatter);
|
||||
prototype.format);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DoubleTerms create(String name, List<org.elasticsearch.search.aggregations.bucket.terms.InternalTerms.Bucket> buckets,
|
||||
long docCountError, long otherDocCount, InternalTerms prototype) {
|
||||
return new DoubleTerms(name, prototype.order, ((DoubleTerms) prototype).formatter, prototype.requiredSize, prototype.shardSize,
|
||||
return new DoubleTerms(name, prototype.order, ((DoubleTerms) prototype).format, prototype.requiredSize, prototype.shardSize,
|
||||
prototype.minDocCount, buckets, prototype.showTermDocCountError, docCountError, otherDocCount, prototype.pipelineAggregators(),
|
||||
prototype.getMetaData());
|
||||
}
|
||||
|
@ -195,7 +194,7 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
|
|||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
this.docCountError = in.readLong();
|
||||
this.order = InternalOrder.Streams.readOrder(in);
|
||||
this.formatter = ValueFormatterStreams.readOptional(in);
|
||||
this.format = in.readValueFormat();
|
||||
this.requiredSize = readSize(in);
|
||||
this.shardSize = readSize(in);
|
||||
this.showTermDocCountError = in.readBoolean();
|
||||
|
@ -204,7 +203,7 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
|
|||
int size = in.readVInt();
|
||||
List<InternalTerms.Bucket> buckets = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
Bucket bucket = new Bucket(formatter, showTermDocCountError);
|
||||
Bucket bucket = new Bucket(format, showTermDocCountError);
|
||||
bucket.readFrom(in);
|
||||
buckets.add(bucket);
|
||||
}
|
||||
|
@ -216,7 +215,7 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
|
|||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
out.writeLong(docCountError);
|
||||
InternalOrder.Streams.writeOrder(order, out);
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(format);
|
||||
writeSize(requiredSize, out);
|
||||
writeSize(shardSize, out);
|
||||
out.writeBoolean(showTermDocCountError);
|
||||
|
|
|
@ -22,6 +22,7 @@ import org.apache.lucene.index.LeafReaderContext;
|
|||
import org.apache.lucene.index.SortedNumericDocValues;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.elasticsearch.index.fielddata.FieldData;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
|
||||
|
@ -29,7 +30,6 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
|||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
@ -41,7 +41,7 @@ import java.util.Map;
|
|||
*/
|
||||
public class DoubleTermsAggregator extends LongTermsAggregator {
|
||||
|
||||
public DoubleTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, ValueFormat format,
|
||||
public DoubleTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, DocValueFormat format,
|
||||
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent,
|
||||
SubAggCollectionMode collectionMode, boolean showTermDocCountError, IncludeExclude.LongFilter longFilter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
|
@ -69,7 +69,7 @@ public class DoubleTermsAggregator extends LongTermsAggregator {
|
|||
private static DoubleTerms.Bucket convertToDouble(InternalTerms.Bucket bucket) {
|
||||
final long term = ((Number) bucket.getKey()).longValue();
|
||||
final double value = NumericUtils.sortableLongToDouble(term);
|
||||
return new DoubleTerms.Bucket(value, bucket.docCount, bucket.aggregations, bucket.showDocCountError, bucket.docCountError, bucket.formatter);
|
||||
return new DoubleTerms.Bucket(value, bucket.docCount, bucket.aggregations, bucket.showDocCountError, bucket.docCountError, bucket.format);
|
||||
}
|
||||
|
||||
private static DoubleTerms convertToDouble(LongTerms terms) {
|
||||
|
@ -77,7 +77,7 @@ public class DoubleTermsAggregator extends LongTermsAggregator {
|
|||
for (int i = 0; i < buckets.length; ++i) {
|
||||
buckets[i] = convertToDouble(buckets[i]);
|
||||
}
|
||||
return new DoubleTerms(terms.getName(), terms.order, terms.formatter, terms.requiredSize, terms.shardSize, terms.minDocCount,
|
||||
return new DoubleTerms(terms.getName(), terms.order, terms.format, terms.requiredSize, terms.shardSize, terms.minDocCount,
|
||||
Arrays.asList(buckets), terms.showTermDocCountError, terms.docCountError, terms.otherDocCount, terms.pipelineAggregators(),
|
||||
terms.getMetaData());
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.terms;
|
|||
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationExecutionException;
|
||||
import org.elasticsearch.search.aggregations.Aggregations;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
|
@ -27,7 +28,6 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
|
|||
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
|
@ -52,16 +52,16 @@ public abstract class InternalTerms<A extends InternalTerms, B extends InternalT
|
|||
protected long docCountError;
|
||||
protected InternalAggregations aggregations;
|
||||
protected boolean showDocCountError;
|
||||
transient final ValueFormatter formatter;
|
||||
transient final DocValueFormat format;
|
||||
|
||||
protected Bucket(ValueFormatter formatter, boolean showDocCountError) {
|
||||
protected Bucket(DocValueFormat formatter, boolean showDocCountError) {
|
||||
// for serialization
|
||||
this.showDocCountError = showDocCountError;
|
||||
this.formatter = formatter;
|
||||
this.format = formatter;
|
||||
}
|
||||
|
||||
protected Bucket(long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError,
|
||||
ValueFormatter formatter) {
|
||||
DocValueFormat formatter) {
|
||||
this(formatter, showDocCountError);
|
||||
this.docCount = docCount;
|
||||
this.aggregations = aggregations;
|
||||
|
|
|
@ -21,13 +21,12 @@ package org.elasticsearch.search.aggregations.bucket.terms;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregations;
|
||||
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
|
||||
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -54,7 +53,7 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
|
|||
private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
|
||||
@Override
|
||||
public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
|
||||
Bucket buckets = new Bucket(context.formatter(), (boolean) context.attributes().get("showDocCountError"));
|
||||
Bucket buckets = new Bucket(context.format(), (boolean) context.attributes().get("showDocCountError"));
|
||||
buckets.readFrom(in);
|
||||
return buckets;
|
||||
}
|
||||
|
@ -65,7 +64,7 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
|
|||
Map<String, Object> attributes = new HashMap<>();
|
||||
attributes.put("showDocCountError", bucket.showDocCountError);
|
||||
context.attributes(attributes);
|
||||
context.formatter(bucket.formatter);
|
||||
context.format(bucket.format);
|
||||
return context;
|
||||
}
|
||||
};
|
||||
|
@ -79,19 +78,19 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
|
|||
|
||||
long term;
|
||||
|
||||
public Bucket(ValueFormatter formatter, boolean showDocCountError) {
|
||||
super(formatter, showDocCountError);
|
||||
public Bucket(DocValueFormat format, boolean showDocCountError) {
|
||||
super(format, showDocCountError);
|
||||
}
|
||||
|
||||
public Bucket(long term, long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError,
|
||||
ValueFormatter formatter) {
|
||||
super(docCount, aggregations, showDocCountError, docCountError, formatter);
|
||||
DocValueFormat format) {
|
||||
super(docCount, aggregations, showDocCountError, docCountError, format);
|
||||
this.term = term;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getKeyAsString() {
|
||||
return formatter.format(term);
|
||||
return format.format(term);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -111,7 +110,7 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
|
|||
|
||||
@Override
|
||||
Bucket newBucket(long docCount, InternalAggregations aggs, long docCountError) {
|
||||
return new Bucket(term, docCount, aggs, showDocCountError, docCountError, formatter);
|
||||
return new Bucket(term, docCount, aggs, showDocCountError, docCountError, format);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -139,8 +138,8 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
|
|||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(CommonFields.KEY, term);
|
||||
if (formatter != ValueFormatter.RAW) {
|
||||
builder.field(CommonFields.KEY_AS_STRING, formatter.format(term));
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.KEY_AS_STRING, format.format(term));
|
||||
}
|
||||
builder.field(CommonFields.DOC_COUNT, getDocCount());
|
||||
if (showDocCountError) {
|
||||
|
@ -152,16 +151,16 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
|
|||
}
|
||||
}
|
||||
|
||||
ValueFormatter formatter;
|
||||
DocValueFormat format;
|
||||
|
||||
LongTerms() {} // for serialization
|
||||
|
||||
public LongTerms(String name, Terms.Order order, ValueFormatter formatter, int requiredSize, int shardSize, long minDocCount,
|
||||
public LongTerms(String name, Terms.Order order, DocValueFormat formatter, int requiredSize, int shardSize, long minDocCount,
|
||||
List<? extends InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, pipelineAggregators,
|
||||
metaData);
|
||||
this.formatter = formatter;
|
||||
this.format = formatter;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -171,20 +170,20 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
|
|||
|
||||
@Override
|
||||
public LongTerms create(List<Bucket> buckets) {
|
||||
return new LongTerms(this.name, this.order, this.formatter, this.requiredSize, this.shardSize, this.minDocCount, buckets,
|
||||
return new LongTerms(this.name, this.order, this.format, this.requiredSize, this.shardSize, this.minDocCount, buckets,
|
||||
this.showTermDocCountError, this.docCountError, this.otherDocCount, this.pipelineAggregators(), this.metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
|
||||
return new Bucket(prototype.term, prototype.docCount, aggregations, prototype.showDocCountError, prototype.docCountError,
|
||||
prototype.formatter);
|
||||
prototype.format);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected LongTerms create(String name, List<org.elasticsearch.search.aggregations.bucket.terms.InternalTerms.Bucket> buckets,
|
||||
long docCountError, long otherDocCount, InternalTerms prototype) {
|
||||
return new LongTerms(name, prototype.order, ((LongTerms) prototype).formatter, prototype.requiredSize, prototype.shardSize,
|
||||
return new LongTerms(name, prototype.order, ((LongTerms) prototype).format, prototype.requiredSize, prototype.shardSize,
|
||||
prototype.minDocCount, buckets, prototype.showTermDocCountError, docCountError, otherDocCount, prototype.pipelineAggregators(),
|
||||
prototype.getMetaData());
|
||||
}
|
||||
|
@ -193,7 +192,7 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
|
|||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
this.docCountError = in.readLong();
|
||||
this.order = InternalOrder.Streams.readOrder(in);
|
||||
this.formatter = ValueFormatterStreams.readOptional(in);
|
||||
this.format = in.readValueFormat();
|
||||
this.requiredSize = readSize(in);
|
||||
this.shardSize = readSize(in);
|
||||
this.showTermDocCountError = in.readBoolean();
|
||||
|
@ -202,7 +201,7 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
|
|||
int size = in.readVInt();
|
||||
List<InternalTerms.Bucket> buckets = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
Bucket bucket = new Bucket(formatter, showTermDocCountError);
|
||||
Bucket bucket = new Bucket(format, showTermDocCountError);
|
||||
bucket.readFrom(in);
|
||||
buckets.add(bucket);
|
||||
}
|
||||
|
@ -214,7 +213,7 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
|
|||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
out.writeLong(docCountError);
|
||||
InternalOrder.Streams.writeOrder(order, out);
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(format);
|
||||
writeSize(requiredSize, out);
|
||||
writeSize(shardSize, out);
|
||||
out.writeBoolean(showTermDocCountError);
|
||||
|
|
|
@ -22,6 +22,7 @@ import org.apache.lucene.index.LeafReaderContext;
|
|||
import org.apache.lucene.index.SortedNumericDocValues;
|
||||
import org.elasticsearch.common.lease.Releasables;
|
||||
import org.elasticsearch.common.util.LongHash;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
|
@ -33,8 +34,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
@ -48,19 +47,19 @@ import java.util.Map;
|
|||
public class LongTermsAggregator extends TermsAggregator {
|
||||
|
||||
protected final ValuesSource.Numeric valuesSource;
|
||||
protected final ValueFormatter formatter;
|
||||
protected final DocValueFormat format;
|
||||
protected final LongHash bucketOrds;
|
||||
private boolean showTermDocCountError;
|
||||
private LongFilter longFilter;
|
||||
|
||||
public LongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, ValueFormat format,
|
||||
public LongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, DocValueFormat format,
|
||||
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent,
|
||||
SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, IncludeExclude.LongFilter longFilter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
super(name, factories, aggregationContext, parent, bucketCountThresholds, order, subAggCollectMode, pipelineAggregators, metaData);
|
||||
this.valuesSource = valuesSource;
|
||||
this.showTermDocCountError = showTermDocCountError;
|
||||
this.formatter = format.formatter();
|
||||
this.format = format;
|
||||
this.longFilter = longFilter;
|
||||
bucketOrds = new LongHash(1, aggregationContext.bigArrays());
|
||||
}
|
||||
|
@ -131,7 +130,7 @@ public class LongTermsAggregator extends TermsAggregator {
|
|||
LongTerms.Bucket spare = null;
|
||||
for (long i = 0; i < bucketOrds.size(); i++) {
|
||||
if (spare == null) {
|
||||
spare = new LongTerms.Bucket(0, 0, null, showTermDocCountError, 0, formatter);
|
||||
spare = new LongTerms.Bucket(0, 0, null, showTermDocCountError, 0, format);
|
||||
}
|
||||
spare.term = bucketOrds.get(i);
|
||||
spare.docCount = bucketDocCount(i);
|
||||
|
@ -160,14 +159,14 @@ public class LongTermsAggregator extends TermsAggregator {
|
|||
list[i].docCountError = 0;
|
||||
}
|
||||
|
||||
return new LongTerms(name, order, formatter, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
|
||||
return new LongTerms(name, order, format, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
|
||||
bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, pipelineAggregators(),
|
||||
metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalAggregation buildEmptyAggregation() {
|
||||
return new LongTerms(name, order, formatter, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
|
||||
return new LongTerms(name, order, format, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
|
||||
bucketCountThresholds.getMinDocCount(), Collections.<InternalTerms.Bucket> emptyList(), showTermDocCountError, 0, 0,
|
||||
pipelineAggregators(), metaData());
|
||||
}
|
||||
|
|
|
@ -18,8 +18,8 @@
|
|||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics;
|
||||
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -29,7 +29,9 @@ import java.util.Map;
|
|||
*/
|
||||
public abstract class InternalNumericMetricsAggregation extends InternalMetricsAggregation {
|
||||
|
||||
protected ValueFormatter valueFormatter;
|
||||
private static final DocValueFormat DEFAULT_FORMAT = DocValueFormat.RAW;
|
||||
|
||||
protected DocValueFormat format = DEFAULT_FORMAT;
|
||||
|
||||
public static abstract class SingleValue extends InternalNumericMetricsAggregation implements NumericMetricsAggregation.SingleValue {
|
||||
|
||||
|
@ -41,7 +43,7 @@ public abstract class InternalNumericMetricsAggregation extends InternalMetricsA
|
|||
|
||||
@Override
|
||||
public String getValueAsString() {
|
||||
return valueFormatter.format(value());
|
||||
return format.format(value());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -68,7 +70,7 @@ public abstract class InternalNumericMetricsAggregation extends InternalMetricsA
|
|||
public abstract double value(String name);
|
||||
|
||||
public String valueAsString(String name) {
|
||||
return valueFormatter.format(value(name));
|
||||
return format.format(value(name));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.elasticsearch.common.util.BigArrays;
|
|||
import org.elasticsearch.common.util.DoubleArray;
|
||||
import org.elasticsearch.common.util.LongArray;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollector;
|
||||
|
@ -32,7 +33,6 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -47,13 +47,13 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue {
|
|||
|
||||
LongArray counts;
|
||||
DoubleArray sums;
|
||||
ValueFormatter formatter;
|
||||
DocValueFormat format;
|
||||
|
||||
public AvgAggregator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter, AggregationContext context,
|
||||
public AvgAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter, AggregationContext context,
|
||||
Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
super(name, context, parent, pipelineAggregators, metaData);
|
||||
this.valuesSource = valuesSource;
|
||||
this.formatter = formatter;
|
||||
this.format = formatter;
|
||||
if (valuesSource != null) {
|
||||
final BigArrays bigArrays = context.bigArrays();
|
||||
counts = bigArrays.newLongArray(1, true);
|
||||
|
@ -105,12 +105,12 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue {
|
|||
if (valuesSource == null || bucket >= sums.size()) {
|
||||
return buildEmptyAggregation();
|
||||
}
|
||||
return new InternalAvg(name, sums.get(bucket), counts.get(bucket), formatter, pipelineAggregators(), metaData());
|
||||
return new InternalAvg(name, sums.get(bucket), counts.get(bucket), format, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalAggregation buildEmptyAggregation() {
|
||||
return new InternalAvg(name, 0.0, 0L, formatter, pipelineAggregators(), metaData());
|
||||
return new InternalAvg(name, 0.0, 0L, format, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -44,12 +44,12 @@ public class AvgAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSo
|
|||
@Override
|
||||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new AvgAggregator(name, null, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new AvgAggregator(name, null, config.format(), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new AvgAggregator(name, valuesSource, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new AvgAggregator(name, valuesSource, config.format(), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,12 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.avg;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -57,12 +56,12 @@ public class InternalAvg extends InternalNumericMetricsAggregation.SingleValue i
|
|||
|
||||
InternalAvg() {} // for serialization
|
||||
|
||||
public InternalAvg(String name, double sum, long count, ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
public InternalAvg(String name, double sum, long count, DocValueFormat format, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.sum = sum;
|
||||
this.count = count;
|
||||
this.valueFormatter = formatter;
|
||||
this.format = format;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -88,19 +87,19 @@ public class InternalAvg extends InternalNumericMetricsAggregation.SingleValue i
|
|||
count += ((InternalAvg) aggregation).count;
|
||||
sum += ((InternalAvg) aggregation).sum;
|
||||
}
|
||||
return new InternalAvg(getName(), sum, count, valueFormatter, pipelineAggregators(), getMetaData());
|
||||
return new InternalAvg(getName(), sum, count, format, pipelineAggregators(), getMetaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
sum = in.readDouble();
|
||||
count = in.readVLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeDouble(sum);
|
||||
out.writeVLong(count);
|
||||
}
|
||||
|
@ -108,8 +107,8 @@ public class InternalAvg extends InternalNumericMetricsAggregation.SingleValue i
|
|||
@Override
|
||||
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(CommonFields.VALUE, count != 0 ? getValue() : null);
|
||||
if (count != 0 && !(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, valueFormatter.format(getValue()));
|
||||
if (count != 0 && format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, format.format(getValue()));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -35,6 +35,7 @@ import org.elasticsearch.common.util.LongArray;
|
|||
import org.elasticsearch.common.util.ObjectArray;
|
||||
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollector;
|
||||
|
@ -42,7 +43,6 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -61,15 +61,13 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
|
|||
private HyperLogLogPlusPlus counts;
|
||||
|
||||
private Collector collector;
|
||||
private ValueFormatter formatter;
|
||||
|
||||
public CardinalityAggregator(String name, ValuesSource valuesSource, int precision, ValueFormatter formatter,
|
||||
public CardinalityAggregator(String name, ValuesSource valuesSource, int precision,
|
||||
AggregationContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
super(name, context, parent, pipelineAggregators, metaData);
|
||||
this.valuesSource = valuesSource;
|
||||
this.precision = precision;
|
||||
this.counts = valuesSource == null ? null : new HyperLogLogPlusPlus(precision, context.bigArrays(), 1);
|
||||
this.formatter = formatter;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -146,12 +144,12 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
|
|||
// this Aggregator (and its HLL++ counters) is released.
|
||||
HyperLogLogPlusPlus copy = new HyperLogLogPlusPlus(precision, BigArrays.NON_RECYCLING_INSTANCE, 1);
|
||||
copy.merge(0, counts, owningBucketOrdinal);
|
||||
return new InternalCardinality(name, copy, formatter, pipelineAggregators(), metaData());
|
||||
return new InternalCardinality(name, copy, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalAggregation buildEmptyAggregation() {
|
||||
return new InternalCardinality(name, null, formatter, pipelineAggregators(), metaData());
|
||||
return new InternalCardinality(name, null, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -48,13 +48,13 @@ public class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory<
|
|||
@Override
|
||||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new CardinalityAggregator(name, null, precision(parent), config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new CardinalityAggregator(name, null, precision(parent), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(ValuesSource valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new CardinalityAggregator(name, valuesSource, precision(parent), config.formatter(), context, parent, pipelineAggregators,
|
||||
return new CardinalityAggregator(name, valuesSource, precision(parent), context, parent, pipelineAggregators,
|
||||
metaData);
|
||||
}
|
||||
|
||||
|
|
|
@ -27,8 +27,6 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
|
|||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -53,11 +51,10 @@ public final class InternalCardinality extends InternalNumericMetricsAggregation
|
|||
|
||||
private HyperLogLogPlusPlus counts;
|
||||
|
||||
InternalCardinality(String name, HyperLogLogPlusPlus counts, ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
InternalCardinality(String name, HyperLogLogPlusPlus counts, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.counts = counts;
|
||||
this.valueFormatter = formatter;
|
||||
}
|
||||
|
||||
private InternalCardinality() {
|
||||
|
@ -80,7 +77,7 @@ public final class InternalCardinality extends InternalNumericMetricsAggregation
|
|||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
if (in.readBoolean()) {
|
||||
counts = HyperLogLogPlusPlus.readFrom(in, BigArrays.NON_RECYCLING_INSTANCE);
|
||||
} else {
|
||||
|
@ -90,7 +87,7 @@ public final class InternalCardinality extends InternalNumericMetricsAggregation
|
|||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
if (counts != null) {
|
||||
out.writeBoolean(true);
|
||||
counts.writeTo(0, out);
|
||||
|
@ -107,7 +104,7 @@ public final class InternalCardinality extends InternalNumericMetricsAggregation
|
|||
if (cardinality.counts != null) {
|
||||
if (reduced == null) {
|
||||
reduced = new InternalCardinality(name, new HyperLogLogPlusPlus(cardinality.counts.precision(),
|
||||
BigArrays.NON_RECYCLING_INSTANCE, 1), this.valueFormatter, pipelineAggregators(), getMetaData());
|
||||
BigArrays.NON_RECYCLING_INSTANCE, 1), pipelineAggregators(), getMetaData());
|
||||
}
|
||||
reduced.merge(cardinality);
|
||||
}
|
||||
|
@ -129,9 +126,6 @@ public final class InternalCardinality extends InternalNumericMetricsAggregation
|
|||
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
final long cardinality = getValue();
|
||||
builder.field(CommonFields.VALUE, cardinality);
|
||||
if (!(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, valueFormatter.format(cardinality));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
|
|
@ -21,12 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.max;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -56,10 +55,10 @@ public class InternalMax extends InternalNumericMetricsAggregation.SingleValue i
|
|||
|
||||
InternalMax() {} // for serialization
|
||||
|
||||
public InternalMax(String name, double max, ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
public InternalMax(String name, double max, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.valueFormatter = formatter;
|
||||
this.format = formatter;
|
||||
this.max = max;
|
||||
}
|
||||
|
||||
|
@ -84,18 +83,18 @@ public class InternalMax extends InternalNumericMetricsAggregation.SingleValue i
|
|||
for (InternalAggregation aggregation : aggregations) {
|
||||
max = Math.max(max, ((InternalMax) aggregation).max);
|
||||
}
|
||||
return new InternalMax(name, max, valueFormatter, pipelineAggregators(), getMetaData());
|
||||
return new InternalMax(name, max, format, pipelineAggregators(), getMetaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
max = in.readDouble();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeDouble(max);
|
||||
}
|
||||
|
||||
|
@ -103,8 +102,8 @@ public class InternalMax extends InternalNumericMetricsAggregation.SingleValue i
|
|||
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
boolean hasValue = !Double.isInfinite(max);
|
||||
builder.field(CommonFields.VALUE, hasValue ? max : null);
|
||||
if (hasValue && !(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, valueFormatter.format(max));
|
||||
if (hasValue && format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, format.format(max));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.elasticsearch.common.util.DoubleArray;
|
|||
import org.elasticsearch.index.fielddata.NumericDoubleValues;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.search.MultiValueMode;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollector;
|
||||
|
@ -33,7 +34,6 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -45,11 +45,11 @@ import java.util.Map;
|
|||
public class MaxAggregator extends NumericMetricsAggregator.SingleValue {
|
||||
|
||||
final ValuesSource.Numeric valuesSource;
|
||||
final ValueFormatter formatter;
|
||||
final DocValueFormat formatter;
|
||||
|
||||
DoubleArray maxes;
|
||||
|
||||
public MaxAggregator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter,
|
||||
public MaxAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
|
||||
AggregationContext context,
|
||||
Aggregator parent, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) throws IOException {
|
||||
|
|
|
@ -44,13 +44,13 @@ public class MaxAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSo
|
|||
@Override
|
||||
protected Aggregator createUnmapped(Aggregator parent,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new MaxAggregator(name, null, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new MaxAggregator(name, null, config.format(), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent,
|
||||
boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new MaxAggregator(name, valuesSource, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new MaxAggregator(name, valuesSource, config.format(), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,12 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.min;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -57,11 +56,11 @@ public class InternalMin extends InternalNumericMetricsAggregation.SingleValue i
|
|||
|
||||
InternalMin() {} // for serialization
|
||||
|
||||
public InternalMin(String name, double min, ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
public InternalMin(String name, double min, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.min = min;
|
||||
this.valueFormatter = formatter;
|
||||
this.format = formatter;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -85,18 +84,18 @@ public class InternalMin extends InternalNumericMetricsAggregation.SingleValue i
|
|||
for (InternalAggregation aggregation : aggregations) {
|
||||
min = Math.min(min, ((InternalMin) aggregation).min);
|
||||
}
|
||||
return new InternalMin(getName(), min, this.valueFormatter, pipelineAggregators(), getMetaData());
|
||||
return new InternalMin(getName(), min, this.format, pipelineAggregators(), getMetaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
min = in.readDouble();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeDouble(min);
|
||||
}
|
||||
|
||||
|
@ -104,8 +103,8 @@ public class InternalMin extends InternalNumericMetricsAggregation.SingleValue i
|
|||
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
boolean hasValue = !Double.isInfinite(min);
|
||||
builder.field(CommonFields.VALUE, hasValue ? min : null);
|
||||
if (hasValue && !(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, valueFormatter.format(min));
|
||||
if (hasValue && format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, format.format(min));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.elasticsearch.common.util.DoubleArray;
|
|||
import org.elasticsearch.index.fielddata.NumericDoubleValues;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.search.MultiValueMode;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollector;
|
||||
|
@ -33,7 +34,6 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -45,11 +45,11 @@ import java.util.Map;
|
|||
public class MinAggregator extends NumericMetricsAggregator.SingleValue {
|
||||
|
||||
final ValuesSource.Numeric valuesSource;
|
||||
final ValueFormatter formatter;
|
||||
final DocValueFormat format;
|
||||
|
||||
DoubleArray mins;
|
||||
|
||||
public MinAggregator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter,
|
||||
public MinAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
|
||||
AggregationContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) throws IOException {
|
||||
super(name, context, parent, pipelineAggregators, metaData);
|
||||
|
@ -58,7 +58,7 @@ public class MinAggregator extends NumericMetricsAggregator.SingleValue {
|
|||
mins = context.bigArrays().newDoubleArray(1, false);
|
||||
mins.fill(0, mins.size(), Double.POSITIVE_INFINITY);
|
||||
}
|
||||
this.formatter = formatter;
|
||||
this.format = formatter;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -106,12 +106,12 @@ public class MinAggregator extends NumericMetricsAggregator.SingleValue {
|
|||
if (valuesSource == null || bucket >= mins.size()) {
|
||||
return buildEmptyAggregation();
|
||||
}
|
||||
return new InternalMin(name, mins.get(bucket), formatter, pipelineAggregators(), metaData());
|
||||
return new InternalMin(name, mins.get(bucket), format, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalAggregation buildEmptyAggregation() {
|
||||
return new InternalMin(name, Double.POSITIVE_INFINITY, formatter, pipelineAggregators(), metaData());
|
||||
return new InternalMin(name, Double.POSITIVE_INFINITY, format, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -44,12 +44,12 @@ public class MinAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSo
|
|||
@Override
|
||||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new MinAggregator(name, null, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new MinAggregator(name, null, config.format(), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new MinAggregator(name, valuesSource, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new MinAggregator(name, valuesSource, config.format(), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.elasticsearch.common.util.ArrayUtils;
|
|||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.common.util.ObjectArray;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollector;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
|
||||
|
@ -33,7 +34,6 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -47,18 +47,18 @@ public abstract class AbstractHDRPercentilesAggregator extends NumericMetricsAgg
|
|||
|
||||
protected final double[] keys;
|
||||
protected final ValuesSource.Numeric valuesSource;
|
||||
protected final ValueFormatter formatter;
|
||||
protected final DocValueFormat format;
|
||||
protected ObjectArray<DoubleHistogram> states;
|
||||
protected final int numberOfSignificantValueDigits;
|
||||
protected final boolean keyed;
|
||||
|
||||
public AbstractHDRPercentilesAggregator(String name, ValuesSource.Numeric valuesSource, AggregationContext context, Aggregator parent,
|
||||
double[] keys, int numberOfSignificantValueDigits, boolean keyed, ValueFormatter formatter,
|
||||
double[] keys, int numberOfSignificantValueDigits, boolean keyed, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
super(name, context, parent, pipelineAggregators, metaData);
|
||||
this.valuesSource = valuesSource;
|
||||
this.keyed = keyed;
|
||||
this.formatter = formatter;
|
||||
this.format = formatter;
|
||||
this.states = context.bigArrays().newObjectArray(1);
|
||||
this.keys = keys;
|
||||
this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
|
||||
|
|
|
@ -23,11 +23,10 @@ import org.HdrHistogram.DoubleHistogram;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
|
@ -43,14 +42,14 @@ abstract class AbstractInternalHDRPercentiles extends InternalNumericMetricsAggr
|
|||
|
||||
AbstractInternalHDRPercentiles() {} // for serialization
|
||||
|
||||
public AbstractInternalHDRPercentiles(String name, double[] keys, DoubleHistogram state, boolean keyed, ValueFormatter formatter,
|
||||
public AbstractInternalHDRPercentiles(String name, double[] keys, DoubleHistogram state, boolean keyed, DocValueFormat format,
|
||||
List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.keys = keys;
|
||||
this.state = state;
|
||||
this.keyed = keyed;
|
||||
this.valueFormatter = formatter;
|
||||
this.format = format;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -83,7 +82,7 @@ abstract class AbstractInternalHDRPercentiles extends InternalNumericMetricsAggr
|
|||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
keys = new double[in.readInt()];
|
||||
for (int i = 0; i < keys.length; ++i) {
|
||||
keys[i] = in.readDouble();
|
||||
|
@ -103,7 +102,7 @@ abstract class AbstractInternalHDRPercentiles extends InternalNumericMetricsAggr
|
|||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeInt(keys.length);
|
||||
for (int i = 0 ; i < keys.length; ++i) {
|
||||
out.writeDouble(keys[i]);
|
||||
|
@ -124,8 +123,8 @@ abstract class AbstractInternalHDRPercentiles extends InternalNumericMetricsAggr
|
|||
String key = String.valueOf(keys[i]);
|
||||
double value = value(keys[i]);
|
||||
builder.field(key, value);
|
||||
if (!(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(key + "_as_string", valueFormatter.format(value));
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(key + "_as_string", format.format(value));
|
||||
}
|
||||
}
|
||||
builder.endObject();
|
||||
|
@ -136,8 +135,8 @@ abstract class AbstractInternalHDRPercentiles extends InternalNumericMetricsAggr
|
|||
builder.startObject();
|
||||
builder.field(CommonFields.KEY, keys[i]);
|
||||
builder.field(CommonFields.VALUE, value);
|
||||
if (!(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, valueFormatter.format(value));
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, format.format(value));
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
|
|
|
@ -19,12 +19,12 @@
|
|||
package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
|
||||
|
||||
import org.HdrHistogram.DoubleHistogram;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -36,9 +36,9 @@ import java.util.Map;
|
|||
public class HDRPercentileRanksAggregator extends AbstractHDRPercentilesAggregator {
|
||||
|
||||
public HDRPercentileRanksAggregator(String name, Numeric valuesSource, AggregationContext context, Aggregator parent,
|
||||
double[] percents, int numberOfSignificantValueDigits, boolean keyed, ValueFormatter formatter,
|
||||
double[] percents, int numberOfSignificantValueDigits, boolean keyed, DocValueFormat format,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
super(name, valuesSource, context, parent, percents, numberOfSignificantValueDigits, keyed, formatter, pipelineAggregators,
|
||||
super(name, valuesSource, context, parent, percents, numberOfSignificantValueDigits, keyed, format, pipelineAggregators,
|
||||
metaData);
|
||||
}
|
||||
|
||||
|
@ -48,7 +48,7 @@ public class HDRPercentileRanksAggregator extends AbstractHDRPercentilesAggregat
|
|||
if (state == null) {
|
||||
return buildEmptyAggregation();
|
||||
} else {
|
||||
return new InternalHDRPercentileRanks(name, keys, state, keyed, formatter, pipelineAggregators(), metaData());
|
||||
return new InternalHDRPercentileRanks(name, keys, state, keyed, format, pipelineAggregators(), metaData());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -58,7 +58,7 @@ public class HDRPercentileRanksAggregator extends AbstractHDRPercentilesAggregat
|
|||
state = new DoubleHistogram(numberOfSignificantValueDigits);
|
||||
state.setAutoResize(true);
|
||||
return new InternalHDRPercentileRanks(name, keys, state,
|
||||
keyed, formatter, pipelineAggregators(), metaData());
|
||||
keyed, format, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -54,14 +54,14 @@ public class HDRPercentileRanksAggregatorFactory
|
|||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new HDRPercentileRanksAggregator(name, null, context, parent, values, numberOfSignificantValueDigits, keyed,
|
||||
config.formatter(), pipelineAggregators, metaData);
|
||||
config.format(), pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new HDRPercentileRanksAggregator(name, valuesSource, context, parent, values, numberOfSignificantValueDigits, keyed,
|
||||
config.formatter(), pipelineAggregators, metaData);
|
||||
config.format(), pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -19,12 +19,12 @@
|
|||
package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
|
||||
|
||||
import org.HdrHistogram.DoubleHistogram;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -36,7 +36,7 @@ import java.util.Map;
|
|||
public class HDRPercentilesAggregator extends AbstractHDRPercentilesAggregator {
|
||||
|
||||
public HDRPercentilesAggregator(String name, Numeric valuesSource, AggregationContext context, Aggregator parent, double[] percents,
|
||||
int numberOfSignificantValueDigits, boolean keyed, ValueFormatter formatter,
|
||||
int numberOfSignificantValueDigits, boolean keyed, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
super(name, valuesSource, context, parent, percents, numberOfSignificantValueDigits, keyed, formatter,
|
||||
pipelineAggregators, metaData);
|
||||
|
@ -48,7 +48,7 @@ public class HDRPercentilesAggregator extends AbstractHDRPercentilesAggregator {
|
|||
if (state == null) {
|
||||
return buildEmptyAggregation();
|
||||
} else {
|
||||
return new InternalHDRPercentiles(name, keys, state, keyed, formatter, pipelineAggregators(), metaData());
|
||||
return new InternalHDRPercentiles(name, keys, state, keyed, format, pipelineAggregators(), metaData());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -69,6 +69,6 @@ public class HDRPercentilesAggregator extends AbstractHDRPercentilesAggregator {
|
|||
state.setAutoResize(true);
|
||||
return new InternalHDRPercentiles(name, keys, state,
|
||||
keyed,
|
||||
formatter, pipelineAggregators(), metaData());
|
||||
format, pipelineAggregators(), metaData());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -53,14 +53,14 @@ public class HDRPercentilesAggregatorFactory extends ValuesSourceAggregatorFacto
|
|||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new HDRPercentilesAggregator(name, null, context, parent, percents, numberOfSignificantValueDigits, keyed,
|
||||
config.formatter(), pipelineAggregators, metaData);
|
||||
config.format(), pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new HDRPercentilesAggregator(name, valuesSource, context, parent, percents, numberOfSignificantValueDigits, keyed,
|
||||
config.formatter(), pipelineAggregators, metaData);
|
||||
config.format(), pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -20,12 +20,12 @@ package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
|
|||
|
||||
import org.HdrHistogram.DoubleHistogram;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentile;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -55,7 +55,7 @@ public class InternalHDRPercentileRanks extends AbstractInternalHDRPercentiles i
|
|||
InternalHDRPercentileRanks() {
|
||||
} // for serialization
|
||||
|
||||
public InternalHDRPercentileRanks(String name, double[] cdfValues, DoubleHistogram state, boolean keyed, ValueFormatter formatter,
|
||||
public InternalHDRPercentileRanks(String name, double[] cdfValues, DoubleHistogram state, boolean keyed, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
super(name, cdfValues, state, keyed, formatter, pipelineAggregators, metaData);
|
||||
}
|
||||
|
@ -83,7 +83,7 @@ public class InternalHDRPercentileRanks extends AbstractInternalHDRPercentiles i
|
|||
@Override
|
||||
protected AbstractInternalHDRPercentiles createReduced(String name, double[] keys, DoubleHistogram merged, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
return new InternalHDRPercentileRanks(name, keys, merged, keyed, valueFormatter, pipelineAggregators, metaData);
|
||||
return new InternalHDRPercentileRanks(name, keys, merged, keyed, format, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -20,12 +20,12 @@ package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
|
|||
|
||||
import org.HdrHistogram.DoubleHistogram;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentile;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -55,7 +55,7 @@ public class InternalHDRPercentiles extends AbstractInternalHDRPercentiles imple
|
|||
InternalHDRPercentiles() {
|
||||
} // for serialization
|
||||
|
||||
public InternalHDRPercentiles(String name, double[] percents, DoubleHistogram state, boolean keyed, ValueFormatter formatter,
|
||||
public InternalHDRPercentiles(String name, double[] percents, DoubleHistogram state, boolean keyed, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
super(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
|
||||
}
|
||||
|
@ -86,7 +86,7 @@ public class InternalHDRPercentiles extends AbstractInternalHDRPercentiles imple
|
|||
@Override
|
||||
protected AbstractInternalHDRPercentiles createReduced(String name, double[] keys, DoubleHistogram merged, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
return new InternalHDRPercentiles(name, keys, merged, keyed, valueFormatter, pipelineAggregators, metaData);
|
||||
return new InternalHDRPercentiles(name, keys, merged, keyed, format, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -22,11 +22,10 @@ package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -40,14 +39,14 @@ abstract class AbstractInternalTDigestPercentiles extends InternalNumericMetrics
|
|||
|
||||
AbstractInternalTDigestPercentiles() {} // for serialization
|
||||
|
||||
public AbstractInternalTDigestPercentiles(String name, double[] keys, TDigestState state, boolean keyed, ValueFormatter formatter,
|
||||
public AbstractInternalTDigestPercentiles(String name, double[] keys, TDigestState state, boolean keyed, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.keys = keys;
|
||||
this.state = state;
|
||||
this.keyed = keyed;
|
||||
this.valueFormatter = formatter;
|
||||
this.format = formatter;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -79,7 +78,7 @@ abstract class AbstractInternalTDigestPercentiles extends InternalNumericMetrics
|
|||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
keys = new double[in.readInt()];
|
||||
for (int i = 0; i < keys.length; ++i) {
|
||||
keys[i] = in.readDouble();
|
||||
|
@ -90,7 +89,7 @@ abstract class AbstractInternalTDigestPercentiles extends InternalNumericMetrics
|
|||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeInt(keys.length);
|
||||
for (int i = 0 ; i < keys.length; ++i) {
|
||||
out.writeDouble(keys[i]);
|
||||
|
@ -107,8 +106,8 @@ abstract class AbstractInternalTDigestPercentiles extends InternalNumericMetrics
|
|||
String key = String.valueOf(keys[i]);
|
||||
double value = value(keys[i]);
|
||||
builder.field(key, value);
|
||||
if (!(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(key + "_as_string", valueFormatter.format(value));
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(key + "_as_string", format.format(value));
|
||||
}
|
||||
}
|
||||
builder.endObject();
|
||||
|
@ -119,8 +118,8 @@ abstract class AbstractInternalTDigestPercentiles extends InternalNumericMetrics
|
|||
builder.startObject();
|
||||
builder.field(CommonFields.KEY, keys[i]);
|
||||
builder.field(CommonFields.VALUE, value);
|
||||
if (!(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, valueFormatter.format(value));
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, format.format(value));
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.elasticsearch.common.util.ArrayUtils;
|
|||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.common.util.ObjectArray;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollector;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
|
||||
|
@ -32,7 +33,6 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -46,13 +46,13 @@ public abstract class AbstractTDigestPercentilesAggregator extends NumericMetric
|
|||
|
||||
protected final double[] keys;
|
||||
protected final ValuesSource.Numeric valuesSource;
|
||||
protected final ValueFormatter formatter;
|
||||
protected final DocValueFormat formatter;
|
||||
protected ObjectArray<TDigestState> states;
|
||||
protected final double compression;
|
||||
protected final boolean keyed;
|
||||
|
||||
public AbstractTDigestPercentilesAggregator(String name, ValuesSource.Numeric valuesSource, AggregationContext context, Aggregator parent,
|
||||
double[] keys, double compression, boolean keyed, ValueFormatter formatter,
|
||||
double[] keys, double compression, boolean keyed, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
super(name, context, parent, pipelineAggregators, metaData);
|
||||
this.valuesSource = valuesSource;
|
||||
|
|
|
@ -19,12 +19,12 @@
|
|||
package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentile;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -53,7 +53,7 @@ public class InternalTDigestPercentileRanks extends AbstractInternalTDigestPerce
|
|||
|
||||
InternalTDigestPercentileRanks() {} // for serialization
|
||||
|
||||
public InternalTDigestPercentileRanks(String name, double[] cdfValues, TDigestState state, boolean keyed, ValueFormatter formatter,
|
||||
public InternalTDigestPercentileRanks(String name, double[] cdfValues, TDigestState state, boolean keyed, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
super(name, cdfValues, state, keyed, formatter, pipelineAggregators, metaData);
|
||||
}
|
||||
|
@ -81,7 +81,7 @@ public class InternalTDigestPercentileRanks extends AbstractInternalTDigestPerce
|
|||
@Override
|
||||
protected AbstractInternalTDigestPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
return new InternalTDigestPercentileRanks(name, keys, merged, keyed, valueFormatter, pipelineAggregators, metaData);
|
||||
return new InternalTDigestPercentileRanks(name, keys, merged, keyed, format, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -19,12 +19,12 @@
|
|||
package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentile;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -54,7 +54,7 @@ public class InternalTDigestPercentiles extends AbstractInternalTDigestPercentil
|
|||
InternalTDigestPercentiles() {
|
||||
} // for serialization
|
||||
|
||||
public InternalTDigestPercentiles(String name, double[] percents, TDigestState state, boolean keyed, ValueFormatter formatter,
|
||||
public InternalTDigestPercentiles(String name, double[] percents, TDigestState state, boolean keyed, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
super(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
|
||||
}
|
||||
|
@ -82,7 +82,7 @@ public class InternalTDigestPercentiles extends AbstractInternalTDigestPercentil
|
|||
@Override
|
||||
protected AbstractInternalTDigestPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
return new InternalTDigestPercentiles(name, keys, merged, keyed, valueFormatter, pipelineAggregators, metaData);
|
||||
return new InternalTDigestPercentiles(name, keys, merged, keyed, format, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -18,12 +18,12 @@
|
|||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
|
||||
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -35,7 +35,7 @@ import java.util.Map;
|
|||
public class TDigestPercentileRanksAggregator extends AbstractTDigestPercentilesAggregator {
|
||||
|
||||
public TDigestPercentileRanksAggregator(String name, Numeric valuesSource, AggregationContext context, Aggregator parent, double[] percents,
|
||||
double compression, boolean keyed, ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
double compression, boolean keyed, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
super(name, valuesSource, context, parent, percents, compression, keyed, formatter, pipelineAggregators, metaData);
|
||||
|
|
|
@ -53,14 +53,14 @@ public class TDigestPercentileRanksAggregatorFactory
|
|||
@Override
|
||||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new TDigestPercentileRanksAggregator(name, null, context, parent, percents, compression, keyed, config.formatter(),
|
||||
return new TDigestPercentileRanksAggregator(name, null, context, parent, percents, compression, keyed, config.format(),
|
||||
pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new TDigestPercentileRanksAggregator(name, valuesSource, context, parent, percents, compression, keyed, config.formatter(),
|
||||
return new TDigestPercentileRanksAggregator(name, valuesSource, context, parent, percents, compression, keyed, config.format(),
|
||||
pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
|
|
|
@ -18,12 +18,12 @@
|
|||
*/
|
||||
package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
|
||||
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -36,7 +36,7 @@ public class TDigestPercentilesAggregator extends AbstractTDigestPercentilesAggr
|
|||
|
||||
public TDigestPercentilesAggregator(String name, Numeric valuesSource, AggregationContext context,
|
||||
Aggregator parent, double[] percents,
|
||||
double compression, boolean keyed, ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
double compression, boolean keyed, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) throws IOException {
|
||||
super(name, valuesSource, context, parent, percents, compression, keyed, formatter, pipelineAggregators, metaData);
|
||||
}
|
||||
|
|
|
@ -53,14 +53,14 @@ public class TDigestPercentilesAggregatorFactory
|
|||
@Override
|
||||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new TDigestPercentilesAggregator(name, null, context, parent, percents, compression, keyed, config.formatter(),
|
||||
return new TDigestPercentilesAggregator(name, null, context, parent, percents, compression, keyed, config.format(),
|
||||
pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new TDigestPercentilesAggregator(name, valuesSource, context, parent, percents, compression, keyed, config.formatter(),
|
||||
return new TDigestPercentilesAggregator(name, valuesSource, context, parent, percents, compression, keyed, config.format(),
|
||||
pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
|
|
|
@ -22,12 +22,11 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
|||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilderString;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -69,7 +68,7 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue
|
|||
|
||||
protected InternalStats() {} // for serialization
|
||||
|
||||
public InternalStats(String name, long count, double sum, double min, double max, ValueFormatter formatter,
|
||||
public InternalStats(String name, long count, double sum, double min, double max, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
|
@ -77,7 +76,7 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue
|
|||
this.sum = sum;
|
||||
this.min = min;
|
||||
this.max = max;
|
||||
this.valueFormatter = formatter;
|
||||
this.format = formatter;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -162,12 +161,12 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue
|
|||
max = Math.max(max, stats.getMax());
|
||||
sum += stats.getSum();
|
||||
}
|
||||
return new InternalStats(name, count, sum, min, max, valueFormatter, pipelineAggregators(), getMetaData());
|
||||
return new InternalStats(name, count, sum, min, max, format, pipelineAggregators(), getMetaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
count = in.readVLong();
|
||||
min = in.readDouble();
|
||||
max = in.readDouble();
|
||||
|
@ -180,7 +179,7 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue
|
|||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeVLong(count);
|
||||
out.writeDouble(min);
|
||||
out.writeDouble(max);
|
||||
|
@ -210,11 +209,11 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue
|
|||
builder.field(Fields.MAX, count != 0 ? max : null);
|
||||
builder.field(Fields.AVG, count != 0 ? getAvg() : null);
|
||||
builder.field(Fields.SUM, count != 0 ? sum : null);
|
||||
if (count != 0 && !(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(Fields.MIN_AS_STRING, valueFormatter.format(min));
|
||||
builder.field(Fields.MAX_AS_STRING, valueFormatter.format(max));
|
||||
builder.field(Fields.AVG_AS_STRING, valueFormatter.format(getAvg()));
|
||||
builder.field(Fields.SUM_AS_STRING, valueFormatter.format(sum));
|
||||
if (count != 0 && format != DocValueFormat.RAW) {
|
||||
builder.field(Fields.MIN_AS_STRING, format.format(min));
|
||||
builder.field(Fields.MAX_AS_STRING, format.format(max));
|
||||
builder.field(Fields.AVG_AS_STRING, format.format(getAvg()));
|
||||
builder.field(Fields.SUM_AS_STRING, format.format(sum));
|
||||
}
|
||||
otherStatsToXCotent(builder, params);
|
||||
return builder;
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.elasticsearch.common.util.BigArrays;
|
|||
import org.elasticsearch.common.util.DoubleArray;
|
||||
import org.elasticsearch.common.util.LongArray;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollector;
|
||||
|
@ -32,7 +33,6 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -44,7 +44,7 @@ import java.util.Map;
|
|||
public class StatsAggregator extends NumericMetricsAggregator.MultiValue {
|
||||
|
||||
final ValuesSource.Numeric valuesSource;
|
||||
final ValueFormatter formatter;
|
||||
final DocValueFormat format;
|
||||
|
||||
LongArray counts;
|
||||
DoubleArray sums;
|
||||
|
@ -52,7 +52,7 @@ public class StatsAggregator extends NumericMetricsAggregator.MultiValue {
|
|||
DoubleArray maxes;
|
||||
|
||||
|
||||
public StatsAggregator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter,
|
||||
public StatsAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat format,
|
||||
AggregationContext context,
|
||||
Aggregator parent, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) throws IOException {
|
||||
|
@ -67,7 +67,7 @@ public class StatsAggregator extends NumericMetricsAggregator.MultiValue {
|
|||
maxes = bigArrays.newDoubleArray(1, false);
|
||||
maxes.fill(0, maxes.size(), Double.NEGATIVE_INFINITY);
|
||||
}
|
||||
this.formatter = formatter;
|
||||
this.format = format;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -156,12 +156,12 @@ public class StatsAggregator extends NumericMetricsAggregator.MultiValue {
|
|||
return buildEmptyAggregation();
|
||||
}
|
||||
return new InternalStats(name, counts.get(bucket), sums.get(bucket), mins.get(bucket),
|
||||
maxes.get(bucket), formatter, pipelineAggregators(), metaData());
|
||||
maxes.get(bucket), format, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalAggregation buildEmptyAggregation() {
|
||||
return new InternalStats(name, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, formatter, pipelineAggregators(), metaData());
|
||||
return new InternalStats(name, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, format, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -44,12 +44,12 @@ public class StatsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
|
|||
@Override
|
||||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new StatsAggregator(name, null, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new StatsAggregator(name, null, config.format(), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new StatsAggregator(name, valuesSource, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new StatsAggregator(name, valuesSource, config.format(), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -25,16 +25,15 @@ import org.elasticsearch.common.util.BigArrays;
|
|||
import org.elasticsearch.common.util.DoubleArray;
|
||||
import org.elasticsearch.common.util.LongArray;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollector;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
|
||||
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -48,7 +47,7 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue
|
|||
public static final ParseField SIGMA_FIELD = new ParseField("sigma");
|
||||
|
||||
final ValuesSource.Numeric valuesSource;
|
||||
final ValueFormatter formatter;
|
||||
final DocValueFormat format;
|
||||
final double sigma;
|
||||
|
||||
LongArray counts;
|
||||
|
@ -57,13 +56,13 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue
|
|||
DoubleArray maxes;
|
||||
DoubleArray sumOfSqrs;
|
||||
|
||||
public ExtendedStatsAggregator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter,
|
||||
public ExtendedStatsAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
|
||||
AggregationContext context, Aggregator parent, double sigma, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
super(name, context, parent, pipelineAggregators, metaData);
|
||||
this.valuesSource = valuesSource;
|
||||
this.formatter = formatter;
|
||||
this.format = formatter;
|
||||
this.sigma = sigma;
|
||||
if (valuesSource != null) {
|
||||
final BigArrays bigArrays = context.bigArrays();
|
||||
|
@ -187,13 +186,13 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue
|
|||
return buildEmptyAggregation();
|
||||
}
|
||||
return new InternalExtendedStats(name, counts.get(bucket), sums.get(bucket),
|
||||
mins.get(bucket), maxes.get(bucket), sumOfSqrs.get(bucket), sigma, formatter,
|
||||
mins.get(bucket), maxes.get(bucket), sumOfSqrs.get(bucket), sigma, format,
|
||||
pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalAggregation buildEmptyAggregation() {
|
||||
return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0d, sigma, formatter, pipelineAggregators(),
|
||||
return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0d, sigma, format, pipelineAggregators(),
|
||||
metaData());
|
||||
}
|
||||
|
||||
|
|
|
@ -48,12 +48,12 @@ public class ExtendedStatsAggregatorFactory extends ValuesSourceAggregatorFactor
|
|||
@Override
|
||||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new ExtendedStatsAggregator(name, null, config.formatter(), context, parent, sigma, pipelineAggregators, metaData);
|
||||
return new ExtendedStatsAggregator(name, null, config.format(), context, parent, sigma, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new ExtendedStatsAggregator(name, valuesSource, config.formatter(), context, parent, sigma, pipelineAggregators, metaData);
|
||||
return new ExtendedStatsAggregator(name, valuesSource, config.format(), context, parent, sigma, pipelineAggregators, metaData);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,11 +23,11 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
|||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilderString;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -68,7 +68,7 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat
|
|||
protected InternalExtendedStats() {} // for serialization
|
||||
|
||||
public InternalExtendedStats(String name, long count, double sum, double min, double max, double sumOfSqrs, double sigma,
|
||||
ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
super(name, count, sum, min, max, formatter, pipelineAggregators, metaData);
|
||||
this.sumOfSqrs = sumOfSqrs;
|
||||
this.sigma = sigma;
|
||||
|
@ -155,7 +155,7 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat
|
|||
}
|
||||
final InternalStats stats = super.doReduce(aggregations, reduceContext);
|
||||
return new InternalExtendedStats(name, stats.getCount(), stats.getSum(), stats.getMin(), stats.getMax(), sumOfSqrs, sigma,
|
||||
valueFormatter, pipelineAggregators(), getMetaData());
|
||||
format, pipelineAggregators(), getMetaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -195,9 +195,9 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat
|
|||
.field(Fields.LOWER, count != 0 ? getStdDeviationBound(Bounds.LOWER) : null)
|
||||
.endObject();
|
||||
|
||||
if (count != 0 && !(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(Fields.SUM_OF_SQRS_AS_STRING, valueFormatter.format(sumOfSqrs));
|
||||
builder.field(Fields.VARIANCE_AS_STRING, valueFormatter.format(getVariance()));
|
||||
if (count != 0 && format != DocValueFormat.RAW) {
|
||||
builder.field(Fields.SUM_OF_SQRS_AS_STRING, format.format(sumOfSqrs));
|
||||
builder.field(Fields.VARIANCE_AS_STRING, format.format(getVariance()));
|
||||
builder.field(Fields.STD_DEVIATION_AS_STRING, getStdDeviationAsString());
|
||||
|
||||
builder.startObject(Fields.STD_DEVIATION_BOUNDS_AS_STRING)
|
||||
|
|
|
@ -21,12 +21,11 @@ package org.elasticsearch.search.aggregations.metrics.sum;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -56,11 +55,11 @@ public class InternalSum extends InternalNumericMetricsAggregation.SingleValue i
|
|||
|
||||
InternalSum() {} // for serialization
|
||||
|
||||
InternalSum(String name, double sum, ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
InternalSum(String name, double sum, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.sum = sum;
|
||||
this.valueFormatter = formatter;
|
||||
this.format = formatter;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -84,26 +83,26 @@ public class InternalSum extends InternalNumericMetricsAggregation.SingleValue i
|
|||
for (InternalAggregation aggregation : aggregations) {
|
||||
sum += ((InternalSum) aggregation).sum;
|
||||
}
|
||||
return new InternalSum(name, sum, valueFormatter, pipelineAggregators(), getMetaData());
|
||||
return new InternalSum(name, sum, format, pipelineAggregators(), getMetaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
sum = in.readDouble();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeDouble(sum);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(CommonFields.VALUE, sum);
|
||||
if (!(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, valueFormatter.format(sum));
|
||||
if (format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, format.format(sum));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.elasticsearch.common.lease.Releasables;
|
|||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.common.util.DoubleArray;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.LeafBucketCollector;
|
||||
|
@ -31,7 +32,6 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -43,15 +43,15 @@ import java.util.Map;
|
|||
public class SumAggregator extends NumericMetricsAggregator.SingleValue {
|
||||
|
||||
final ValuesSource.Numeric valuesSource;
|
||||
final ValueFormatter formatter;
|
||||
final DocValueFormat format;
|
||||
|
||||
DoubleArray sums;
|
||||
|
||||
public SumAggregator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter, AggregationContext context,
|
||||
public SumAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter, AggregationContext context,
|
||||
Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
super(name, context, parent, pipelineAggregators, metaData);
|
||||
this.valuesSource = valuesSource;
|
||||
this.formatter = formatter;
|
||||
this.format = formatter;
|
||||
if (valuesSource != null) {
|
||||
sums = context.bigArrays().newDoubleArray(1, true);
|
||||
}
|
||||
|
@ -98,12 +98,12 @@ public class SumAggregator extends NumericMetricsAggregator.SingleValue {
|
|||
if (valuesSource == null || bucket >= sums.size()) {
|
||||
return buildEmptyAggregation();
|
||||
}
|
||||
return new InternalSum(name, sums.get(bucket), formatter, pipelineAggregators(), metaData());
|
||||
return new InternalSum(name, sums.get(bucket), format, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalAggregation buildEmptyAggregation() {
|
||||
return new InternalSum(name, 0.0, formatter, pipelineAggregators(), metaData());
|
||||
return new InternalSum(name, 0.0, format, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -44,12 +44,12 @@ public class SumAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSo
|
|||
@Override
|
||||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new SumAggregator(name, null, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new SumAggregator(name, null, config.format(), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new SumAggregator(name, valuesSource, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new SumAggregator(name, valuesSource, config.format(), context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -25,8 +25,6 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
|
|||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -56,11 +54,10 @@ public class InternalValueCount extends InternalNumericMetricsAggregation.Single
|
|||
|
||||
InternalValueCount() {} // for serialization
|
||||
|
||||
public InternalValueCount(String name, long value, ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
public InternalValueCount(String name, long value, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.value = value;
|
||||
this.valueFormatter = formatter;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -84,27 +81,22 @@ public class InternalValueCount extends InternalNumericMetricsAggregation.Single
|
|||
for (InternalAggregation aggregation : aggregations) {
|
||||
valueCount += ((InternalValueCount) aggregation).value;
|
||||
}
|
||||
return new InternalValueCount(name, valueCount, valueFormatter, pipelineAggregators(), getMetaData());
|
||||
return new InternalValueCount(name, valueCount, pipelineAggregators(), getMetaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
value = in.readVLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeVLong(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(CommonFields.VALUE, value);
|
||||
if (!(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, valueFormatter.format(value));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
|
|
@ -31,7 +31,6 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -46,18 +45,16 @@ import java.util.Map;
|
|||
public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue {
|
||||
|
||||
final ValuesSource valuesSource;
|
||||
final ValueFormatter formatter;
|
||||
|
||||
// a count per bucket
|
||||
LongArray counts;
|
||||
|
||||
public ValueCountAggregator(String name, ValuesSource valuesSource, ValueFormatter formatter,
|
||||
public ValueCountAggregator(String name, ValuesSource valuesSource,
|
||||
AggregationContext aggregationContext, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
super(name, aggregationContext, parent, pipelineAggregators, metaData);
|
||||
this.valuesSource = valuesSource;
|
||||
this.formatter = formatter;
|
||||
if (valuesSource != null) {
|
||||
counts = context.bigArrays().newLongArray(1, true);
|
||||
}
|
||||
|
@ -93,12 +90,12 @@ public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue {
|
|||
if (valuesSource == null || bucket >= counts.size()) {
|
||||
return buildEmptyAggregation();
|
||||
}
|
||||
return new InternalValueCount(name, counts.get(bucket), formatter, pipelineAggregators(), metaData());
|
||||
return new InternalValueCount(name, counts.get(bucket), pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalAggregation buildEmptyAggregation() {
|
||||
return new InternalValueCount(name, 0L, formatter, pipelineAggregators(), metaData());
|
||||
return new InternalValueCount(name, 0L, pipelineAggregators(), metaData());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -43,12 +43,12 @@ public class ValueCountAggregatorFactory extends ValuesSourceAggregatorFactory<V
|
|||
@Override
|
||||
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||
throws IOException {
|
||||
return new ValueCountAggregator(name, null, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new ValueCountAggregator(name, null, context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Aggregator doCreateInternal(ValuesSource valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
|
||||
return new ValueCountAggregator(name, valuesSource, config.formatter(), context, parent, pipelineAggregators, metaData);
|
||||
return new ValueCountAggregator(name, valuesSource, context, parent, pipelineAggregators, metaData);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,12 +22,11 @@ package org.elasticsearch.search.aggregations.pipeline;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -55,10 +54,10 @@ public class InternalSimpleValue extends InternalNumericMetricsAggregation.Singl
|
|||
protected InternalSimpleValue() {
|
||||
} // for serialization
|
||||
|
||||
public InternalSimpleValue(String name, double value, ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
public InternalSimpleValue(String name, double value, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.valueFormatter = formatter;
|
||||
this.format = formatter;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
|
@ -83,13 +82,13 @@ public class InternalSimpleValue extends InternalNumericMetricsAggregation.Singl
|
|||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
value = in.readDouble();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeDouble(value);
|
||||
}
|
||||
|
||||
|
@ -97,8 +96,8 @@ public class InternalSimpleValue extends InternalNumericMetricsAggregation.Singl
|
|||
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value));
|
||||
builder.field(CommonFields.VALUE, hasValue ? value : null);
|
||||
if (hasValue && !(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, valueFormatter.format(value));
|
||||
if (hasValue && format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, format.format(value));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;
|
|||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.Aggregation;
|
||||
import org.elasticsearch.search.aggregations.Aggregations;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
|
@ -32,8 +33,6 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationPath;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
@ -46,18 +45,18 @@ import java.util.Map;
|
|||
*/
|
||||
public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAggregator {
|
||||
|
||||
protected ValueFormatter formatter;
|
||||
protected DocValueFormat format;
|
||||
protected GapPolicy gapPolicy;
|
||||
|
||||
public BucketMetricsPipelineAggregator() {
|
||||
super();
|
||||
}
|
||||
|
||||
protected BucketMetricsPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter,
|
||||
protected BucketMetricsPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat format,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, bucketsPaths, metaData);
|
||||
this.gapPolicy = gapPolicy;
|
||||
this.formatter = formatter;
|
||||
this.format = format;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -113,13 +112,13 @@ public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAgg
|
|||
|
||||
@Override
|
||||
public void doReadFrom(StreamInput in) throws IOException {
|
||||
formatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
gapPolicy = GapPolicy.readFrom(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(format);
|
||||
gapPolicy.writeTo(out);
|
||||
}
|
||||
|
||||
|
|
|
@ -22,12 +22,11 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -60,11 +59,11 @@ public abstract class BucketMetricsPipelineAggregatorBuilder<AF extends BucketMe
|
|||
return format;
|
||||
}
|
||||
|
||||
protected ValueFormatter formatter() {
|
||||
protected DocValueFormat formatter() {
|
||||
if (format != null) {
|
||||
return ValueFormat.Patternable.Number.format(format).formatter();
|
||||
return new DocValueFormat.Decimal(format);
|
||||
} else {
|
||||
return ValueFormatter.RAW;
|
||||
return DocValueFormat.RAW;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -22,12 +22,11 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -58,12 +57,12 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation
|
|||
super();
|
||||
}
|
||||
|
||||
public InternalBucketMetricValue(String name, String[] keys, double value, ValueFormatter formatter,
|
||||
public InternalBucketMetricValue(String name, String[] keys, double value, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.keys = keys;
|
||||
this.value = value;
|
||||
this.valueFormatter = formatter;
|
||||
this.format = formatter;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -100,14 +99,14 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation
|
|||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
value = in.readDouble();
|
||||
keys = in.readStringArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeDouble(value);
|
||||
out.writeStringArray(keys);
|
||||
}
|
||||
|
@ -116,8 +115,8 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation
|
|||
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
boolean hasValue = !Double.isInfinite(value);
|
||||
builder.field(CommonFields.VALUE, hasValue ? value : null);
|
||||
if (hasValue && !(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, valueFormatter.format(value));
|
||||
if (hasValue && format != DocValueFormat.RAW) {
|
||||
builder.field(CommonFields.VALUE_AS_STRING, format.format(value));
|
||||
}
|
||||
builder.startArray("keys");
|
||||
for (String key : keys) {
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
|
@ -27,7 +28,6 @@ import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -56,9 +56,9 @@ public class AvgBucketPipelineAggregator extends BucketMetricsPipelineAggregator
|
|||
private AvgBucketPipelineAggregator() {
|
||||
}
|
||||
|
||||
protected AvgBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter,
|
||||
protected AvgBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat format,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, bucketsPaths, gapPolicy, formatter, metaData);
|
||||
super(name, bucketsPaths, gapPolicy, format, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -81,7 +81,7 @@ public class AvgBucketPipelineAggregator extends BucketMetricsPipelineAggregator
|
|||
@Override
|
||||
protected InternalAggregation buildAggregation(List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) {
|
||||
double avgValue = count == 0 ? Double.NaN : (sum / count);
|
||||
return new InternalSimpleValue(name(), avgValue, formatter, pipelineAggregators, metadata);
|
||||
return new InternalSimpleValue(name(), avgValue, format, pipelineAggregators, metadata);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
|
@ -27,7 +28,6 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -58,7 +58,7 @@ public class MaxBucketPipelineAggregator extends BucketMetricsPipelineAggregator
|
|||
private MaxBucketPipelineAggregator() {
|
||||
}
|
||||
|
||||
protected MaxBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter,
|
||||
protected MaxBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, bucketsPaths, gapPolicy, formatter, metaData);
|
||||
}
|
||||
|
@ -88,7 +88,7 @@ public class MaxBucketPipelineAggregator extends BucketMetricsPipelineAggregator
|
|||
@Override
|
||||
protected InternalAggregation buildAggregation(List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) {
|
||||
String[] keys = maxBucketKeys.toArray(new String[maxBucketKeys.size()]);
|
||||
return new InternalBucketMetricValue(name(), keys, maxValue, formatter, Collections.emptyList(), metaData());
|
||||
return new InternalBucketMetricValue(name(), keys, maxValue, format, Collections.emptyList(), metaData());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
|
@ -27,7 +28,6 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -58,7 +58,7 @@ public class MinBucketPipelineAggregator extends BucketMetricsPipelineAggregator
|
|||
private MinBucketPipelineAggregator() {
|
||||
}
|
||||
|
||||
protected MinBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter,
|
||||
protected MinBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, bucketsPaths, gapPolicy, formatter, metaData);
|
||||
}
|
||||
|
@ -89,7 +89,7 @@ public class MinBucketPipelineAggregator extends BucketMetricsPipelineAggregator
|
|||
protected InternalAggregation buildAggregation(java.util.List<PipelineAggregator> pipelineAggregators,
|
||||
java.util.Map<String, Object> metadata) {
|
||||
String[] keys = minBucketKeys.toArray(new String[minBucketKeys.size()]);
|
||||
return new InternalBucketMetricValue(name(), keys, minValue, formatter, Collections.emptyList(), metaData());
|
||||
return new InternalBucketMetricValue(name(), keys, minValue, format, Collections.emptyList(), metaData());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
|
||||
|
@ -29,8 +30,6 @@ import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
|
|||
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentile;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
@ -62,10 +61,10 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
|
|||
} // for serialization
|
||||
|
||||
public InternalPercentilesBucket(String name, double[] percents, double[] percentiles,
|
||||
ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, pipelineAggregators, metaData);
|
||||
this.valueFormatter = formatter;
|
||||
this.format = formatter;
|
||||
this.percentiles = percentiles;
|
||||
this.percents = percents;
|
||||
}
|
||||
|
@ -82,7 +81,7 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
|
|||
|
||||
@Override
|
||||
public String percentileAsString(double percent) {
|
||||
return valueFormatter.format(percentile(percent));
|
||||
return format.format(percentile(percent));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -107,14 +106,14 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
|
|||
|
||||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
valueFormatter = ValueFormatterStreams.readOptional(in);
|
||||
format = in.readValueFormat();
|
||||
percentiles = in.readDoubleArray();
|
||||
percents = in.readDoubleArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(valueFormatter, out);
|
||||
out.writeValueFormat(format);
|
||||
out.writeDoubleArray(percentiles);
|
||||
out.writeDoubleArray(percents);
|
||||
}
|
||||
|
@ -127,7 +126,7 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
|
|||
boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value));
|
||||
String key = String.valueOf(percent);
|
||||
builder.field(key, hasValue ? value : null);
|
||||
if (hasValue && !(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
if (hasValue && format != DocValueFormat.RAW) {
|
||||
builder.field(key + "_as_string", percentileAsString(percent));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,13 +22,13 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
|
|||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -62,7 +62,7 @@ public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAg
|
|||
}
|
||||
|
||||
protected PercentilesBucketPipelineAggregator(String name, double[] percents, String[] bucketsPaths, GapPolicy gapPolicy,
|
||||
ValueFormatter formatter, Map<String, Object> metaData) {
|
||||
DocValueFormat formatter, Map<String, Object> metaData) {
|
||||
super(name, bucketsPaths, gapPolicy, formatter, metaData);
|
||||
this.percents = percents;
|
||||
}
|
||||
|
@ -103,7 +103,7 @@ public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAg
|
|||
|
||||
// todo need postCollection() to clean up temp sorted data?
|
||||
|
||||
return new InternalPercentilesBucket(name(), percents, percentiles, formatter, pipelineAggregators, metadata);
|
||||
return new InternalPercentilesBucket(name(), percents, percentiles, format, pipelineAggregators, metadata);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -20,11 +20,11 @@
|
|||
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -47,7 +47,7 @@ public class InternalStatsBucket extends InternalStats implements StatsBucket {
|
|||
AggregationStreams.registerStream(STREAM, TYPE.stream());
|
||||
}
|
||||
|
||||
public InternalStatsBucket(String name, long count, double sum, double min, double max, ValueFormatter formatter,
|
||||
public InternalStatsBucket(String name, long count, double sum, double min, double max, DocValueFormat formatter,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
super(name, count, sum, min, max, formatter, pipelineAggregators, metaData);
|
||||
}
|
||||
|
|
|
@ -20,13 +20,13 @@
|
|||
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -55,7 +55,7 @@ public class StatsBucketPipelineAggregator extends BucketMetricsPipelineAggregat
|
|||
private double min = Double.POSITIVE_INFINITY;
|
||||
private double max = Double.NEGATIVE_INFINITY;
|
||||
|
||||
protected StatsBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter,
|
||||
protected StatsBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, bucketsPaths, gapPolicy, formatter, metaData);
|
||||
}
|
||||
|
@ -87,7 +87,7 @@ public class StatsBucketPipelineAggregator extends BucketMetricsPipelineAggregat
|
|||
|
||||
@Override
|
||||
protected InternalAggregation buildAggregation(List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) {
|
||||
return new InternalStatsBucket(name(), count, sum, min, max, formatter, pipelineAggregators, metadata);
|
||||
return new InternalStatsBucket(name(), count, sum, min, max, format, pipelineAggregators, metadata);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -20,13 +20,13 @@
|
|||
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -58,7 +58,7 @@ public class ExtendedStatsBucketPipelineAggregator extends BucketMetricsPipeline
|
|||
private double sigma;
|
||||
|
||||
protected ExtendedStatsBucketPipelineAggregator(String name, String[] bucketsPaths, double sigma, GapPolicy gapPolicy,
|
||||
ValueFormatter formatter, Map<String, Object> metaData) {
|
||||
DocValueFormat formatter, Map<String, Object> metaData) {
|
||||
super(name, bucketsPaths, gapPolicy, formatter, metaData);
|
||||
this.sigma = sigma;
|
||||
}
|
||||
|
@ -92,7 +92,7 @@ public class ExtendedStatsBucketPipelineAggregator extends BucketMetricsPipeline
|
|||
|
||||
@Override
|
||||
protected InternalAggregation buildAggregation(List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) {
|
||||
return new InternalExtendedStatsBucket(name(), count, sum, min, max, sumOfSqrs, sigma, formatter, pipelineAggregators, metadata);
|
||||
return new InternalExtendedStatsBucket(name(), count, sum, min, max, sumOfSqrs, sigma, format, pipelineAggregators, metadata);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -20,11 +20,11 @@
|
|||
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -48,7 +48,7 @@ public class InternalExtendedStatsBucket extends InternalExtendedStats implement
|
|||
}
|
||||
|
||||
InternalExtendedStatsBucket(String name, long count, double sum, double min, double max, double sumOfSqrs, double sigma,
|
||||
ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, count, sum, min, max, sumOfSqrs, sigma, formatter, pipelineAggregators, metaData);
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
|
@ -27,7 +28,6 @@ import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -55,7 +55,7 @@ public class SumBucketPipelineAggregator extends BucketMetricsPipelineAggregator
|
|||
private SumBucketPipelineAggregator() {
|
||||
}
|
||||
|
||||
protected SumBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter,
|
||||
protected SumBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, bucketsPaths, gapPolicy, formatter, metaData);
|
||||
}
|
||||
|
@ -77,7 +77,7 @@ public class SumBucketPipelineAggregator extends BucketMetricsPipelineAggregator
|
|||
|
||||
@Override
|
||||
protected InternalAggregation buildAggregation(List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) {
|
||||
return new InternalSimpleValue(name(), sum, formatter, pipelineAggregators, metadata);
|
||||
return new InternalSimpleValue(name(), sum, format, pipelineAggregators, metadata);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.elasticsearch.script.CompiledScript;
|
|||
import org.elasticsearch.script.ExecutableScript;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationExecutionException;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
|
||||
|
@ -36,8 +37,6 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
|||
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -64,7 +63,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
|
|||
PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
|
||||
}
|
||||
|
||||
private ValueFormatter formatter;
|
||||
private DocValueFormat formatter;
|
||||
private GapPolicy gapPolicy;
|
||||
|
||||
private Script script;
|
||||
|
@ -74,7 +73,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
|
|||
public BucketScriptPipelineAggregator() {
|
||||
}
|
||||
|
||||
public BucketScriptPipelineAggregator(String name, Map<String, String> bucketsPathsMap, Script script, ValueFormatter formatter,
|
||||
public BucketScriptPipelineAggregator(String name, Map<String, String> bucketsPathsMap, Script script, DocValueFormat formatter,
|
||||
GapPolicy gapPolicy, Map<String, Object> metadata) {
|
||||
super(name, bucketsPathsMap.values().toArray(new String[bucketsPathsMap.size()]), metadata);
|
||||
this.bucketsPathsMap = bucketsPathsMap;
|
||||
|
@ -140,7 +139,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
|
|||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
script.writeTo(out);
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(formatter);
|
||||
gapPolicy.writeTo(out);
|
||||
out.writeGenericValue(bucketsPathsMap);
|
||||
}
|
||||
|
@ -149,7 +148,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
|
|||
@Override
|
||||
protected void doReadFrom(StreamInput in) throws IOException {
|
||||
script = Script.readScript(in);
|
||||
formatter = ValueFormatterStreams.readOptional(in);
|
||||
formatter = in.readValueFormat();
|
||||
gapPolicy = GapPolicy.readFrom(in);
|
||||
bucketsPathsMap = (Map<String, String>) in.readGenericValue();
|
||||
}
|
||||
|
|
|
@ -27,8 +27,7 @@ import org.elasticsearch.script.Script.ScriptField;
|
|||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
@ -85,11 +84,11 @@ public class BucketScriptPipelineAggregatorBuilder extends PipelineAggregatorBui
|
|||
return format;
|
||||
}
|
||||
|
||||
protected ValueFormatter formatter() {
|
||||
protected DocValueFormat formatter() {
|
||||
if (format != null) {
|
||||
return ValueFormat.Patternable.Number.format(format).formatter();
|
||||
return new DocValueFormat.Decimal(format);
|
||||
} else {
|
||||
return ValueFormatter.RAW;
|
||||
return DocValueFormat.RAW;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.pipeline.cumulativesum;
|
|||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
|
@ -30,8 +31,6 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
|||
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -56,12 +55,12 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator {
|
|||
PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
|
||||
}
|
||||
|
||||
private ValueFormatter formatter;
|
||||
private DocValueFormat formatter;
|
||||
|
||||
public CumulativeSumPipelineAggregator() {
|
||||
}
|
||||
|
||||
public CumulativeSumPipelineAggregator(String name, String[] bucketsPaths, ValueFormatter formatter,
|
||||
public CumulativeSumPipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter,
|
||||
Map<String, Object> metadata) {
|
||||
super(name, bucketsPaths, metadata);
|
||||
this.formatter = formatter;
|
||||
|
@ -96,11 +95,11 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator {
|
|||
|
||||
@Override
|
||||
public void doReadFrom(StreamInput in) throws IOException {
|
||||
formatter = ValueFormatterStreams.readOptional(in);
|
||||
formatter = in.readValueFormat();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(formatter);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,13 +22,12 @@ package org.elasticsearch.search.aggregations.pipeline.cumulativesum;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.AbstractHistogramAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -67,11 +66,11 @@ public class CumulativeSumPipelineAggregatorBuilder extends PipelineAggregatorBu
|
|||
return format;
|
||||
}
|
||||
|
||||
protected ValueFormatter formatter() {
|
||||
protected DocValueFormat formatter() {
|
||||
if (format != null) {
|
||||
return ValueFormat.Patternable.Number.format(format).formatter();
|
||||
return new DocValueFormat.Decimal(format);
|
||||
} else {
|
||||
return ValueFormatter.RAW;
|
||||
return DocValueFormat.RAW;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.pipeline.derivative;
|
|||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationExecutionException;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
|
||||
|
@ -30,8 +31,6 @@ import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
|
|||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -60,14 +59,14 @@ public class DerivativePipelineAggregator extends PipelineAggregator {
|
|||
PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
|
||||
}
|
||||
|
||||
private ValueFormatter formatter;
|
||||
private DocValueFormat formatter;
|
||||
private GapPolicy gapPolicy;
|
||||
private Double xAxisUnits;
|
||||
|
||||
public DerivativePipelineAggregator() {
|
||||
}
|
||||
|
||||
public DerivativePipelineAggregator(String name, String[] bucketsPaths, ValueFormatter formatter, GapPolicy gapPolicy, Long xAxisUnits,
|
||||
public DerivativePipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, GapPolicy gapPolicy, Long xAxisUnits,
|
||||
Map<String, Object> metadata) {
|
||||
super(name, bucketsPaths, metadata);
|
||||
this.formatter = formatter;
|
||||
|
@ -128,7 +127,7 @@ public class DerivativePipelineAggregator extends PipelineAggregator {
|
|||
|
||||
@Override
|
||||
public void doReadFrom(StreamInput in) throws IOException {
|
||||
formatter = ValueFormatterStreams.readOptional(in);
|
||||
formatter = in.readValueFormat();
|
||||
gapPolicy = GapPolicy.readFrom(in);
|
||||
if (in.readBoolean()) {
|
||||
xAxisUnits = in.readDouble();
|
||||
|
@ -140,7 +139,7 @@ public class DerivativePipelineAggregator extends PipelineAggregator {
|
|||
|
||||
@Override
|
||||
public void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(formatter);
|
||||
gapPolicy.writeTo(out);
|
||||
boolean hasXAxisUnitsValue = xAxisUnits != null;
|
||||
out.writeBoolean(hasXAxisUnitsValue);
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
|
|||
import org.elasticsearch.common.rounding.DateTimeUnit;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.AbstractHistogramAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory;
|
||||
|
@ -31,8 +32,6 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInter
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -101,11 +100,11 @@ public class DerivativePipelineAggregatorBuilder extends PipelineAggregatorBuild
|
|||
|
||||
@Override
|
||||
protected PipelineAggregator createInternal(Map<String, Object> metaData) throws IOException {
|
||||
ValueFormatter formatter;
|
||||
DocValueFormat formatter;
|
||||
if (format != null) {
|
||||
formatter = ValueFormat.Patternable.Number.format(format).formatter();
|
||||
formatter = new DocValueFormat.Decimal(format);
|
||||
} else {
|
||||
formatter = ValueFormatter.RAW;
|
||||
formatter = DocValueFormat.RAW;
|
||||
}
|
||||
Long xAxisUnits = null;
|
||||
if (units != null) {
|
||||
|
|
|
@ -22,10 +22,10 @@ package org.elasticsearch.search.aggregations.pipeline.derivative;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -53,7 +53,7 @@ public class InternalDerivative extends InternalSimpleValue implements Derivativ
|
|||
InternalDerivative() {
|
||||
}
|
||||
|
||||
public InternalDerivative(String name, double value, double normalizationFactor, ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
public InternalDerivative(String name, double value, double normalizationFactor, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) {
|
||||
super(name, value, formatter, pipelineAggregators, metaData);
|
||||
this.normalizationFactor = normalizationFactor;
|
||||
|
@ -101,8 +101,8 @@ public class InternalDerivative extends InternalSimpleValue implements Derivativ
|
|||
if (normalizationFactor > 0) {
|
||||
boolean hasValue = !(Double.isInfinite(normalizedValue()) || Double.isNaN(normalizedValue()));
|
||||
builder.field("normalized_value", hasValue ? normalizedValue() : null);
|
||||
if (hasValue && !(valueFormatter instanceof ValueFormatter.Raw)) {
|
||||
builder.field("normalized_value_as_string", valueFormatter.format(normalizedValue()));
|
||||
if (hasValue && format != DocValueFormat.RAW) {
|
||||
builder.field("normalized_value_as_string", format.format(normalizedValue()));
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.pipeline.movavg;
|
|||
import org.elasticsearch.common.collect.EvictingQueue;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationExecutionException;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
|
||||
|
@ -34,8 +35,6 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
|||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel;
|
||||
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelStreams;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -65,7 +64,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
|
|||
PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
|
||||
}
|
||||
|
||||
private ValueFormatter formatter;
|
||||
private DocValueFormat formatter;
|
||||
private GapPolicy gapPolicy;
|
||||
private int window;
|
||||
private MovAvgModel model;
|
||||
|
@ -75,7 +74,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
|
|||
public MovAvgPipelineAggregator() {
|
||||
}
|
||||
|
||||
public MovAvgPipelineAggregator(String name, String[] bucketsPaths, ValueFormatter formatter, GapPolicy gapPolicy,
|
||||
public MovAvgPipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, GapPolicy gapPolicy,
|
||||
int window, int predict, MovAvgModel model, boolean minimize, Map<String, Object> metadata) {
|
||||
super(name, bucketsPaths, metadata);
|
||||
this.formatter = formatter;
|
||||
|
@ -152,7 +151,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
|
|||
if (buckets.size() > 0 && predict > 0) {
|
||||
|
||||
boolean keyed;
|
||||
ValueFormatter formatter;
|
||||
DocValueFormat formatter;
|
||||
keyed = buckets.get(0).getKeyed();
|
||||
formatter = buckets.get(0).getFormatter();
|
||||
|
||||
|
@ -251,7 +250,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
|
|||
|
||||
@Override
|
||||
public void doReadFrom(StreamInput in) throws IOException {
|
||||
formatter = ValueFormatterStreams.readOptional(in);
|
||||
formatter = in.readValueFormat();
|
||||
gapPolicy = GapPolicy.readFrom(in);
|
||||
window = in.readVInt();
|
||||
predict = in.readVInt();
|
||||
|
@ -262,7 +261,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
|
|||
|
||||
@Override
|
||||
public void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(formatter);
|
||||
gapPolicy.writeTo(out);
|
||||
out.writeVInt(window);
|
||||
out.writeVInt(predict);
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.pipeline.movavg;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.AbstractHistogramAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
|
@ -31,8 +32,6 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel;
|
|||
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelStreams;
|
||||
import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -94,11 +93,11 @@ public class MovAvgPipelineAggregatorBuilder extends PipelineAggregatorBuilder<M
|
|||
return gapPolicy;
|
||||
}
|
||||
|
||||
protected ValueFormatter formatter() {
|
||||
protected DocValueFormat formatter() {
|
||||
if (format != null) {
|
||||
return ValueFormat.Patternable.Number.format(format).formatter();
|
||||
return new DocValueFormat.Decimal(format);
|
||||
} else {
|
||||
return ValueFormatter.RAW;
|
||||
return DocValueFormat.RAW;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.elasticsearch.common.Nullable;
|
|||
import org.elasticsearch.common.collect.EvictingQueue;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
|
@ -32,8 +33,6 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
|||
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -58,14 +57,14 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator {
|
|||
PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
|
||||
}
|
||||
|
||||
private ValueFormatter formatter;
|
||||
private DocValueFormat formatter;
|
||||
private GapPolicy gapPolicy;
|
||||
private int lag;
|
||||
|
||||
public SerialDiffPipelineAggregator() {
|
||||
}
|
||||
|
||||
public SerialDiffPipelineAggregator(String name, String[] bucketsPaths, @Nullable ValueFormatter formatter, GapPolicy gapPolicy,
|
||||
public SerialDiffPipelineAggregator(String name, String[] bucketsPaths, @Nullable DocValueFormat formatter, GapPolicy gapPolicy,
|
||||
int lag, Map<String, Object> metadata) {
|
||||
super(name, bucketsPaths, metadata);
|
||||
this.formatter = formatter;
|
||||
|
@ -129,14 +128,14 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator {
|
|||
|
||||
@Override
|
||||
public void doReadFrom(StreamInput in) throws IOException {
|
||||
formatter = ValueFormatterStreams.readOptional(in);
|
||||
formatter = in.readValueFormat();
|
||||
gapPolicy = GapPolicy.readFrom(in);
|
||||
lag = in.readVInt();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doWriteTo(StreamOutput out) throws IOException {
|
||||
ValueFormatterStreams.writeOptional(formatter, out);
|
||||
out.writeValueFormat(formatter);
|
||||
gapPolicy.writeTo(out);
|
||||
out.writeVInt(lag);
|
||||
}
|
||||
|
|
|
@ -24,9 +24,8 @@ import org.elasticsearch.common.io.stream.StreamOutput;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
@ -102,11 +101,11 @@ public class SerialDiffPipelineAggregatorBuilder extends PipelineAggregatorBuild
|
|||
return gapPolicy;
|
||||
}
|
||||
|
||||
protected ValueFormatter formatter() {
|
||||
protected DocValueFormat formatter() {
|
||||
if (format != null) {
|
||||
return ValueFormat.Patternable.Number.format(format).formatter();
|
||||
return new DocValueFormat.Decimal(format);
|
||||
} else {
|
||||
return ValueFormatter.RAW;
|
||||
return DocValueFormat.RAW;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -29,10 +29,10 @@ import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
|||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
|
||||
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
|
||||
import org.elasticsearch.index.mapper.core.DateFieldMapper;
|
||||
import org.elasticsearch.search.SearchParseException;
|
||||
import org.elasticsearch.search.aggregations.AggregationExecutionException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -99,19 +99,11 @@ public class AggregationContext {
|
|||
if (config.missing instanceof Number) {
|
||||
missing = (Number) config.missing;
|
||||
} else {
|
||||
if (config.fieldContext != null && config.fieldContext.fieldType() instanceof DateFieldMapper.DateFieldType) {
|
||||
final DateFieldMapper.DateFieldType fieldType = (DateFieldMapper.DateFieldType) config.fieldContext.fieldType();
|
||||
try {
|
||||
missing = fieldType.dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis();
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new SearchParseException(context, "Expected a date value in [missing] but got [" + config.missing + "]", null, e);
|
||||
}
|
||||
if (config.fieldContext != null && config.fieldContext.fieldType() != null) {
|
||||
missing = config.fieldContext.fieldType().docValueFormat(null, DateTimeZone.UTC)
|
||||
.parseDouble(config.missing.toString(), false, context.nowCallable());
|
||||
} else {
|
||||
try {
|
||||
missing = Double.parseDouble(config.missing.toString());
|
||||
} catch (NumberFormatException e) {
|
||||
throw new SearchParseException(context, "Expected a numeric value in [missing] but got [" + config.missing + "]", null, e);
|
||||
}
|
||||
missing = Double.parseDouble(config.missing.toString());
|
||||
}
|
||||
}
|
||||
return (VS) MissingValues.replaceMissing((ValuesSource.Numeric) vs, missing);
|
||||
|
|
|
@ -25,7 +25,9 @@ import org.elasticsearch.common.io.stream.Writeable;
|
|||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.index.mapper.core.DateFieldMapper;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -34,20 +36,17 @@ import java.io.IOException;
|
|||
*/
|
||||
public enum ValueType implements Writeable<ValueType> {
|
||||
|
||||
@Deprecated
|
||||
ANY((byte) 0, "any", "any", ValuesSourceType.ANY, IndexFieldData.class, ValueFormat.RAW),
|
||||
STRING((byte) 1, "string", "string", ValuesSourceType.BYTES,
|
||||
IndexFieldData.class,
|
||||
ValueFormat.RAW),
|
||||
IndexFieldData.class, DocValueFormat.RAW),
|
||||
LONG((byte) 2, "byte|short|integer|long", "long",
|
||||
ValuesSourceType.NUMERIC,
|
||||
IndexNumericFieldData.class, ValueFormat.RAW) {
|
||||
IndexNumericFieldData.class, DocValueFormat.RAW) {
|
||||
@Override
|
||||
public boolean isNumeric() {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
DOUBLE((byte) 3, "float|double", "double", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.RAW) {
|
||||
DOUBLE((byte) 3, "float|double", "double", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW) {
|
||||
@Override
|
||||
public boolean isNumeric() {
|
||||
return true;
|
||||
|
@ -58,31 +57,32 @@ public enum ValueType implements Writeable<ValueType> {
|
|||
return true;
|
||||
}
|
||||
},
|
||||
NUMBER((byte) 4, "number", "number", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.RAW) {
|
||||
NUMBER((byte) 4, "number", "number", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW) {
|
||||
@Override
|
||||
public boolean isNumeric() {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
DATE((byte) 5, "date", "date", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.DateTime.DEFAULT) {
|
||||
DATE((byte) 5, "date", "date", ValuesSourceType.NUMERIC, IndexNumericFieldData.class,
|
||||
new DocValueFormat.DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER, DateTimeZone.UTC)) {
|
||||
@Override
|
||||
public boolean isNumeric() {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
IP((byte) 6, "ip", "ip", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.IPv4) {
|
||||
IP((byte) 6, "ip", "ip", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.IP) {
|
||||
@Override
|
||||
public boolean isNumeric() {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
NUMERIC((byte) 7, "numeric", "numeric", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.RAW) {
|
||||
NUMERIC((byte) 7, "numeric", "numeric", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW) {
|
||||
@Override
|
||||
public boolean isNumeric() {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
GEOPOINT((byte) 8, "geo_point", "geo_point", ValuesSourceType.GEOPOINT, IndexGeoPointFieldData.class, ValueFormat.RAW) {
|
||||
GEOPOINT((byte) 8, "geo_point", "geo_point", ValuesSourceType.GEOPOINT, IndexGeoPointFieldData.class, DocValueFormat.GEOHASH) {
|
||||
@Override
|
||||
public boolean isGeoPoint() {
|
||||
return true;
|
||||
|
@ -92,12 +92,12 @@ public enum ValueType implements Writeable<ValueType> {
|
|||
final String description;
|
||||
final ValuesSourceType valuesSourceType;
|
||||
final Class<? extends IndexFieldData> fieldDataType;
|
||||
final ValueFormat defaultFormat;
|
||||
final DocValueFormat defaultFormat;
|
||||
private final byte id;
|
||||
private String preferredName;
|
||||
|
||||
private ValueType(byte id, String description, String preferredName, ValuesSourceType valuesSourceType, Class<? extends IndexFieldData> fieldDataType,
|
||||
ValueFormat defaultFormat) {
|
||||
DocValueFormat defaultFormat) {
|
||||
this.id = id;
|
||||
this.description = description;
|
||||
this.preferredName = preferredName;
|
||||
|
@ -131,7 +131,7 @@ public enum ValueType implements Writeable<ValueType> {
|
|||
return !isA(valueType);
|
||||
}
|
||||
|
||||
public ValueFormat defaultFormat() {
|
||||
public DocValueFormat defaultFormat() {
|
||||
return defaultFormat;
|
||||
}
|
||||
|
||||
|
|
|
@ -26,20 +26,16 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
|
|||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.DateFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.AggregationInitializationException;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
|
@ -73,7 +69,7 @@ public abstract class ValuesSourceAggregatorBuilder<VS extends ValuesSource, AB
|
|||
private ValueType valueType = null;
|
||||
private String format = null;
|
||||
private Object missing = null;
|
||||
private DateTimeZone timeZone;
|
||||
private DateTimeZone timeZone = null;
|
||||
protected ValuesSourceConfig<VS> config;
|
||||
|
||||
protected ValuesSourceAggregatorBuilder(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) {
|
||||
|
@ -279,7 +275,7 @@ public abstract class ValuesSourceAggregatorBuilder<VS extends ValuesSource, AB
|
|||
config.missing = missing;
|
||||
config.timeZone = timeZone;
|
||||
config.script = createScript(script, context.searchContext());
|
||||
config.format = resolveFormat(format, this.timeZone, fieldType);
|
||||
config.format = fieldType.docValueFormat(format, timeZone);
|
||||
return config;
|
||||
}
|
||||
|
||||
|
@ -288,34 +284,17 @@ public abstract class ValuesSourceAggregatorBuilder<VS extends ValuesSource, AB
|
|||
: context.scriptService().search(context.lookup(), script, ScriptContext.Standard.AGGS, Collections.emptyMap());
|
||||
}
|
||||
|
||||
private static ValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType) {
|
||||
private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType) {
|
||||
if (valueType == null) {
|
||||
return ValueFormat.RAW; // we can't figure it out
|
||||
return DocValueFormat.RAW; // we can't figure it out
|
||||
}
|
||||
ValueFormat valueFormat = valueType.defaultFormat;
|
||||
if (valueFormat != null && valueFormat instanceof ValueFormat.Patternable && format != null) {
|
||||
return ((ValueFormat.Patternable) valueFormat).create(format);
|
||||
DocValueFormat valueFormat = valueType.defaultFormat;
|
||||
if (valueFormat instanceof DocValueFormat.Decimal && format != null) {
|
||||
valueFormat = new DocValueFormat.Decimal(format);
|
||||
}
|
||||
return valueFormat;
|
||||
}
|
||||
|
||||
private static ValueFormat resolveFormat(@Nullable String format, @Nullable DateTimeZone timezone, MappedFieldType fieldType) {
|
||||
if (fieldType instanceof DateFieldMapper.DateFieldType) {
|
||||
return format != null ? ValueFormat.DateTime.format(format, timezone) : ValueFormat.DateTime.mapper(
|
||||
(DateFieldMapper.DateFieldType) fieldType, timezone);
|
||||
}
|
||||
if (fieldType instanceof IpFieldMapper.IpFieldType) {
|
||||
return ValueFormat.IPv4;
|
||||
}
|
||||
if (fieldType instanceof BooleanFieldMapper.BooleanFieldType) {
|
||||
return ValueFormat.BOOLEAN;
|
||||
}
|
||||
if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
|
||||
return format != null ? ValueFormat.Number.format(format) : ValueFormat.RAW;
|
||||
}
|
||||
return ValueFormat.RAW;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final void doWriteTo(StreamOutput out) throws IOException {
|
||||
valuesSourceType.writeTo(out);
|
||||
|
|
|
@ -19,9 +19,7 @@
|
|||
package org.elasticsearch.search.aggregations.support;
|
||||
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
|
||||
import org.elasticsearch.search.aggregations.support.format.ValueParser;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
/**
|
||||
|
@ -35,7 +33,7 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
|
|||
ValueType scriptValueType;
|
||||
boolean unmapped = false;
|
||||
String formatPattern;
|
||||
ValueFormat format = ValueFormat.RAW;
|
||||
DocValueFormat format = DocValueFormat.RAW;
|
||||
Object missing;
|
||||
DateTimeZone timeZone;
|
||||
|
||||
|
@ -78,15 +76,7 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
|
|||
return this;
|
||||
}
|
||||
|
||||
public ValueFormat format() {
|
||||
public DocValueFormat format() {
|
||||
return format;
|
||||
}
|
||||
|
||||
public ValueFormatter formatter() {
|
||||
return format.formatter();
|
||||
}
|
||||
|
||||
public ValueParser parser() {
|
||||
return format.parser();
|
||||
}
|
||||
}
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue