Propagate DocValueFormat to all terms aggs. #17646
TL;DR: This commit should not have any impact on terms aggs; it just makes supporting IPv6 easier. Currently only the numeric terms aggs propagate the DocValueFormat instance, since numerics are also used to represent dates and IP addresses. String terms aggs are only used for text/keyword/string fields, so they do not use the format and just call utf8ToString(). However, once we support IPv6, IP addresses will also be encoded in sorted doc values (just like strings), so we will need the DocValueFormat to format the keys.
parent b08d453a0a
commit 13c1efd4d1
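Why the format matters for string-based keys, in a minimal sketch (not part of the commit; the helper class and method name below are made up for illustration): once a DocValueFormat travels with the bucket, the key is rendered through it instead of being hard-wired to UTF-8, which is what will let an IP-specific format decode binary-encoded IPv6 terms later on.

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.search.DocValueFormat;

// Hypothetical helper, only to illustrate the change to StringTerms.Bucket#getKeyAsString in the diff below.
class KeyRenderingSketch {
    static String keyAsString(BytesRef termBytes, DocValueFormat format) {
        // Before this commit the string terms bucket always returned termBytes.utf8ToString().
        // With the format propagated, the field's own format decides how the bytes are printed;
        // for plain text/keyword/string fields this is expected to stay a UTF-8 decode, while a
        // future IP format could turn binary-encoded IPv6 doc values into a readable address.
        return format.format(termBytes);
    }
}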
GlobalOrdinalsSignificantTermsAggregator.java

@@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.util.LongHash;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;

@@ -51,12 +52,13 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri
 private final SignificanceHeuristic significanceHeuristic;

 public GlobalOrdinalsSignificantTermsAggregator(String name, AggregatorFactories factories,
-ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, BucketCountThresholds bucketCountThresholds,
-IncludeExclude.OrdinalsFilter includeExclude, AggregationContext aggregationContext, Aggregator parent,
+ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, DocValueFormat format,
+BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude,
+AggregationContext aggregationContext, Aggregator parent,
 SignificanceHeuristic significanceHeuristic, SignificantTermsAggregatorFactory termsAggFactory,
 List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {

-super(name, factories, valuesSource, null, bucketCountThresholds, includeExclude, aggregationContext, parent,
+super(name, factories, valuesSource, null, format, bucketCountThresholds, includeExclude, aggregationContext, parent,
 SubAggCollectionMode.DEPTH_FIRST, false, pipelineAggregators, metaData);
 this.significanceHeuristic = significanceHeuristic;
 this.termsAggFactory = termsAggFactory;

@@ -159,11 +161,11 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri
 private final LongHash bucketOrds;

 public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource,
-BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude,
+DocValueFormat format, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude,
 AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
 SignificantTermsAggregatorFactory termsAggFactory, List<PipelineAggregator> pipelineAggregators,
 Map<String, Object> metaData) throws IOException {
-super(name, factories, valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, significanceHeuristic,
+super(name, factories, valuesSource, format, bucketCountThresholds, includeExclude, aggregationContext, parent, significanceHeuristic,
 termsAggFactory, pipelineAggregators, metaData);
 bucketOrds = new LongHash(1, aggregationContext.bigArrays());
 }
SignificantStringTermsAggregator.java

@@ -22,6 +22,7 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.lease.Releasables;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;

@@ -49,12 +50,12 @@ public class SignificantStringTermsAggregator extends StringTermsAggregator {
 protected final SignificantTermsAggregatorFactory termsAggFactory;
 private final SignificanceHeuristic significanceHeuristic;

-public SignificantStringTermsAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource,
+public SignificantStringTermsAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource, DocValueFormat format,
 BucketCountThresholds bucketCountThresholds, IncludeExclude.StringFilter includeExclude, AggregationContext aggregationContext,
 Aggregator parent, SignificanceHeuristic significanceHeuristic, SignificantTermsAggregatorFactory termsAggFactory,
 List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {

-super(name, factories, valuesSource, null, bucketCountThresholds, includeExclude, aggregationContext, parent,
+super(name, factories, valuesSource, null, format, bucketCountThresholds, includeExclude, aggregationContext, parent,
 SubAggCollectionMode.DEPTH_FIRST, false, pipelineAggregators, metaData);
 this.significanceHeuristic = significanceHeuristic;
 this.termsAggFactory = termsAggFactory;
SignificantTermsAggregatorFactory.java

@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.significant;

 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.PostingsEnum;
-import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;

@@ -32,6 +31,7 @@ import org.elasticsearch.common.lucene.index.FilterableTermsEnum;
 import org.elasticsearch.common.lucene.index.FreqTermsEnum;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;

@@ -205,7 +205,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
 }
 }
 assert execution != null;
-return execution.create(name, factories, valuesSource, bucketCountThresholds, includeExclude, context, parent,
+return execution.create(name, factories, valuesSource, config.format(), bucketCountThresholds, includeExclude, context, parent,
 significanceHeuristic, this, pipelineAggregators, metaData);
 }

@@ -237,13 +237,13 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
 MAP(new ParseField("map")) {

 @Override
-Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
+Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, DocValueFormat format,
 TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
 AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
 SignificantTermsAggregatorFactory termsAggregatorFactory, List<PipelineAggregator> pipelineAggregators,
 Map<String, Object> metaData) throws IOException {
 final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter();
-return new SignificantStringTermsAggregator(name, factories, valuesSource, bucketCountThresholds, filter,
+return new SignificantStringTermsAggregator(name, factories, valuesSource, format, bucketCountThresholds, filter,
 aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);
 }

@@ -251,32 +251,31 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
 GLOBAL_ORDINALS(new ParseField("global_ordinals")) {

 @Override
-Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
+Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, DocValueFormat format,
 TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
 AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
 SignificantTermsAggregatorFactory termsAggregatorFactory, List<PipelineAggregator> pipelineAggregators,
 Map<String, Object> metaData) throws IOException {
 ValuesSource.Bytes.WithOrdinals valueSourceWithOrdinals = (ValuesSource.Bytes.WithOrdinals) valuesSource;
-IndexSearcher indexSearcher = aggregationContext.searchContext().searcher();
 final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
 return new GlobalOrdinalsSignificantTermsAggregator(name, factories,
-(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, aggregationContext, parent,
-significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);
+(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter,
+aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);
 }

 },
 GLOBAL_ORDINALS_HASH(new ParseField("global_ordinals_hash")) {

 @Override
-Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
+Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, DocValueFormat format,
 TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
 AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
 SignificantTermsAggregatorFactory termsAggregatorFactory, List<PipelineAggregator> pipelineAggregators,
 Map<String, Object> metaData) throws IOException {
 final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
 return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories,
-(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, aggregationContext, parent,
-significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);
+(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter,
+aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);
 }

 };

@@ -295,7 +294,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
 this.parseField = parseField;
 }

-abstract Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
+abstract Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, DocValueFormat format,
 TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
 AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
 SignificantTermsAggregatorFactory termsAggregatorFactory, List<PipelineAggregator> pipelineAggregators,
AbstractStringTermsAggregator.java

@@ -19,6 +19,7 @@

 package org.elasticsearch.search.aggregations.bucket.terms;

+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.InternalAggregation;

@@ -35,15 +36,15 @@ abstract class AbstractStringTermsAggregator extends TermsAggregator {
 protected final boolean showTermDocCountError;

 public AbstractStringTermsAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent,
-Terms.Order order, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode subAggCollectMode,
+Terms.Order order, DocValueFormat format, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode subAggCollectMode,
 boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
-super(name, factories, context, parent, bucketCountThresholds, order, subAggCollectMode, pipelineAggregators, metaData);
+super(name, factories, context, parent, bucketCountThresholds, order, format, subAggCollectMode, pipelineAggregators, metaData);
 this.showTermDocCountError = showTermDocCountError;
 }

 @Override
 public InternalAggregation buildEmptyAggregation() {
-return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
+return new StringTerms(name, order, format, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
 bucketCountThresholds.getMinDocCount(), Collections.<InternalTerms.Bucket> emptyList(), showTermDocCountError, 0, 0,
 pipelineAggregators(), metaData());
 }
DoubleTerms.java

@@ -18,7 +18,6 @@
 */
 package org.elasticsearch.search.aggregations.bucket.terms;

-import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -152,17 +151,14 @@ public class DoubleTerms extends InternalTerms<DoubleTerms, DoubleTerms.Bucket>
 }
 }

-private DocValueFormat format;
-
 DoubleTerms() {
 } // for serialization

 public DoubleTerms(String name, Terms.Order order, DocValueFormat format, int requiredSize, int shardSize,
 long minDocCount, List<? extends InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError,
 long otherDocCount, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
-super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, pipelineAggregators,
+super(name, order, format, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, pipelineAggregators,
 metaData);
-this.format = format;
 }

 @Override
GlobalOrdinalsStringTermsAggregator.java

@@ -34,6 +34,7 @@ import org.elasticsearch.common.util.LongHash;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds;
 import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalMapping;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.InternalAggregation;

@@ -71,13 +72,12 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
 protected RandomAccessOrds globalOrds;

 public GlobalOrdinalsStringTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
-Terms.Order order, BucketCountThresholds bucketCountThresholds,
-IncludeExclude.OrdinalsFilter includeExclude,
-AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError,
-List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
-super(name, factories, aggregationContext, parent, order, bucketCountThresholds, collectionMode, showTermDocCountError,
-pipelineAggregators,
-metaData);
+Terms.Order order, DocValueFormat format, BucketCountThresholds bucketCountThresholds,
+IncludeExclude.OrdinalsFilter includeExclude, AggregationContext aggregationContext, Aggregator parent,
+SubAggCollectionMode collectionMode, boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators,
+Map<String, Object> metaData) throws IOException {
+super(name, factories, aggregationContext, parent, order, format, bucketCountThresholds, collectionMode, showTermDocCountError,
+pipelineAggregators, metaData);
 this.valuesSource = valuesSource;
 this.includeExclude = includeExclude;
 }

@@ -187,7 +187,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
 survivingBucketOrds[i] = bucket.bucketOrd;
 BytesRef scratch = new BytesRef();
 copy(globalOrds.lookupOrd(bucket.globalOrd), scratch);
-list[i] = new StringTerms.Bucket(scratch, bucket.docCount, null, showTermDocCountError, 0);
+list[i] = new StringTerms.Bucket(scratch, bucket.docCount, null, showTermDocCountError, 0, format);
 list[i].bucketOrd = bucket.bucketOrd;
 otherDocCount -= list[i].docCount;
 }

@@ -201,7 +201,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
 bucket.docCountError = 0;
 }

-return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
+return new StringTerms(name, order, format, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
 bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, pipelineAggregators(),
 metaData());
 }

@@ -266,12 +266,12 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr

 private final LongHash bucketOrds;

-public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
-Terms.Order order, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude, AggregationContext aggregationContext,
-Aggregator parent, SubAggCollectionMode collectionMode,
+public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource, Terms.Order order,
+DocValueFormat format, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude,
+AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode,
 boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
 throws IOException {
-super(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, collectionMode,
+super(name, factories, valuesSource, order, format, bucketCountThresholds, includeExclude, aggregationContext, parent, collectionMode,
 showTermDocCountError, pipelineAggregators, metaData);
 bucketOrds = new LongHash(1, aggregationContext.bigArrays());
 }

@@ -340,11 +340,11 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
 private RandomAccessOrds segmentOrds;

 public LowCardinality(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
-Terms.Order order,
+Terms.Order order, DocValueFormat format,
 BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent,
 SubAggCollectionMode collectionMode, boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators,
 Map<String, Object> metaData) throws IOException {
-super(name, factories, valuesSource, order, bucketCountThresholds, null, aggregationContext, parent, collectionMode,
+super(name, factories, valuesSource, order, format, bucketCountThresholds, null, aggregationContext, parent, collectionMode,
 showTermDocCountError, pipelineAggregators, metaData);
 assert factories == null || factories.countAggregators() == 0;
 this.segmentDocCounts = context.bigArrays().newIntArray(1, true);
InternalTerms.java

@@ -110,6 +110,7 @@ public abstract class InternalTerms<A extends InternalTerms, B extends InternalT

 protected Terms.Order order;
 protected int requiredSize;
+protected DocValueFormat format;
 protected int shardSize;
 protected long minDocCount;
 protected List<? extends Bucket> buckets;

@@ -120,11 +121,12 @@ public abstract class InternalTerms<A extends InternalTerms, B extends InternalT

 protected InternalTerms() {} // for serialization

-protected InternalTerms(String name, Terms.Order order, int requiredSize, int shardSize, long minDocCount,
+protected InternalTerms(String name, Terms.Order order, DocValueFormat format, int requiredSize, int shardSize, long minDocCount,
 List<? extends Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, List<PipelineAggregator> pipelineAggregators,
 Map<String, Object> metaData) {
 super(name, pipelineAggregators, metaData);
 this.order = order;
+this.format = format;
 this.requiredSize = requiredSize;
 this.shardSize = shardSize;
 this.minDocCount = minDocCount;
LongTerms.java

@@ -151,16 +151,13 @@ public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {
 }
 }

-DocValueFormat format;
-
 LongTerms() {} // for serialization

-public LongTerms(String name, Terms.Order order, DocValueFormat formatter, int requiredSize, int shardSize, long minDocCount,
+public LongTerms(String name, Terms.Order order, DocValueFormat format, int requiredSize, int shardSize, long minDocCount,
 List<? extends InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount,
 List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
-super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, pipelineAggregators,
-metaData);
-this.format = formatter;
+super(name, order, format, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount,
+pipelineAggregators, metaData);
 }

 @Override
LongTermsAggregator.java

@@ -47,7 +47,6 @@ import java.util.Map;
 public class LongTermsAggregator extends TermsAggregator {

 protected final ValuesSource.Numeric valuesSource;
-protected final DocValueFormat format;
 protected final LongHash bucketOrds;
 private boolean showTermDocCountError;
 private LongFilter longFilter;

@@ -56,10 +55,9 @@ public class LongTermsAggregator extends TermsAggregator {
 Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent,
 SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, IncludeExclude.LongFilter longFilter,
 List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
-super(name, factories, aggregationContext, parent, bucketCountThresholds, order, subAggCollectMode, pipelineAggregators, metaData);
+super(name, factories, aggregationContext, parent, bucketCountThresholds, order, format, subAggCollectMode, pipelineAggregators, metaData);
 this.valuesSource = valuesSource;
 this.showTermDocCountError = showTermDocCountError;
-this.format = format;
 this.longFilter = longFilter;
 bucketOrds = new LongHash(1, aggregationContext.bigArrays());
 }
StringTerms.java

@@ -22,6 +22,7 @@ import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationStreams;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;

@@ -54,7 +55,9 @@ public class StringTerms extends InternalTerms<StringTerms, StringTerms.Bucket>
 private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
 @Override
 public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
-Bucket buckets = new Bucket((boolean) context.attributes().get("showDocCountError"));
+Bucket buckets = new Bucket(
+context.format(),
+(boolean) context.attributes().get("showDocCountError"));
 buckets.readFrom(in);
 return buckets;
 }

@@ -78,12 +81,13 @@ public class StringTerms extends InternalTerms<StringTerms, StringTerms.Bucket>

 BytesRef termBytes;

-public Bucket(boolean showDocCountError) {
-super(null, showDocCountError);
+public Bucket(DocValueFormat format, boolean showDocCountError) {
+super(format, showDocCountError);
 }

-public Bucket(BytesRef term, long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError) {
-super(docCount, aggregations, showDocCountError, docCountError, null);
+public Bucket(BytesRef term, long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError,
+DocValueFormat format) {
+super(docCount, aggregations, showDocCountError, docCountError, format);
 this.termBytes = term;
 }

@@ -100,7 +104,7 @@ public class StringTerms extends InternalTerms<StringTerms, StringTerms.Bucket>

 @Override
 public String getKeyAsString() {
-return termBytes.utf8ToString();
+return format.format(termBytes);
 }

 @Override

@@ -110,7 +114,7 @@ public class StringTerms extends InternalTerms<StringTerms, StringTerms.Bucket>

 @Override
 Bucket newBucket(long docCount, InternalAggregations aggs, long docCountError) {
-return new Bucket(termBytes, docCount, aggs, showDocCountError, docCountError);
+return new Bucket(termBytes, docCount, aggs, showDocCountError, docCountError, format);
 }

 @Override

@@ -151,10 +155,10 @@ public class StringTerms extends InternalTerms<StringTerms, StringTerms.Bucket>
 StringTerms() {
 } // for serialization

-public StringTerms(String name, Terms.Order order, int requiredSize, int shardSize, long minDocCount,
+public StringTerms(String name, Terms.Order order, DocValueFormat format, int requiredSize, int shardSize, long minDocCount,
 List<? extends InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount,
 List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
-super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, pipelineAggregators,
+super(name, order, format, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, pipelineAggregators,
 metaData);
 }

@@ -165,19 +169,19 @@ public class StringTerms extends InternalTerms<StringTerms, StringTerms.Bucket>

 @Override
 public StringTerms create(List<Bucket> buckets) {
-return new StringTerms(this.name, this.order, this.requiredSize, this.shardSize, this.minDocCount, buckets,
+return new StringTerms(this.name, this.order, this.format, this.requiredSize, this.shardSize, this.minDocCount, buckets,
 this.showTermDocCountError, this.docCountError, this.otherDocCount, this.pipelineAggregators(), this.metaData);
 }

 @Override
 public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
-return new Bucket(prototype.termBytes, prototype.docCount, aggregations, prototype.showDocCountError, prototype.docCountError);
+return new Bucket(prototype.termBytes, prototype.docCount, aggregations, prototype.showDocCountError, prototype.docCountError, prototype.format);
 }

 @Override
 protected StringTerms create(String name, List<org.elasticsearch.search.aggregations.bucket.terms.InternalTerms.Bucket> buckets,
 long docCountError, long otherDocCount, InternalTerms prototype) {
-return new StringTerms(name, prototype.order, prototype.requiredSize, prototype.shardSize, prototype.minDocCount, buckets,
+return new StringTerms(name, prototype.order, prototype.format, prototype.requiredSize, prototype.shardSize, prototype.minDocCount, buckets,
 prototype.showTermDocCountError, docCountError, otherDocCount, prototype.pipelineAggregators(), prototype.getMetaData());
 }

@@ -185,6 +189,7 @@ public class StringTerms extends InternalTerms<StringTerms, StringTerms.Bucket>
 protected void doReadFrom(StreamInput in) throws IOException {
 this.docCountError = in.readLong();
 this.order = InternalOrder.Streams.readOrder(in);
+this.format = in.readValueFormat();
 this.requiredSize = readSize(in);
 this.shardSize = readSize(in);
 this.showTermDocCountError = in.readBoolean();

@@ -193,7 +198,7 @@ public class StringTerms extends InternalTerms<StringTerms, StringTerms.Bucket>
 int size = in.readVInt();
 List<InternalTerms.Bucket> buckets = new ArrayList<>(size);
 for (int i = 0; i < size; i++) {
-Bucket bucket = new Bucket(showTermDocCountError);
+Bucket bucket = new Bucket(format, showTermDocCountError);
 bucket.readFrom(in);
 buckets.add(bucket);
 }

@@ -205,6 +210,7 @@ public class StringTerms extends InternalTerms<StringTerms, StringTerms.Bucket>
 protected void doWriteTo(StreamOutput out) throws IOException {
 out.writeLong(docCountError);
 InternalOrder.Streams.writeOrder(order, out);
+out.writeValueFormat(format);
 writeSize(requiredSize, out);
 writeSize(shardSize, out);
 out.writeBoolean(showTermDocCountError);
StringTermsAggregator.java

@@ -24,6 +24,7 @@ import org.apache.lucene.util.BytesRefBuilder;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.util.BytesRefHash;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.InternalAggregation;

@@ -50,13 +51,13 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
 private final IncludeExclude.StringFilter includeExclude;

 public StringTermsAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource,
-Terms.Order order, BucketCountThresholds bucketCountThresholds,
+Terms.Order order, DocValueFormat format, BucketCountThresholds bucketCountThresholds,
 IncludeExclude.StringFilter includeExclude, AggregationContext aggregationContext,
-Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators,
-Map<String, Object> metaData) throws IOException {
+Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError,
+List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {

-super(name, factories, aggregationContext, parent, order, bucketCountThresholds, collectionMode, showTermDocCountError, pipelineAggregators,
-metaData);
+super(name, factories, aggregationContext, parent, order, format, bucketCountThresholds, collectionMode, showTermDocCountError,
+pipelineAggregators, metaData);
 this.valuesSource = valuesSource;
 this.includeExclude = includeExclude;
 bucketOrds = new BytesRefHash(1, aggregationContext.bigArrays());

@@ -132,7 +133,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
 StringTerms.Bucket spare = null;
 for (int i = 0; i < bucketOrds.size(); i++) {
 if (spare == null) {
-spare = new StringTerms.Bucket(new BytesRef(), 0, null, showTermDocCountError, 0);
+spare = new StringTerms.Bucket(new BytesRef(), 0, null, showTermDocCountError, 0, format);
 }
 bucketOrds.get(i, spare.termBytes);
 spare.docCount = bucketDocCount(i);

@@ -163,7 +164,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
 bucket.docCountError = 0;
 }

-return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
+return new StringTerms(name, order, format, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
 bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, pipelineAggregators(),
 metaData());
 }
TermsAggregator.java

@@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;

@@ -172,15 +173,19 @@ public abstract class TermsAggregator extends BucketsAggregator {
 }
 }

+protected final DocValueFormat format;
 protected final BucketCountThresholds bucketCountThresholds;
 protected final Terms.Order order;
 protected final Set<Aggregator> aggsUsedForSorting = new HashSet<>();
 protected final SubAggCollectionMode collectMode;

-public TermsAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent, BucketCountThresholds bucketCountThresholds, Terms.Order order, SubAggCollectionMode collectMode, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
+public TermsAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent,
+BucketCountThresholds bucketCountThresholds, Terms.Order order, DocValueFormat format, SubAggCollectionMode collectMode,
+List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
 super(name, factories, context, parent, pipelineAggregators, metaData);
 this.bucketCountThresholds = bucketCountThresholds;
 this.order = InternalOrder.validate(order, this);
+this.format = format;
 this.collectMode = collectMode;
 // Don't defer any child agg if we are dependent on it for pruning results
 if (order instanceof Aggregation){
TermsAggregatorBuilder.java

@@ -232,8 +232,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
 protected ValuesSourceAggregatorFactory<ValuesSource, ?> innerBuild(AggregationContext context, ValuesSourceConfig<ValuesSource> config,
 AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
 return new TermsAggregatorFactory(name, type, config, order, includeExclude, executionHint, collectMode,
-bucketCountThresholds,
-showTermDocCountError, context, parent, subFactoriesBuilder, metaData);
+bucketCountThresholds, showTermDocCountError, context, parent, subFactoriesBuilder, metaData);
 }

 @Override
TermsAggregatorFactory.java

@@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.terms;
 import org.apache.lucene.search.IndexSearcher;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParseFieldMatcher;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;

@@ -68,7 +69,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
 @Override
 protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
 throws IOException {
-final InternalAggregation aggregation = new UnmappedTerms(name, order, bucketCountThresholds.getRequiredSize(),
+final InternalAggregation aggregation = new UnmappedTerms(name, order, config.format(), bucketCountThresholds.getRequiredSize(),
 bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), pipelineAggregators, metaData);
 return new NonCollectingAggregator(name, context, parent, factories, pipelineAggregators, metaData) {
 {

@@ -150,7 +151,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
 }
 }

-return execution.create(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, context, parent,
+return execution.create(name, factories, valuesSource, order, config.format(), bucketCountThresholds, includeExclude, context, parent,
 collectMode, showTermDocCountError, pipelineAggregators, metaData);
 }

@@ -187,13 +188,13 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values

 @Override
 Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
-TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
+DocValueFormat format, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
 AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
 boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
 throws IOException {
 final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter();
-return new StringTermsAggregator(name, factories, valuesSource, order, bucketCountThresholds, filter, aggregationContext,
-parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
+return new StringTermsAggregator(name, factories, valuesSource, order, format, bucketCountThresholds, filter,
+aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
 }

 @Override

@@ -206,13 +207,13 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values

 @Override
 Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
-TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
+DocValueFormat format, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
 AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
 boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
 throws IOException {
 final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
 return new GlobalOrdinalsStringTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, order,
-bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError,
+format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError,
 pipelineAggregators, metaData);
 }

@@ -226,13 +227,13 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values

 @Override
 Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
-TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
+DocValueFormat format, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
 AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
 boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
|
throws IOException {
|
||||||
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
|
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
|
||||||
return new GlobalOrdinalsStringTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource,
|
return new GlobalOrdinalsStringTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource,
|
||||||
order, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError,
|
order, format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError,
|
||||||
pipelineAggregators, metaData);
|
pipelineAggregators, metaData);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -245,7 +246,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
|
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
|
||||||
TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
|
DocValueFormat format, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
|
||||||
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
|
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
|
||||||
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
|
@ -253,11 +254,11 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
|
||||||
// we need the FieldData impl to be able to extract the
|
// we need the FieldData impl to be able to extract the
|
||||||
// segment to global ord mapping
|
// segment to global ord mapping
|
||||||
|| valuesSource.getClass() != ValuesSource.Bytes.FieldData.class) {
|
|| valuesSource.getClass() != ValuesSource.Bytes.FieldData.class) {
|
||||||
return GLOBAL_ORDINALS.create(name, factories, valuesSource, order, bucketCountThresholds, includeExclude,
|
return GLOBAL_ORDINALS.create(name, factories, valuesSource, order, format, bucketCountThresholds, includeExclude,
|
||||||
aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
|
aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
|
||||||
}
|
}
|
||||||
return new GlobalOrdinalsStringTermsAggregator.LowCardinality(name, factories,
|
return new GlobalOrdinalsStringTermsAggregator.LowCardinality(name, factories,
|
||||||
(ValuesSource.Bytes.WithOrdinals) valuesSource, order, bucketCountThresholds, aggregationContext, parent,
|
(ValuesSource.Bytes.WithOrdinals) valuesSource, order, format, bucketCountThresholds, aggregationContext, parent,
|
||||||
subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
|
subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -283,7 +284,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
|
||||||
}
|
}
|
||||||
|
|
||||||
abstract Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
|
abstract Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
|
||||||
TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
|
DocValueFormat format, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
|
||||||
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
|
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
|
||||||
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
|
||||||
throws IOException;
|
throws IOException;
|
||||||
|
|
|
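The execution modes in the factory above all funnel through a single abstract create method, so adding the DocValueFormat parameter to that signature forces every mode (map, global ordinals, hashed global ordinals, low cardinality) to accept and forward the format. Below is a minimal, self-contained sketch of that pattern; ValueFormat, TermsCollector, Mode and ExecutionModeSketch are stand-in names for illustration, not Elasticsearch classes.

import org.apache.lucene.util.BytesRef;

public class ExecutionModeSketch {

    // Simplified stand-in for org.elasticsearch.search.DocValueFormat.
    interface ValueFormat {
        String format(BytesRef value);
        // RAW mirrors the old behaviour of string terms aggs: plain UTF-8 decoding.
        ValueFormat RAW = BytesRef::utf8ToString;
    }

    // Stand-in aggregator that only remembers how to render its bucket keys.
    static class TermsCollector {
        private final ValueFormat format;
        TermsCollector(ValueFormat format) { this.format = format; }
        String renderKey(BytesRef term) { return format.format(term); }
    }

    // Enum-with-abstract-factory-method pattern, as in the diff above: every mode
    // now receives the format and passes it down instead of ignoring it.
    enum Mode {
        MAP {
            @Override TermsCollector create(ValueFormat format) { return new TermsCollector(format); }
        },
        GLOBAL_ORDINALS {
            @Override TermsCollector create(ValueFormat format) { return new TermsCollector(format); }
        };

        abstract TermsCollector create(ValueFormat format);
    }

    public static void main(String[] args) {
        TermsCollector collector = Mode.GLOBAL_ORDINALS.create(ValueFormat.RAW);
        System.out.println(collector.renderKey(new BytesRef("keyword-value"))); // prints keyword-value
    }
}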
@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.terms;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationStreams;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;
@@ -56,9 +57,9 @@ public class UnmappedTerms extends InternalTerms<UnmappedTerms, InternalTerms.Bu

 UnmappedTerms() {} // for serialization

-public UnmappedTerms(String name, Terms.Order order, int requiredSize, int shardSize, long minDocCount, List<PipelineAggregator> pipelineAggregators,
-Map<String, Object> metaData) {
-super(name, order, requiredSize, shardSize, minDocCount, BUCKETS, false, 0, 0, pipelineAggregators, metaData);
+public UnmappedTerms(String name, Terms.Order order, DocValueFormat format, int requiredSize, int shardSize, long minDocCount,
+List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
+super(name, order, format, requiredSize, shardSize, minDocCount, BUCKETS, false, 0, 0, pipelineAggregators, metaData);
 }

 @Override
@@ -68,7 +69,7 @@ public class UnmappedTerms extends InternalTerms<UnmappedTerms, InternalTerms.Bu

 @Override
 public UnmappedTerms create(List<InternalTerms.Bucket> buckets) {
-return new UnmappedTerms(this.name, this.order, this.requiredSize, this.shardSize, this.minDocCount, this.pipelineAggregators(), this.metaData);
+return new UnmappedTerms(this.name, this.order, this.format, this.requiredSize, this.shardSize, this.minDocCount, this.pipelineAggregators(), this.metaData);
 }

 @Override
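To see why result classes such as UnmappedTerms need to carry the format at all, consider how bucket keys are rendered. The sketch below is self-contained and uses hypothetical helper names (KeyRenderingSketch, RAW, IP); only BytesRef and the JDK classes are real APIs. For text and keyword fields a UTF-8 decode is enough, but once ip addresses are stored as binary sorted doc values the same bytes have to be decoded as an InetAddress, which is exactly the kind of decision a per-field DocValueFormat can make.

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.function.Function;
import org.apache.lucene.util.BytesRef;

public class KeyRenderingSketch {

    // Plain UTF-8 rendering: effectively what string terms aggs did before this change.
    static final Function<BytesRef, String> RAW = BytesRef::utf8ToString;

    // Hypothetical ip rendering: interpret the doc-values bytes as an InetAddress.
    static final Function<BytesRef, String> IP = value -> {
        byte[] bytes = Arrays.copyOfRange(value.bytes, value.offset, value.offset + value.length);
        try {
            return InetAddress.getByAddress(bytes).getHostAddress();
        } catch (UnknownHostException e) {
            throw new IllegalArgumentException("not an encoded IP address", e);
        }
    };

    public static void main(String[] args) {
        // A keyword term round-trips fine through UTF-8.
        System.out.println(RAW.apply(new BytesRef("keyword-value")));

        // An IPv6 address stored as its raw 16 bytes needs the dedicated rendering (here ::1, printed in expanded form).
        byte[] loopback = new byte[16];
        loopback[15] = 1;
        System.out.println(IP.apply(new BytesRef(loopback)));
    }
}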