Adds reducers list to InternalAggregation.reduce()

The list of reducers is fed through from the AggregatorFactory
Colin Goodheart-Smithe 2015-02-11 16:19:48 +00:00
parent e2949d7df1
commit c60bb4d73b
90 changed files with 1181 additions and 640 deletions
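
In outline, the commit threads a List<Reducer> from AggregatorFactory.setReducers(...) through createInternal(...) into every Aggregator and, from there, into each InternalAggregation, whose reduce() now runs the chain after doReduce(). A minimal sketch of that reduce flow, restating the InternalAggregation hunk further down (no calls beyond those shown in this diff):

    public final InternalAggregation reduce(ReduceContext reduceContext) {
        InternalAggregation aggResult = doReduce(reduceContext);     // per-type reduction, as before
        for (Reducer reducer : reducers) {                           // new: reducers fed in from the factory
            aggResult = reducer.reduce(aggResult, reduceContext);    // each reducer may transform the result
        }
        return aggResult;
    }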

View File

@ -105,7 +105,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
* Build an empty aggregation.
*/
public abstract InternalAggregation buildEmptyAggregation();
/** Aggregation mode for sub aggregations. */
public enum SubAggCollectionMode {

View File

@ -23,10 +23,13 @@ import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.ObjectArray;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
@ -38,6 +41,7 @@ public abstract class AggregatorFactory {
protected String type;
protected AggregatorFactory parent;
protected AggregatorFactories factories = AggregatorFactories.EMPTY;
protected List<Reducer> reducers = Collections.emptyList();
protected Map<String, Object> metaData;
/**
@ -79,7 +83,8 @@ public abstract class AggregatorFactory {
return parent;
}
protected abstract Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException;
protected abstract Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException;
/**
* Creates the aggregator
@ -92,7 +97,7 @@ public abstract class AggregatorFactory {
* @return The created aggregator
*/
public final Aggregator create(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket) throws IOException {
return createInternal(context, parent, collectsFromSingleBucket, this.metaData);
return createInternal(context, parent, collectsFromSingleBucket, this.reducers, this.metaData);
}
public void doValidate() {
@ -102,16 +107,18 @@ public abstract class AggregatorFactory {
this.metaData = metaData;
}
public void setReducers(List<Reducer> reducers) {
this.reducers = reducers;
}
/**
* Utility method. Given an {@link AggregatorFactory} that creates {@link Aggregator}s that only know how
* to collect bucket <tt>0</tt>, this returns an aggregator that can collect any bucket.
*/
protected static Aggregator asMultiBucketAggregator(final AggregatorFactory factory, final AggregationContext context, final Aggregator parent) throws IOException {
final Aggregator first = factory.create(context, parent, true);
final BigArrays bigArrays = context.bigArrays();
return new Aggregator() {
ObjectArray<Aggregator> aggregators;
ObjectArray<LeafBucketCollector> collectors;
{
@ -187,9 +194,9 @@ public abstract class AggregatorFactory {
LeafBucketCollector collector = collectors.get(bucket);
if (collector == null) {
Aggregator aggregator = aggregators.get(bucket);
if (aggregator == null) {
aggregator = factory.create(context, parent, true);
aggregator.preCollection();
if (aggregator == null) {
aggregator = factory.create(context, parent, true);
aggregator.preCollection();
aggregators.set(bucket, aggregator);
}
collector = aggregator.getLeafCollector(ctx);
@ -197,7 +204,7 @@ public abstract class AggregatorFactory {
collectors.set(bucket, collector);
}
collector.collect(doc, 0);
}
}
};
}
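
As a usage sketch of the new plumbing on this class (the helper class, method, and variable names below are illustrative and not part of the commit; only setReducers(...) and create(...) from this diff are assumed to exist):

    import java.io.IOException;
    import java.util.Collections;

    import org.elasticsearch.search.aggregations.Aggregator;
    import org.elasticsearch.search.aggregations.AggregatorFactory;
    import org.elasticsearch.search.aggregations.reducers.Reducer;
    import org.elasticsearch.search.aggregations.support.AggregationContext;

    final class ReducerWiringSketch {
        // Hypothetical helper: attach one reducer to a factory and create its aggregator.
        static Aggregator createWithReducer(AggregatorFactory factory, Reducer reducer,
                AggregationContext context, Aggregator parent) throws IOException {
            factory.setReducers(Collections.singletonList(reducer)); // new setter added in this commit
            // create(...) now forwards this.reducers into createInternal(...)
            return factory.create(context, parent, true);
        }
    }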

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationPath;
import java.io.IOException;
@ -116,6 +117,8 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St
protected Map<String, Object> metaData;
private List<Reducer> reducers;
/** Constructs an uninitialized aggregation (used for serialization) **/
protected InternalAggregation() {}
@ -124,8 +127,9 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St
*
* @param name The name of the aggregation.
*/
protected InternalAggregation(String name, Map<String, Object> metaData) {
protected InternalAggregation(String name, List<Reducer> reducers, Map<String, Object> metaData) {
this.name = name;
this.reducers = reducers;
this.metaData = metaData;
}
@ -146,7 +150,11 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St
* construction.
*/
public final InternalAggregation reduce(ReduceContext reduceContext) {
return doReduce(reduceContext);
InternalAggregation aggResult = doReduce(reduceContext);
for (Reducer reducer : reducers) {
aggResult = reducer.reduce(aggResult, reduceContext);
}
return aggResult;
}
public abstract InternalAggregation doReduce(ReduceContext reduceContext);
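
To illustrate the contract the new chain relies on — each reducer receives the already-reduced aggregation and returns a (possibly transformed) replacement — a no-op reducer might look like the sketch below. The diff does not show Reducer's own definition, so treating reduce(InternalAggregation, ReduceContext) as its abstract method (and Reducer as an extendable base class) is an assumption here:

    // Hypothetical sketch only: Reducer's real base-class API is not part of this diff.
    public class NoOpReducer extends Reducer {
        @Override // assuming this is the method InternalAggregation.reduce(...) invokes
        public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
            // A real reducer would derive a new aggregation here (e.g. compute a value
            // from the reduced buckets); this sketch just passes the result through.
            return aggregation;
        }
    }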
@ -180,6 +188,10 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St
return metaData;
}
public List<Reducer> reducers() {
return reducers;
}
@Override
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.util.List;
import java.util.Map;
@ -30,8 +31,8 @@ public abstract class InternalMultiBucketAggregation extends InternalAggregation
public InternalMultiBucketAggregation() {
}
public InternalMultiBucketAggregation(String name, Map<String, Object> metaData) {
super(name, metaData);
public InternalMultiBucketAggregation(String name, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
}
@Override

View File

@ -20,9 +20,11 @@
package org.elasticsearch.search.aggregations;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -31,12 +33,14 @@ import java.util.Map;
*/
public abstract class NonCollectingAggregator extends AggregatorBase {
protected NonCollectingAggregator(String name, AggregationContext context, Aggregator parent, AggregatorFactories subFactories, Map<String, Object> metaData) throws IOException {
super(name, subFactories, context, parent, metaData);
protected NonCollectingAggregator(String name, AggregationContext context, Aggregator parent, AggregatorFactories subFactories,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, subFactories, context, parent, reducers, metaData);
}
protected NonCollectingAggregator(String name, AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
this(name, context, parent, AggregatorFactories.EMPTY, metaData);
protected NonCollectingAggregator(String name, AggregationContext context, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
this(name, context, parent, AggregatorFactories.EMPTY, reducers, metaData);
}
@Override

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.IntArray;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorBase;
import org.elasticsearch.search.aggregations.AggregatorFactories;
@ -31,6 +32,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
@ -42,9 +44,9 @@ public abstract class BucketsAggregator extends AggregatorBase {
private IntArray docCounts;
public BucketsAggregator(String name, AggregatorFactories factories,
AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, factories, context, parent, metaData);
bigArrays = context.bigArrays();
AggregationContext context, Aggregator parent,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, context, parent, reducers, metaData);
docCounts = bigArrays.newIntArray(1, true);
}
@ -110,11 +112,11 @@ public abstract class BucketsAggregator extends AggregatorBase {
*/
protected final InternalAggregations bucketAggregations(long bucket) throws IOException {
final InternalAggregation[] aggregations = new InternalAggregation[subAggregators.length];
for (int i = 0; i < subAggregators.length; i++) {
for (int i = 0; i < subAggregators.length; i++) {
aggregations[i] = subAggregators[i].buildAggregation(bucket);
}
}
return new InternalAggregations(Arrays.asList(aggregations));
}
}
/**
* Utility method to build empty aggregations of the sub aggregators.

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.ArrayList;
@ -47,8 +48,8 @@ public abstract class InternalSingleBucketAggregation extends InternalAggregatio
* @param docCount The document count in the single bucket.
* @param aggregations The already built sub-aggregations that are associated with the bucket.
*/
protected InternalSingleBucketAggregation(String name, long docCount, InternalAggregations aggregations, Map<String, Object> metaData) {
super(name, metaData);
protected InternalSingleBucketAggregation(String name, long docCount, InternalAggregations aggregations, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
this.docCount = docCount;
this.aggregations = aggregations;
}

View File

@ -20,9 +20,11 @@ package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -31,8 +33,9 @@ import java.util.Map;
public abstract class SingleBucketAggregator extends BucketsAggregator {
protected SingleBucketAggregator(String name, AggregatorFactories factories,
AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, metaData);
AggregationContext aggregationContext, Aggregator parent,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, reducers, metaData);
}
}

View File

@ -23,8 +23,10 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -49,8 +51,9 @@ public class InternalChildren extends InternalSingleBucketAggregation implements
public InternalChildren() {
}
public InternalChildren(String name, long docCount, InternalAggregations aggregations, Map<String, Object> metaData) {
super(name, docCount, aggregations, metaData);
public InternalChildren(String name, long docCount, InternalAggregations aggregations, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, docCount, aggregations, reducers, metaData);
}
@Override
@ -60,6 +63,6 @@ public class InternalChildren extends InternalSingleBucketAggregation implements
@Override
protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) {
return new InternalChildren(name, docCount, subAggregations, getMetaData());
return new InternalChildren(name, docCount, subAggregations, reducers(), getMetaData());
}
}

View File

@ -36,6 +36,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -70,8 +71,9 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
public ParentToChildrenAggregator(String name, AggregatorFactories factories, AggregationContext aggregationContext,
Aggregator parent, String parentType, Filter childFilter, Filter parentFilter,
ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource, long maxOrd, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, metaData);
ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource,
long maxOrd, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, reducers, metaData);
this.parentType = parentType;
// these two filters are cached in the parser
this.childFilter = childFilter;
@ -84,12 +86,13 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
return new InternalChildren(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), metaData());
return new InternalChildren(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), reducers(),
metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalChildren(name, 0, buildEmptySubAggregations(), metaData());
return new InternalChildren(name, 0, buildEmptySubAggregations(), reducers(), metaData());
}
@Override
@ -199,21 +202,25 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new NonCollectingAggregator(name, aggregationContext, parent, metaData) {
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new NonCollectingAggregator(name, aggregationContext, parent, reducers, metaData) {
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalChildren(name, 0, buildEmptySubAggregations(), metaData());
return new InternalChildren(name, 0, buildEmptySubAggregations(), reducers(), metaData());
}
};
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
protected Aggregator doCreateInternal(ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource,
AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
long maxOrd = valuesSource.globalMaxOrd(aggregationContext.searchContext().searcher(), parentType);
return new ParentToChildrenAggregator(name, factories, aggregationContext, parent, parentType, childFilter, parentFilter, valuesSource, maxOrd, metaData);
return new ParentToChildrenAggregator(name, factories, aggregationContext, parent, parentType, childFilter, parentFilter,
valuesSource, maxOrd, reducers, metaData);
}
}

View File

@ -22,6 +22,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -29,9 +30,11 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -45,9 +48,9 @@ public class FilterAggregator extends SingleBucketAggregator {
org.apache.lucene.search.Filter filter,
AggregatorFactories factories,
AggregationContext aggregationContext,
Aggregator parent,
Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, metaData);
super(name, factories, aggregationContext, parent, reducers, metaData);
this.filter = filter;
}
@ -58,23 +61,24 @@ public class FilterAggregator extends SingleBucketAggregator {
// no need to provide deleted docs to the filter
final Bits bits = DocIdSets.asSequentialAccessBits(ctx.reader().maxDoc(), filter.getDocIdSet(ctx, null));
return new LeafBucketCollectorBase(sub, null) {
@Override
@Override
public void collect(int doc, long bucket) throws IOException {
if (bits.get(doc)) {
if (bits.get(doc)) {
collectBucket(sub, doc, bucket);
}
}
}
};
}
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
return new InternalFilter(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), metaData());
return new InternalFilter(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), reducers(),
metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalFilter(name, 0, buildEmptySubAggregations(), metaData());
return new InternalFilter(name, 0, buildEmptySubAggregations(), reducers(), metaData());
}
public static class Factory extends AggregatorFactory {
@ -87,8 +91,9 @@ public class FilterAggregator extends SingleBucketAggregator {
}
@Override
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new FilterAggregator(name, filter, factories, context, parent, metaData);
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new FilterAggregator(name, filter, factories, context, parent, reducers, metaData);
}
}

View File

@ -22,8 +22,10 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -48,8 +50,8 @@ public class InternalFilter extends InternalSingleBucketAggregation implements F
InternalFilter() {} // for serialization
InternalFilter(String name, long docCount, InternalAggregations subAggregations, Map<String, Object> metaData) {
super(name, docCount, subAggregations, metaData);
InternalFilter(String name, long docCount, InternalAggregations subAggregations, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, docCount, subAggregations, reducers, metaData);
}
@Override
@ -59,6 +61,6 @@ public class InternalFilter extends InternalSingleBucketAggregation implements F
@Override
protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) {
return new InternalFilter(name, docCount, subAggregations, getMetaData());
return new InternalFilter(name, docCount, subAggregations, reducers(), getMetaData());
}
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -33,6 +34,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
@ -59,8 +61,9 @@ public class FiltersAggregator extends BucketsAggregator {
private final boolean keyed;
public FiltersAggregator(String name, AggregatorFactories factories, List<KeyedFilter> filters, boolean keyed, AggregationContext aggregationContext,
Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, metaData);
Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
super(name, factories, aggregationContext, parent, reducers, metaData);
this.keyed = keyed;
this.filters = filters.toArray(new KeyedFilter[filters.size()]);
}
@ -73,16 +76,16 @@ public class FiltersAggregator extends BucketsAggregator {
final Bits[] bits = new Bits[filters.length];
for (int i = 0; i < filters.length; ++i) {
bits[i] = DocIdSets.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].filter.getDocIdSet(ctx, null));
}
}
return new LeafBucketCollectorBase(sub, null) {
@Override
@Override
public void collect(int doc, long bucket) throws IOException {
for (int i = 0; i < bits.length; i++) {
if (bits[i].get(doc)) {
for (int i = 0; i < bits.length; i++) {
if (bits[i].get(doc)) {
collectBucket(sub, doc, bucketOrd(bucket, i));
}
}
}
}
};
}
@ -95,7 +98,7 @@ public class FiltersAggregator extends BucketsAggregator {
InternalFilters.Bucket bucket = new InternalFilters.Bucket(filter.key, bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed);
buckets.add(bucket);
}
return new InternalFilters(name, buckets, keyed, metaData());
return new InternalFilters(name, buckets, keyed, reducers(), metaData());
}
@Override
@ -106,7 +109,7 @@ public class FiltersAggregator extends BucketsAggregator {
InternalFilters.Bucket bucket = new InternalFilters.Bucket(filters[i].key, 0, subAggs, keyed);
buckets.add(bucket);
}
return new InternalFilters(name, buckets, keyed, metaData());
return new InternalFilters(name, buckets, keyed, reducers(), metaData());
}
final long bucketOrd(long owningBucketOrdinal, int filterOrd) {
@ -125,8 +128,9 @@ public class FiltersAggregator extends BucketsAggregator {
}
@Override
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new FiltersAggregator(name, factories, filters, keyed, context, parent, metaData);
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new FiltersAggregator(name, factories, filters, keyed, context, parent, reducers, metaData);
}
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.ArrayList;
@ -163,8 +164,8 @@ public class InternalFilters extends InternalMultiBucketAggregation implements F
public InternalFilters() {} // for serialization
public InternalFilters(String name, List<Bucket> buckets, boolean keyed, Map<String, Object> metaData) {
super(name, metaData);
public InternalFilters(String name, List<Bucket> buckets, boolean keyed, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
this.buckets = buckets;
this.keyed = keyed;
}
@ -211,7 +212,7 @@ public class InternalFilters extends InternalMultiBucketAggregation implements F
}
}
InternalFilters reduced = new InternalFilters(name, new ArrayList<Bucket>(bucketsList.size()), keyed, getMetaData());
InternalFilters reduced = new InternalFilters(name, new ArrayList<Bucket>(bucketsList.size()), keyed, reducers(), getMetaData());
for (List<Bucket> sameRangeList : bucketsList) {
reduced.buckets.add((sameRangeList.get(0)).reduce(sameRangeList, reduceContext));
}

View File

@ -28,12 +28,14 @@ import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
@ -49,8 +51,10 @@ public class GeoHashGridAggregator extends BucketsAggregator {
private final LongHash bucketOrds;
public GeoHashGridAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource,
int requiredSize, int shardSize, AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, metaData);
int requiredSize,
int shardSize, AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
super(name, factories, aggregationContext, parent, reducers, metaData);
this.valuesSource = valuesSource;
this.requiredSize = requiredSize;
this.shardSize = shardSize;
@ -126,12 +130,12 @@ public class GeoHashGridAggregator extends BucketsAggregator {
bucket.aggregations = bucketAggregations(bucket.bucketOrd);
list[i] = bucket;
}
return new InternalGeoHashGrid(name, requiredSize, Arrays.asList(list), metaData());
return new InternalGeoHashGrid(name, requiredSize, Arrays.asList(list), reducers(), metaData());
}
@Override
public InternalGeoHashGrid buildEmptyAggregation() {
return new InternalGeoHashGrid(name, requiredSize, Collections.<InternalGeoHashGrid.Bucket>emptyList(), metaData());
return new InternalGeoHashGrid(name, requiredSize, Collections.<InternalGeoHashGrid.Bucket> emptyList(), reducers(), metaData());
}

View File

@ -34,6 +34,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.BucketUtils;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -43,6 +44,7 @@ import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
@ -123,9 +125,11 @@ public class GeoHashGridParser implements Aggregator.Parser {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
final InternalAggregation aggregation = new InternalGeoHashGrid(name, requiredSize, Collections.<InternalGeoHashGrid.Bucket>emptyList(), metaData);
return new NonCollectingAggregator(name, aggregationContext, parent, metaData) {
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
final InternalAggregation aggregation = new InternalGeoHashGrid(name, requiredSize,
Collections.<InternalGeoHashGrid.Bucket> emptyList(), reducers, metaData);
return new NonCollectingAggregator(name, aggregationContext, parent, reducers, metaData) {
public InternalAggregation buildEmptyAggregation() {
return aggregation;
}
@ -133,12 +137,15 @@ public class GeoHashGridParser implements Aggregator.Parser {
}
@Override
protected Aggregator doCreateInternal(final ValuesSource.GeoPoint valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
protected Aggregator doCreateInternal(final ValuesSource.GeoPoint valuesSource, AggregationContext aggregationContext,
Aggregator parent, boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, aggregationContext, parent);
}
ValuesSource.Numeric cellIdSource = new CellIdSource(valuesSource, precision);
return new GeoHashGridAggregator(name, factories, cellIdSource, requiredSize, shardSize, aggregationContext, parent, metaData);
return new GeoHashGridAggregator(name, factories, cellIdSource, requiredSize, shardSize, aggregationContext, parent, reducers,
metaData);
}

View File

@ -32,6 +32,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.ArrayList;
@ -170,8 +171,9 @@ public class InternalGeoHashGrid extends InternalMultiBucketAggregation implemen
InternalGeoHashGrid() {
} // for serialization
public InternalGeoHashGrid(String name, int requiredSize, Collection<Bucket> buckets, Map<String, Object> metaData) {
super(name, metaData);
public InternalGeoHashGrid(String name, int requiredSize, Collection<Bucket> buckets, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, reducers, metaData);
this.requiredSize = requiredSize;
this.buckets = buckets;
}
@ -218,7 +220,7 @@ public class InternalGeoHashGrid extends InternalMultiBucketAggregation implemen
for (int i = ordered.size() - 1; i >= 0; i--) {
list[i] = ordered.pop();
}
return new InternalGeoHashGrid(getName(), requiredSize, Arrays.asList(list), getMetaData());
return new InternalGeoHashGrid(getName(), requiredSize, Arrays.asList(list), reducers(), getMetaData());
}
@Override

View File

@ -28,9 +28,11 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -38,8 +40,9 @@ import java.util.Map;
*/
public class GlobalAggregator extends SingleBucketAggregator {
public GlobalAggregator(String name, AggregatorFactories subFactories, AggregationContext aggregationContext, Map<String, Object> metaData) throws IOException {
super(name, subFactories, aggregationContext, null, metaData);
public GlobalAggregator(String name, AggregatorFactories subFactories, AggregationContext aggregationContext, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, subFactories, aggregationContext, null, reducers, metaData);
}
@Override
@ -50,14 +53,15 @@ public class GlobalAggregator extends SingleBucketAggregator {
public void collect(int doc, long bucket) throws IOException {
assert bucket == 0 : "global aggregator can only be a top level aggregator";
collectBucket(sub, doc, bucket);
}
}
};
}
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
assert owningBucketOrdinal == 0 : "global aggregator can only be a top level aggregator";
return new InternalGlobal(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), metaData());
return new InternalGlobal(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), reducers(),
metaData());
}
@Override
@ -72,7 +76,8 @@ public class GlobalAggregator extends SingleBucketAggregator {
}
@Override
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
if (parent != null) {
throw new AggregationExecutionException("Aggregation [" + parent.name() + "] cannot have a global " +
"sub-aggregation [" + name + "]. Global aggregations can only be defined as top level aggregations");
@ -80,7 +85,7 @@ public class GlobalAggregator extends SingleBucketAggregator {
if (collectsFromSingleBucket == false) {
throw new ElasticsearchIllegalStateException();
}
return new GlobalAggregator(name, factories, context, metaData);
return new GlobalAggregator(name, factories, context, reducers, metaData);
}
}

View File

@ -22,8 +22,10 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -49,8 +51,8 @@ public class InternalGlobal extends InternalSingleBucketAggregation implements G
InternalGlobal() {} // for serialization
InternalGlobal(String name, long docCount, InternalAggregations aggregations, Map<String, Object> metaData) {
super(name, docCount, aggregations, metaData);
InternalGlobal(String name, long docCount, InternalAggregations aggregations, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, docCount, aggregations, reducers, metaData);
}
@Override
@ -60,6 +62,6 @@ public class InternalGlobal extends InternalSingleBucketAggregation implements G
@Override
protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) {
return new InternalGlobal(name, docCount, subAggregations, getMetaData());
return new InternalGlobal(name, docCount, subAggregations, reducers(), getMetaData());
}
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -62,9 +63,10 @@ public class HistogramAggregator extends BucketsAggregator {
boolean keyed, long minDocCount, @Nullable ExtendedBounds extendedBounds,
@Nullable ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
InternalHistogram.Factory<?> histogramFactory,
AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
AggregationContext aggregationContext,
Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, metaData);
super(name, factories, aggregationContext, parent, reducers, metaData);
this.rounding = rounding;
this.order = order;
this.keyed = keyed;
@ -130,13 +132,14 @@ public class HistogramAggregator extends BucketsAggregator {
// value source will be null for unmapped fields
InternalHistogram.EmptyBucketInfo emptyBucketInfo = minDocCount == 0 ? new InternalHistogram.EmptyBucketInfo(rounding, buildEmptySubAggregations(), extendedBounds) : null;
return histogramFactory.create(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, metaData());
return histogramFactory.create(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, reducers(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
InternalHistogram.EmptyBucketInfo emptyBucketInfo = minDocCount == 0 ? new InternalHistogram.EmptyBucketInfo(rounding, buildEmptySubAggregations(), extendedBounds) : null;
return histogramFactory.create(name, Collections.emptyList(), order, minDocCount, emptyBucketInfo, formatter, keyed, metaData());
return histogramFactory.create(name, Collections.emptyList(), order, minDocCount, emptyBucketInfo, formatter, keyed, reducers(),
metaData());
}
@Override
@ -167,12 +170,15 @@ public class HistogramAggregator extends BucketsAggregator {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, null, null, config.formatter(), histogramFactory, aggregationContext, parent, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, null, null, config.formatter(),
histogramFactory, aggregationContext, parent, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, aggregationContext, parent);
}
@ -185,7 +191,8 @@ public class HistogramAggregator extends BucketsAggregator {
extendedBounds.processAndValidate(name, aggregationContext.searchContext(), config.parser());
roundedBounds = extendedBounds.round(rounding);
}
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, roundedBounds, valuesSource, config.formatter(), histogramFactory, aggregationContext, parent, metaData);
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, roundedBounds, valuesSource,
config.formatter(), histogramFactory, aggregationContext, parent, reducers, metaData);
}
}

View File

@ -22,6 +22,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.EmptyBucketInfo;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -75,8 +76,10 @@ public class InternalDateHistogram {
@Override
public InternalHistogram create(String name, List<InternalDateHistogram.Bucket> buckets, InternalOrder order,
long minDocCount, EmptyBucketInfo emptyBucketInfo, @Nullable ValueFormatter formatter, boolean keyed, Map<String, Object> metaData) {
return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, this, metaData);
long minDocCount,
EmptyBucketInfo emptyBucketInfo, @Nullable ValueFormatter formatter, boolean keyed, List<Reducer> reducers,
Map<String, Object> metaData) {
return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, this, reducers, metaData);
}
@Override

View File

@ -37,6 +37,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
@ -233,8 +234,9 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
}
public InternalHistogram<B> create(String name, List<B> buckets, InternalOrder order, long minDocCount,
EmptyBucketInfo emptyBucketInfo, @Nullable ValueFormatter formatter, boolean keyed, Map<String, Object> metaData) {
return new InternalHistogram<>(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, this, metaData);
EmptyBucketInfo emptyBucketInfo, @Nullable ValueFormatter formatter, boolean keyed, List<Reducer> reducers,
Map<String, Object> metaData) {
return new InternalHistogram<>(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, this, reducers, metaData);
}
public B createBucket(long key, long docCount, InternalAggregations aggregations, boolean keyed, @Nullable ValueFormatter formatter) {
@ -259,8 +261,8 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
InternalHistogram(String name, List<B> buckets, InternalOrder order, long minDocCount,
EmptyBucketInfo emptyBucketInfo,
@Nullable ValueFormatter formatter, boolean keyed, Factory<B> factory, Map<String, Object> metaData) {
super(name, metaData);
@Nullable ValueFormatter formatter, boolean keyed, Factory<B> factory, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
this.buckets = buckets;
this.order = order;
assert (minDocCount == 0) == (emptyBucketInfo != null);
@ -432,7 +434,8 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
CollectionUtil.introSort(reducedBuckets, order.comparator());
}
return getFactory().create(getName(), reducedBuckets, order, minDocCount, emptyBucketInfo, formatter, keyed, getMetaData());
return getFactory().create(getName(), reducedBuckets, order, minDocCount, emptyBucketInfo, formatter, keyed, reducers(),
getMetaData());
}
@Override

View File

@ -22,8 +22,10 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -50,8 +52,8 @@ public class InternalMissing extends InternalSingleBucketAggregation implements
InternalMissing() {
}
InternalMissing(String name, long docCount, InternalAggregations aggregations, Map<String, Object> metaData) {
super(name, docCount, aggregations, metaData);
InternalMissing(String name, long docCount, InternalAggregations aggregations, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, docCount, aggregations, reducers, metaData);
}
@Override
@ -61,6 +63,6 @@ public class InternalMissing extends InternalSingleBucketAggregation implements
@Override
protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) {
return new InternalMissing(name, docCount, subAggregations, getMetaData());
return new InternalMissing(name, docCount, subAggregations, reducers(), getMetaData());
}
}

View File

@ -26,12 +26,14 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -42,8 +44,9 @@ public class MissingAggregator extends SingleBucketAggregator {
private final ValuesSource valuesSource;
public MissingAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource,
AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, metaData);
AggregationContext aggregationContext,
Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, reducers, metaData);
this.valuesSource = valuesSource;
}
@ -69,12 +72,13 @@ public class MissingAggregator extends SingleBucketAggregator {
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
return new InternalMissing(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), metaData());
return new InternalMissing(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), reducers(),
metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalMissing(name, 0, buildEmptySubAggregations(), metaData());
return new InternalMissing(name, 0, buildEmptySubAggregations(), reducers(), metaData());
}
public static class Factory extends ValuesSourceAggregatorFactory<ValuesSource> {
@ -84,13 +88,15 @@ public class MissingAggregator extends SingleBucketAggregator {
}
@Override
protected MissingAggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new MissingAggregator(name, factories, null, aggregationContext, parent, metaData);
protected MissingAggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new MissingAggregator(name, factories, null, aggregationContext, parent, reducers, metaData);
}
@Override
protected MissingAggregator doCreateInternal(ValuesSource valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new MissingAggregator(name, factories, valuesSource, aggregationContext, parent, metaData);
protected MissingAggregator doCreateInternal(ValuesSource valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new MissingAggregator(name, factories, valuesSource, aggregationContext, parent, reducers, metaData);
}
}

View File

@ -22,8 +22,10 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -49,8 +51,9 @@ public class InternalNested extends InternalSingleBucketAggregation implements N
public InternalNested() {
}
public InternalNested(String name, long docCount, InternalAggregations aggregations, Map<String, Object> metaData) {
super(name, docCount, aggregations, metaData);
public InternalNested(String name, long docCount, InternalAggregations aggregations, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, docCount, aggregations, reducers, metaData);
}
@Override
@ -60,6 +63,6 @@ public class InternalNested extends InternalSingleBucketAggregation implements N
@Override
protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) {
return new InternalNested(name, docCount, subAggregations, getMetaData());
return new InternalNested(name, docCount, subAggregations, reducers(), getMetaData());
}
}

View File

@ -22,8 +22,10 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -49,8 +51,9 @@ public class InternalReverseNested extends InternalSingleBucketAggregation imple
public InternalReverseNested() {
}
public InternalReverseNested(String name, long docCount, InternalAggregations aggregations, Map<String, Object> metaData) {
super(name, docCount, aggregations, metaData);
public InternalReverseNested(String name, long docCount, InternalAggregations aggregations, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, docCount, aggregations, reducers, metaData);
}
@Override
@ -60,6 +63,6 @@ public class InternalReverseNested extends InternalSingleBucketAggregation imple
@Override
protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) {
return new InternalReverseNested(name, docCount, subAggregations, getMetaData());
return new InternalReverseNested(name, docCount, subAggregations, reducers(), getMetaData());
}
}

View File

@ -39,9 +39,11 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -55,8 +57,8 @@ public class NestedAggregator extends SingleBucketAggregator {
private DocIdSetIterator childDocs;
private BitSet parentDocs;
public NestedAggregator(String name, AggregatorFactories factories, ObjectMapper objectMapper, AggregationContext aggregationContext, Aggregator parentAggregator, Map<String, Object> metaData, FilterCachingPolicy filterCachingPolicy) throws IOException {
super(name, factories, aggregationContext, parentAggregator, metaData);
public NestedAggregator(String name, AggregatorFactories factories, ObjectMapper objectMapper, AggregationContext aggregationContext, Aggregator parentAggregator, List<Reducer> reducers, Map<String, Object> metaData, FilterCachingPolicy filterCachingPolicy) throws IOException {
super(name, factories, aggregationContext, parentAggregator, reducers, metaData);
childFilter = aggregationContext.searchContext().filterCache().cache(objectMapper.nestedTypeFilter(), null, filterCachingPolicy);
}
@ -64,68 +66,69 @@ public class NestedAggregator extends SingleBucketAggregator {
public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException {
// Reset parentFilter, so we resolve the parentDocs for each new segment being searched
this.parentFilter = null;
// In ES if parent is deleted, then also the children are deleted. Therefore acceptedDocs can also be null here.
// In ES if parent is deleted, then also the children are deleted. Therefore acceptedDocs can also be null here.
DocIdSet childDocIdSet = childFilter.getDocIdSet(ctx, null);
if (DocIdSets.isEmpty(childDocIdSet)) {
childDocs = null;
} else {
childDocs = childDocIdSet.iterator();
}
if (DocIdSets.isEmpty(childDocIdSet)) {
childDocs = null;
} else {
childDocs = childDocIdSet.iterator();
}
return new LeafBucketCollectorBase(sub, null) {
@Override
@Override
public void collect(int parentDoc, long bucket) throws IOException {
// here we translate the parent doc to a list of its nested docs, and then call super.collect for every one of them so they'll be collected
// here we translate the parent doc to a list of its nested docs, and then call super.collect for every one of them so they'll be collected
// if parentDoc is 0 then this means that this parent doesn't have child docs (b/c these appear always before the parent doc), so we can skip:
if (parentDoc == 0 || childDocs == null) {
return;
}
if (parentFilter == null) {
// The aggs are instantiated in reverse, first the most inner nested aggs and lastly the top level aggs
// So at the time a nested 'nested' aggs is parsed its closest parent nested aggs hasn't been constructed.
// So the trick is to set at the last moment just before needed and we can use its child filter as the
// parent filter.
// if parentDoc is 0 then this means that this parent doesn't have child docs (b/c these appear always before the parent doc), so we can skip:
if (parentDoc == 0 || childDocs == null) {
return;
}
if (parentFilter == null) {
// The aggs are instantiated in reverse, first the most inner nested aggs and lastly the top level aggs
// So at the time a nested 'nested' aggs is parsed its closest parent nested aggs hasn't been constructed.
// So the trick is to set at the last moment just before needed and we can use its child filter as the
// parent filter.
// Additional NOTE: Before this logic was performed in the setNextReader(...) method, but the assumption
// that aggs instances are constructed in reverse doesn't hold when buckets are constructed lazily during
// aggs execution
// Additional NOTE: Before this logic was performed in the setNextReader(...) method, but the assumption
// that aggs instances are constructed in reverse doesn't hold when buckets are constructed lazily during
// aggs execution
Filter parentFilterNotCached = findClosestNestedPath(parent());
if (parentFilterNotCached == null) {
parentFilterNotCached = NonNestedDocsFilter.INSTANCE;
}
parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilterNotCached);
if (parentFilterNotCached == null) {
parentFilterNotCached = NonNestedDocsFilter.INSTANCE;
}
parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilterNotCached);
BitDocIdSet parentSet = parentFilter.getDocIdSet(ctx);
if (DocIdSets.isEmpty(parentSet)) {
// There are no parentDocs in the segment, so return and set childDocs to null, so we exit early for future invocations.
childDocs = null;
return;
} else {
parentDocs = parentSet.bits();
}
}
if (DocIdSets.isEmpty(parentSet)) {
// There are no parentDocs in the segment, so return and set childDocs to null, so we exit early for future invocations.
childDocs = null;
return;
} else {
parentDocs = parentSet.bits();
}
}
final int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
int childDocId = childDocs.docID();
if (childDocId <= prevParentDoc) {
childDocId = childDocs.advance(prevParentDoc + 1);
}
final int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
int childDocId = childDocs.docID();
if (childDocId <= prevParentDoc) {
childDocId = childDocs.advance(prevParentDoc + 1);
}
for (; childDocId < parentDoc; childDocId = childDocs.nextDoc()) {
for (; childDocId < parentDoc; childDocId = childDocs.nextDoc()) {
collectBucket(sub, childDocId, bucket);
}
}
}
};
}
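
Note: the collect(...) logic above depends on Lucene's block-join layout, where all nested (child) docs of a parent are indexed in the same block immediately before the parent doc, so the children of parentDoc are exactly the doc IDs between the previous parent and parentDoc. A minimal standalone sketch of that doc-ID arithmetic, using java.util.BitSet and a sorted int array as hypothetical stand-ins for the Lucene BitSet and DocIdSetIterator used above:

import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;

// Minimal sketch (not Elasticsearch code): the children of a parent occupy the doc IDs
// strictly between the previous parent doc and the parent doc itself.
public class NestedDocWalk {

    /** Returns the child doc IDs belonging to parentDoc, mirroring prevSetBit/advance above. */
    static List<Integer> childrenOf(int parentDoc, BitSet parentDocs, int[] childDocs) {
        List<Integer> result = new ArrayList<>();
        int prevParentDoc = parentDocs.previousSetBit(parentDoc - 1); // -1 if there is no earlier parent
        for (int childDoc : childDocs) {
            if (childDoc > prevParentDoc && childDoc < parentDoc) {
                result.add(childDoc);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // docs 0,1 are children of parent 2; docs 3,4,5 are children of parent 6
        BitSet parents = new BitSet();
        parents.set(2);
        parents.set(6);
        int[] children = {0, 1, 3, 4, 5};
        System.out.println(childrenOf(6, parents, children)); // [3, 4, 5]
    }
}

The real aggregator walks the same bounds with prevSetBit(parentDoc - 1) and advance(prevParentDoc + 1) on an iterator instead of a linear scan.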
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
return new InternalNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), metaData());
return new InternalNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), reducers(),
metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalNested(name, 0, buildEmptySubAggregations(), metaData());
return new InternalNested(name, 0, buildEmptySubAggregations(), reducers(), metaData());
}
private static Filter findClosestNestedPath(Aggregator parent) {
@ -151,33 +154,35 @@ public class NestedAggregator extends SingleBucketAggregator {
}
@Override
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, context, parent);
}
MapperService.SmartNameObjectMapper mapper = context.searchContext().smartNameObjectMapper(path);
if (mapper == null) {
return new Unmapped(name, context, parent, metaData);
return new Unmapped(name, context, parent, reducers, metaData);
}
ObjectMapper objectMapper = mapper.mapper();
if (objectMapper == null) {
return new Unmapped(name, context, parent, metaData);
return new Unmapped(name, context, parent, reducers, metaData);
}
if (!objectMapper.nested().isNested()) {
throw new AggregationExecutionException("[nested] nested path [" + path + "] is not nested");
}
return new NestedAggregator(name, factories, objectMapper, context, parent, metaData, filterCachingPolicy);
return new NestedAggregator(name, factories, objectMapper, context, parent, reducers, metaData, filterCachingPolicy);
}
private final static class Unmapped extends NonCollectingAggregator {
public Unmapped(String name, AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
public Unmapped(String name, AggregationContext context, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
super(name, context, parent, reducers, metaData);
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalNested(name, 0, buildEmptySubAggregations(), metaData());
return new InternalNested(name, 0, buildEmptySubAggregations(), reducers(), metaData());
}
}
}
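
Note: the same plumbing pattern repeats in every file of this commit: the factory's createInternal/createUnmapped receive a List<Reducer>, the aggregator constructor stores it, and buildAggregation/buildEmptyAggregation pass reducers() into the InternalAggregation they build. A stripped-down hypothetical sketch of that hand-off (the Sketch* types are illustrative placeholders, not the Elasticsearch classes):

import java.util.Collections;
import java.util.List;

// Illustrative placeholders only: shows how a reducers list travels from the
// factory call, through the aggregator, onto the result object it builds.
interface SketchReducer {}

class SketchResult {
    final String name;
    final long docCount;
    final List<SketchReducer> reducers;

    SketchResult(String name, long docCount, List<SketchReducer> reducers) {
        this.name = name;
        this.docCount = docCount;
        this.reducers = reducers;
    }
}

class SketchAggregator {
    private final String name;
    private final List<SketchReducer> reducers;
    private long docCount;

    // a factory's createInternal(...) would call this with the reducers it was handed
    SketchAggregator(String name, List<SketchReducer> reducers) {
        this.name = name;
        this.reducers = reducers == null ? Collections.<SketchReducer>emptyList() : reducers;
    }

    List<SketchReducer> reducers() {
        return reducers;
    }

    void collect() {
        docCount++;
    }

    SketchResult buildAggregation() {
        return new SketchResult(name, docCount, reducers()); // reducers ride along on the result
    }

    SketchResult buildEmptyAggregation() {
        return new SketchResult(name, 0, reducers());
    }
}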

View File

@ -40,9 +40,11 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -52,8 +54,10 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
private final BitDocIdSetFilter parentFilter;
public ReverseNestedAggregator(String name, AggregatorFactories factories, ObjectMapper objectMapper, AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, metaData);
public ReverseNestedAggregator(String name, AggregatorFactories factories, ObjectMapper objectMapper,
AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
super(name, factories, aggregationContext, parent, reducers, metaData);
if (objectMapper == null) {
parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(NonNestedDocsFilter.INSTANCE);
} else {
@ -64,33 +68,33 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
@Override
protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException {
// In ES if parent is deleted, then also the children are deleted, so the child docs this agg receives
// must belong to parent docs that are alive. For this reason acceptedDocs can be null here.
BitDocIdSet docIdSet = parentFilter.getDocIdSet(ctx);
final BitSet parentDocs;
if (DocIdSets.isEmpty(docIdSet)) {
return LeafBucketCollector.NO_OP_COLLECTOR;
} else {
parentDocs = docIdSet.bits();
}
final LongIntOpenHashMap bucketOrdToLastCollectedParentDoc = new LongIntOpenHashMap(32);
return new LeafBucketCollectorBase(sub, null) {
@Override
public void collect(int childDoc, long bucket) throws IOException {
// fast forward to retrieve the parentDoc this childDoc belongs to
final int parentDoc = parentDocs.nextSetBit(childDoc);
assert childDoc <= parentDoc && parentDoc != DocIdSetIterator.NO_MORE_DOCS;
if (bucketOrdToLastCollectedParentDoc.containsKey(bucket)) {
int lastCollectedParentDoc = bucketOrdToLastCollectedParentDoc.lget();
if (parentDoc > lastCollectedParentDoc) {
collectBucket(sub, parentDoc, bucket);
bucketOrdToLastCollectedParentDoc.lset(parentDoc);
}
} else {
collectBucket(sub, parentDoc, bucket);
bucketOrdToLastCollectedParentDoc.put(bucket, parentDoc);
}
}
};
}
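
Note: ReverseNestedAggregator walks the join in the opposite direction: each child doc is fast-forwarded to the first parent doc at or after it, and bucketOrdToLastCollectedParentDoc ensures a parent is counted at most once per bucket. A small sketch of that dedup idea, with java.util.HashMap and java.util.BitSet standing in (purely for illustration) for LongIntOpenHashMap and the Lucene BitSet:

import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;

// Sketch only: map each child doc to its owning parent and emit the parent at most once per bucket.
public class ReverseNestedSketch {

    private final Map<Long, Integer> lastCollectedParentPerBucket = new HashMap<>();

    /** Returns the parent doc to collect for this (childDoc, bucket), or -1 if it was already collected. */
    int collect(int childDoc, long bucket, BitSet parentDocs) {
        int parentDoc = parentDocs.nextSetBit(childDoc); // fast forward to the owning parent
        Integer last = lastCollectedParentPerBucket.get(bucket);
        if (last != null && parentDoc <= last) {
            return -1; // this parent was already collected for this bucket
        }
        lastCollectedParentPerBucket.put(bucket, parentDoc);
        return parentDoc;
    }

    public static void main(String[] args) {
        BitSet parents = new BitSet();
        parents.set(2);
        parents.set(6);
        ReverseNestedSketch sketch = new ReverseNestedSketch();
        System.out.println(sketch.collect(0, 0L, parents)); // 2
        System.out.println(sketch.collect(1, 0L, parents)); // -1 (parent 2 already collected for bucket 0)
        System.out.println(sketch.collect(3, 0L, parents)); // 6
    }
}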
@ -105,12 +109,13 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
return new InternalReverseNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), metaData());
return new InternalReverseNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), reducers(),
metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalReverseNested(name, 0, buildEmptySubAggregations(), metaData());
return new InternalReverseNested(name, 0, buildEmptySubAggregations(), reducers(), metaData());
}
Filter getParentFilter() {
@ -127,7 +132,8 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
}
@Override
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
// Early validation
NestedAggregator closestNestedAggregator = findClosestNestedAggregator(parent);
if (closestNestedAggregator == null) {
@ -138,11 +144,11 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
if (path != null) {
MapperService.SmartNameObjectMapper mapper = context.searchContext().smartNameObjectMapper(path);
if (mapper == null) {
return new Unmapped(name, context, parent, metaData);
return new Unmapped(name, context, parent, reducers, metaData);
}
objectMapper = mapper.mapper();
if (objectMapper == null) {
return new Unmapped(name, context, parent, metaData);
return new Unmapped(name, context, parent, reducers, metaData);
}
if (!objectMapper.nested().isNested()) {
throw new AggregationExecutionException("[reverse_nested] nested path [" + path + "] is not nested");
@ -150,18 +156,19 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
} else {
objectMapper = null;
}
return new ReverseNestedAggregator(name, factories, objectMapper, context, parent, metaData);
return new ReverseNestedAggregator(name, factories, objectMapper, context, parent, reducers, metaData);
}
private final static class Unmapped extends NonCollectingAggregator {
public Unmapped(String name, AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
public Unmapped(String name, AggregationContext context, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
super(name, context, parent, reducers, metaData);
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalReverseNested(name, 0, buildEmptySubAggregations(), metaData());
return new InternalReverseNested(name, 0, buildEmptySubAggregations(), reducers(), metaData());
}
}
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
@ -219,8 +220,9 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalMulti
return TYPE.name();
}
public R create(String name, List<B> ranges, @Nullable ValueFormatter formatter, boolean keyed, Map<String, Object> metaData) {
return (R) new InternalRange<>(name, ranges, formatter, keyed, metaData);
public R create(String name, List<B> ranges, @Nullable ValueFormatter formatter, boolean keyed, List<Reducer> reducers,
Map<String, Object> metaData) {
return (R) new InternalRange<>(name, ranges, formatter, keyed, reducers, metaData);
}
@ -236,8 +238,9 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalMulti
public InternalRange() {} // for serialization
public InternalRange(String name, List<B> ranges, @Nullable ValueFormatter formatter, boolean keyed, Map<String, Object> metaData) {
super(name, metaData);
public InternalRange(String name, List<B> ranges, @Nullable ValueFormatter formatter, boolean keyed, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, reducers, metaData);
this.ranges = ranges;
this.formatter = formatter;
this.keyed = keyed;
@ -277,7 +280,7 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalMulti
for (int i = 0; i < this.ranges.size(); ++i) {
ranges.add((B) rangeList[i].get(0).reduce(rangeList[i], reduceContext));
}
return getFactory().create(name, ranges, formatter, keyed, getMetaData());
return getFactory().create(name, ranges, formatter, keyed, reducers(), getMetaData());
}
@Override

View File

@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -104,10 +105,10 @@ public class RangeAggregator extends BucketsAggregator {
List<Range> ranges,
boolean keyed,
AggregationContext aggregationContext,
Aggregator parent,
Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, metaData);
super(name, factories, aggregationContext, parent, reducers, metaData);
assert valuesSource != null;
this.valuesSource = valuesSource;
this.formatter = format != null ? format.formatter() : null;
@ -139,64 +140,64 @@ public class RangeAggregator extends BucketsAggregator {
final LeafBucketCollector sub) throws IOException {
final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long bucket) throws IOException {
values.setDocument(doc);
final int valuesCount = values.count();
for (int i = 0, lo = 0; i < valuesCount; ++i) {
final double value = values.valueAt(i);
lo = collect(doc, value, bucket, lo);
}
}
private int collect(int doc, double value, long owningBucketOrdinal, int lowBound) throws IOException {
int lo = lowBound, hi = ranges.length - 1; // all candidates are between these indexes
int mid = (lo + hi) >>> 1;
while (lo <= hi) {
if (value < ranges[mid].from) {
hi = mid - 1;
} else if (value >= maxTo[mid]) {
lo = mid + 1;
} else {
break;
}
mid = (lo + hi) >>> 1;
}
if (lo > hi) return lo; // no potential candidate
// binary search the lower bound
int startLo = lo, startHi = mid;
while (startLo <= startHi) {
final int startMid = (startLo + startHi) >>> 1;
if (value >= maxTo[startMid]) {
startLo = startMid + 1;
} else {
startHi = startMid - 1;
}
}
// binary search the upper bound
int endLo = mid, endHi = hi;
while (endLo <= endHi) {
final int endMid = (endLo + endHi) >>> 1;
if (value < ranges[endMid].from) {
endHi = endMid - 1;
} else {
endLo = endMid + 1;
}
}
assert startLo == lowBound || value >= maxTo[startLo - 1];
assert endHi == ranges.length - 1 || value < ranges[endHi + 1].from;
for (int i = startLo; i <= endHi; ++i) {
if (ranges[i].matches(value)) {
collectBucket(sub, doc, subBucketOrdinal(owningBucketOrdinal, i));
}
}
return endHi + 1;
}
};
}
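
Note: collect(doc, value, owningBucketOrdinal, lowBound) above narrows the candidate ranges with binary searches; this works because the ranges are sorted by from and maxTo[i] holds the running maximum of to over ranges[0..i], so every range that can still contain value lies between the two bounds. A compact sketch of the same idea (linear scans replace the binary searches for brevity; the arrays are hypothetical inputs, not the aggregator's fields):

import java.util.ArrayList;
import java.util.List;

// Sketch only: find all ranges [from, to) containing a value, given ranges sorted by 'from'
// and maxTo[i] = running maximum of 'to' over ranges[0..i].
public class RangeLookupSketch {

    static List<Integer> matching(double value, double[] from, double[] to, double[] maxTo) {
        // first index whose running-max 'to' is still above the value
        int start = 0;
        while (start < from.length && value >= maxTo[start]) {
            start++;
        }
        // last index whose 'from' is not above the value
        int end = from.length - 1;
        while (end >= 0 && value < from[end]) {
            end--;
        }
        List<Integer> hits = new ArrayList<>();
        for (int i = start; i <= end; i++) {
            if (value >= from[i] && value < to[i]) {
                hits.add(i);
            }
        }
        return hits;
    }

    public static void main(String[] args) {
        double[] from = {0, 5, 10};
        double[] to = {10, 15, 20};
        double[] maxTo = {10, 15, 20}; // running max of 'to'
        System.out.println(matching(7, from, to, maxTo)); // [0, 1]
    }
}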
@ -215,7 +216,7 @@ public class RangeAggregator extends BucketsAggregator {
buckets.add(bucket);
}
// value source can be null in the case of unmapped fields
return rangeFactory.create(name, buckets, formatter, keyed, metaData());
return rangeFactory.create(name, buckets, formatter, keyed, reducers(), metaData());
}
@Override
@ -229,7 +230,7 @@ public class RangeAggregator extends BucketsAggregator {
buckets.add(bucket);
}
// value source can be null in the case of unmapped fields
return rangeFactory.create(name, buckets, formatter, keyed, metaData());
return rangeFactory.create(name, buckets, formatter, keyed, reducers(), metaData());
}
private static final void sortRanges(final Range[] ranges) {
@ -266,10 +267,10 @@ public class RangeAggregator extends BucketsAggregator {
ValueFormat format,
AggregationContext context,
Aggregator parent,
InternalRange.Factory factory,
InternalRange.Factory factory, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
super(name, context, parent, reducers, metaData);
this.ranges = ranges;
ValueParser parser = format != null ? format.parser() : ValueParser.RAW;
for (Range range : this.ranges) {
@ -287,7 +288,7 @@ public class RangeAggregator extends BucketsAggregator {
for (RangeAggregator.Range range : ranges) {
buckets.add(factory.createBucket(range.key, range.from, range.to, 0, subAggs, keyed, formatter));
}
return factory.create(name, buckets, formatter, keyed, metaData());
return factory.create(name, buckets, formatter, keyed, reducers(), metaData());
}
}
@ -305,13 +306,15 @@ public class RangeAggregator extends BucketsAggregator {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new Unmapped(name, ranges, keyed, config.format(), aggregationContext, parent, rangeFactory, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new Unmapped(name, ranges, keyed, config.format(), aggregationContext, parent, rangeFactory, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new RangeAggregator(name, factories, valuesSource, config.format(), rangeFactory, ranges, keyed, aggregationContext, parent, metaData);
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new RangeAggregator(name, factories, valuesSource, config.format(), rangeFactory, ranges, keyed, aggregationContext, parent, reducers, metaData);
}
}

View File

@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -120,8 +121,9 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket> {
}
@Override
public InternalDateRange create(String name, List<InternalDateRange.Bucket> ranges, ValueFormatter formatter, boolean keyed, Map<String, Object> metaData) {
return new InternalDateRange(name, ranges, formatter, keyed, metaData);
public InternalDateRange create(String name, List<InternalDateRange.Bucket> ranges, ValueFormatter formatter, boolean keyed,
List<Reducer> reducers, Map<String, Object> metaData) {
return new InternalDateRange(name, ranges, formatter, keyed, reducers, metaData);
}
@Override
@ -132,8 +134,9 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket> {
InternalDateRange() {} // for serialization
InternalDateRange(String name, List<InternalDateRange.Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed, Map<String, Object> metaData) {
super(name, ranges, formatter, keyed, metaData);
InternalDateRange(String name, List<InternalDateRange.Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed,
List<Reducer> reducers, Map<String, Object> metaData) {
super(name, ranges, formatter, keyed, reducers, metaData);
}
@Override

View File

@ -35,6 +35,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Unmapped;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.GeoPointParser;
import org.elasticsearch.search.aggregations.support.ValuesSource;
@ -179,14 +180,18 @@ public class GeoDistanceParser implements Aggregator.Parser {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new Unmapped(name, ranges, keyed, null, aggregationContext, parent, rangeFactory, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new Unmapped(name, ranges, keyed, null, aggregationContext, parent, rangeFactory, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(final ValuesSource.GeoPoint valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
protected Aggregator doCreateInternal(final ValuesSource.GeoPoint valuesSource, AggregationContext aggregationContext,
Aggregator parent, boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
DistanceSource distanceSource = new DistanceSource(valuesSource, distanceType, origin, unit);
return new RangeAggregator(name, factories, distanceSource, null, rangeFactory, ranges, keyed, aggregationContext, parent, metaData);
return new RangeAggregator(name, factories, distanceSource, null, rangeFactory, ranges, keyed, aggregationContext, parent,
reducers, metaData);
}
private static class DistanceSource extends ValuesSource.Numeric {

View File

@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
@ -108,8 +109,9 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
}
@Override
public InternalGeoDistance create(String name, List<Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed, Map<String, Object> metaData) {
return new InternalGeoDistance(name, ranges, formatter, keyed, metaData);
public InternalGeoDistance create(String name, List<Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed,
List<Reducer> reducers, Map<String, Object> metaData) {
return new InternalGeoDistance(name, ranges, formatter, keyed, reducers, metaData);
}
@Override
@ -120,8 +122,9 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
InternalGeoDistance() {} // for serialization
public InternalGeoDistance(String name, List<Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed, Map<String, Object> metaData) {
super(name, ranges, formatter, keyed, metaData);
public InternalGeoDistance(String name, List<Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, ranges, formatter, keyed, reducers, metaData);
}
@Override

View File

@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
@ -117,8 +118,9 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket> {
}
@Override
public InternalIPv4Range create(String name, List<Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed, Map<String, Object> metaData) {
return new InternalIPv4Range(name, ranges, keyed, metaData);
public InternalIPv4Range create(String name, List<Bucket> ranges, @Nullable ValueFormatter formatter, boolean keyed,
List<Reducer> reducers, Map<String, Object> metaData) {
return new InternalIPv4Range(name, ranges, keyed, reducers, metaData);
}
@Override
@ -129,8 +131,9 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket> {
public InternalIPv4Range() {} // for serialization
public InternalIPv4Range(String name, List<InternalIPv4Range.Bucket> ranges, boolean keyed, Map<String, Object> metaData) {
super(name, ranges, ValueFormatter.IPv4, keyed, metaData);
public InternalIPv4Range(String name, List<InternalIPv4Range.Bucket> ranges, boolean keyed, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, ranges, ValueFormatter.IPv4, keyed, reducers, metaData);
}
@Override

View File

@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.bucket.terms.GlobalOrdinalsStringTermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.internal.ContextIndexSearcher;
@ -36,6 +37,7 @@ import org.elasticsearch.search.internal.ContextIndexSearcher;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
@ -49,9 +51,10 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri
public GlobalOrdinalsSignificantTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource,
BucketCountThresholds bucketCountThresholds,
IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent,
SignificantTermsAggregatorFactory termsAggFactory, Map<String, Object> metaData) throws IOException {
SignificantTermsAggregatorFactory termsAggFactory, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, valuesSource, null, bucketCountThresholds, includeExclude, aggregationContext, parent, SubAggCollectionMode.DEPTH_FIRST, false, metaData);
super(name, factories, valuesSource, maxOrd, null, bucketCountThresholds, includeExclude, aggregationContext, parent,
SubAggCollectionMode.DEPTH_FIRST, false, reducers, metaData);
this.termsAggFactory = termsAggFactory;
}
@ -62,8 +65,8 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri
@Override
public void collect(int doc, long bucket) throws IOException {
super.collect(doc, bucket);
numCollectedDocs++;
}
};
}
@ -124,7 +127,9 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri
list[i] = bucket;
}
return new SignificantStringTerms(subsetSize, supersetSize, name, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(), Arrays.asList(list), metaData());
return new SignificantStringTerms(subsetSize, supersetSize, name, bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(), Arrays.asList(list), reducers(),
metaData());
}
@Override
@ -133,7 +138,9 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri
ContextIndexSearcher searcher = context.searchContext().searcher();
IndexReader topReader = searcher.getIndexReader();
int supersetSize = topReader.numDocs();
return new SignificantStringTerms(0, supersetSize, name, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(), Collections.<InternalSignificantTerms.Bucket>emptyList(), metaData());
return new SignificantStringTerms(0, supersetSize, name, bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(),
Collections.<InternalSignificantTerms.Bucket> emptyList(), reducers(), metaData());
}
@Override
@ -145,8 +152,8 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri
private final LongHash bucketOrds;
public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, Map<String, Object> metaData) throws IOException {
super(name, factories, valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, termsAggFactory, metaData);
public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, termsAggFactory, reducers, metaData);
bucketOrds = new LongHash(1, aggregationContext.bigArrays());
}
@ -157,20 +164,20 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri
@Override
public void collect(int doc, long bucket) throws IOException {
assert bucket == 0;
numCollectedDocs++;
globalOrds.setDocument(doc);
final int numOrds = globalOrds.cardinality();
for (int i = 0; i < numOrds; i++) {
final long globalOrd = globalOrds.ordAt(i);
long bucketOrd = bucketOrds.add(globalOrd);
if (bucketOrd < 0) {
bucketOrd = -1 - bucketOrd;
collectExistingBucket(sub, doc, bucketOrd);
} else {
collectBucket(sub, doc, bucketOrd);
}
}
}
};
}
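
Note: WithHash.collect relies on the LongHash contract that add(key) returns a fresh non-negative ordinal the first time a key is seen and -1 - existingOrd on later calls, which is what the bucketOrd < 0 branch above decodes. A toy illustration of that encoding with a plain HashMap standing in for LongHash:

import java.util.HashMap;
import java.util.Map;

// Toy stand-in for the LongHash add() contract used above:
// a non-negative result is a freshly assigned ordinal, a negative result encodes an existing one.
public class OrdinalHashSketch {

    private final Map<Long, Long> ords = new HashMap<>();

    long add(long key) {
        Long existing = ords.get(key);
        if (existing != null) {
            return -1 - existing; // already present: encode the existing ordinal
        }
        long ord = ords.size();
        ords.put(key, ord);
        return ord;
    }

    public static void main(String[] args) {
        OrdinalHashSketch hash = new OrdinalHashSketch();
        System.out.println(hash.add(42)); // 0  -> new bucket, collectBucket(...)
        System.out.println(hash.add(42)); // -1 -> existing bucket 0, collectExistingBucket(...)
        System.out.println(hash.add(7));  // 1  -> new bucket
    }
}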

View File

@ -27,6 +27,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.util.ArrayList;
import java.util.Arrays;
@ -122,8 +123,9 @@ public abstract class InternalSignificantTerms extends InternalMultiBucketAggreg
}
}
protected InternalSignificantTerms(long subsetSize, long supersetSize, String name, int requiredSize, long minDocCount, SignificanceHeuristic significanceHeuristic, List<Bucket> buckets, Map<String, Object> metaData) {
super(name, metaData);
protected InternalSignificantTerms(long subsetSize, long supersetSize, String name, int requiredSize, long minDocCount,
SignificanceHeuristic significanceHeuristic, List<Bucket> buckets, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
this.requiredSize = requiredSize;
this.minDocCount = minDocCount;
this.buckets = buckets;

View File

@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
@ -159,9 +160,11 @@ public class SignificantLongTerms extends InternalSignificantTerms {
} // for serialization
public SignificantLongTerms(long subsetSize, long supersetSize, String name, @Nullable ValueFormatter formatter,
int requiredSize, long minDocCount, SignificanceHeuristic significanceHeuristic, List<InternalSignificantTerms.Bucket> buckets, Map<String, Object> metaData) {
int requiredSize,
long minDocCount, SignificanceHeuristic significanceHeuristic, List<InternalSignificantTerms.Bucket> buckets,
List<Reducer> reducers, Map<String, Object> metaData) {
super(subsetSize, supersetSize, name, requiredSize, minDocCount, significanceHeuristic, buckets, metaData);
super(subsetSize, supersetSize, name, requiredSize, minDocCount, significanceHeuristic, buckets, reducers, metaData);
this.formatter = formatter;
}
@ -173,7 +176,8 @@ public class SignificantLongTerms extends InternalSignificantTerms {
@Override
InternalSignificantTerms newAggregation(long subsetSize, long supersetSize,
List<InternalSignificantTerms.Bucket> buckets) {
return new SignificantLongTerms(subsetSize, supersetSize, getName(), formatter, requiredSize, minDocCount, significanceHeuristic, buckets, getMetaData());
return new SignificantLongTerms(subsetSize, supersetSize, getName(), formatter, requiredSize, minDocCount, significanceHeuristic,
buckets, reducers(), getMetaData());
}
@Override

View File

@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.bucket.terms.LongTermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
@ -36,6 +37,7 @@ import org.elasticsearch.search.internal.ContextIndexSearcher;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
@ -45,9 +47,12 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator {
public SignificantLongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format,
BucketCountThresholds bucketCountThresholds,
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, IncludeExclude.LongFilter includeExclude, Map<String, Object> metaData) throws IOException {
AggregationContext aggregationContext,
Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, IncludeExclude.LongFilter includeExclude,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, valuesSource, format, null, bucketCountThresholds, aggregationContext, parent, SubAggCollectionMode.DEPTH_FIRST, false, includeExclude, metaData);
super(name, factories, valuesSource, format, null, bucketCountThresholds, aggregationContext, parent,
SubAggCollectionMode.DEPTH_FIRST, false, includeExclude, reducers, metaData);
this.termsAggFactory = termsAggFactory;
}
@ -102,7 +107,9 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator {
bucket.aggregations = bucketAggregations(bucket.bucketOrd);
list[i] = bucket;
}
return new SignificantLongTerms(subsetSize, supersetSize, name, formatter, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(), Arrays.asList(list), metaData());
return new SignificantLongTerms(subsetSize, supersetSize, name, formatter, bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(), Arrays.asList(list), reducers(),
metaData());
}
@Override
@ -111,7 +118,9 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator {
ContextIndexSearcher searcher = context.searchContext().searcher();
IndexReader topReader = searcher.getIndexReader();
int supersetSize = topReader.numDocs();
return new SignificantLongTerms(0, supersetSize, name, formatter, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(), Collections.<InternalSignificantTerms.Bucket>emptyList(), metaData());
return new SignificantLongTerms(0, supersetSize, name, formatter, bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(),
Collections.<InternalSignificantTerms.Bucket> emptyList(), reducers(), metaData());
}
@Override

View File

@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.ArrayList;
@ -152,8 +153,10 @@ public class SignificantStringTerms extends InternalSignificantTerms {
SignificantStringTerms() {} // for serialization
public SignificantStringTerms(long subsetSize, long supersetSize, String name, int requiredSize,
long minDocCount, SignificanceHeuristic significanceHeuristic, List<InternalSignificantTerms.Bucket> buckets, Map<String, Object> metaData) {
super(subsetSize, supersetSize, name, requiredSize, minDocCount, significanceHeuristic, buckets, metaData);
long minDocCount,
SignificanceHeuristic significanceHeuristic, List<InternalSignificantTerms.Bucket> buckets, List<Reducer> reducers,
Map<String, Object> metaData) {
super(subsetSize, supersetSize, name, requiredSize, minDocCount, significanceHeuristic, buckets, reducers, metaData);
}
@Override
@ -164,7 +167,8 @@ public class SignificantStringTerms extends InternalSignificantTerms {
@Override
InternalSignificantTerms newAggregation(long subsetSize, long supersetSize,
List<InternalSignificantTerms.Bucket> buckets) {
return new SignificantStringTerms(subsetSize, supersetSize, getName(), requiredSize, minDocCount, significanceHeuristic, buckets, getMetaData());
return new SignificantStringTerms(subsetSize, supersetSize, getName(), requiredSize, minDocCount, significanceHeuristic, buckets,
reducers(), getMetaData());
}
@Override

View File

@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.bucket.terms.StringTermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.internal.ContextIndexSearcher;
@ -35,6 +36,7 @@ import org.elasticsearch.search.internal.ContextIndexSearcher;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
@ -48,9 +50,11 @@ public class SignificantStringTermsAggregator extends StringTermsAggregator {
public SignificantStringTermsAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource,
BucketCountThresholds bucketCountThresholds,
IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent,
SignificantTermsAggregatorFactory termsAggFactory, Map<String, Object> metaData) throws IOException {
SignificantTermsAggregatorFactory termsAggFactory, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
super(name, factories, valuesSource, null, bucketCountThresholds, includeExclude, aggregationContext, parent, SubAggCollectionMode.DEPTH_FIRST, false, metaData);
super(name, factories, valuesSource, null, bucketCountThresholds, includeExclude, aggregationContext, parent,
SubAggCollectionMode.DEPTH_FIRST, false, reducers, metaData);
this.termsAggFactory = termsAggFactory;
}
@ -107,7 +111,9 @@ public class SignificantStringTermsAggregator extends StringTermsAggregator {
list[i] = bucket;
}
return new SignificantStringTerms(subsetSize, supersetSize, name, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(), Arrays.asList(list), metaData());
return new SignificantStringTerms(subsetSize, supersetSize, name, bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(), Arrays.asList(list), reducers(),
metaData());
}
@Override
@ -116,7 +122,9 @@ public class SignificantStringTermsAggregator extends StringTermsAggregator {
ContextIndexSearcher searcher = context.searchContext().searcher();
IndexReader topReader = searcher.getIndexReader();
int supersetSize = topReader.numDocs();
return new SignificantStringTerms(0, supersetSize, name, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(), Collections.<InternalSignificantTerms.Bucket>emptyList(), metaData());
return new SignificantStringTerms(0, supersetSize, name, bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getMinDocCount(), termsAggFactory.getSignificanceHeuristic(),
Collections.<InternalSignificantTerms.Bucket> emptyList(), reducers(), metaData());
}
@Override

View File

@ -39,6 +39,7 @@ import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -46,6 +47,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -64,8 +66,10 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory, Map<String, Object> metaData) throws IOException {
return new SignificantStringTermsAggregator(name, factories, valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, termsAggregatorFactory, metaData);
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new SignificantStringTermsAggregator(name, factories, valuesSource, bucketCountThresholds, includeExclude,
aggregationContext, parent, termsAggregatorFactory, reducers, metaData);
}
},
@ -74,10 +78,11 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory, Map<String, Object> metaData) throws IOException {
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
ValuesSource.Bytes.WithOrdinals valueSourceWithOrdinals = (ValuesSource.Bytes.WithOrdinals) valuesSource;
IndexSearcher indexSearcher = aggregationContext.searchContext().searcher();
return new GlobalOrdinalsSignificantTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, termsAggregatorFactory, metaData);
return new GlobalOrdinalsSignificantTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, termsAggregatorFactory, reducers, metaData);
}
},
@ -86,8 +91,11 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory, Map<String, Object> metaData) throws IOException {
return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, termsAggregatorFactory, metaData);
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories,
(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, includeExclude,
aggregationContext, parent, termsAggregatorFactory, reducers, metaData);
}
};
@ -108,7 +116,8 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
abstract Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory, Map<String, Object> metaData) throws IOException;
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException;
@Override
public String toString() {
@ -145,9 +154,11 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
final InternalAggregation aggregation = new UnmappedSignificantTerms(name, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), metaData);
return new NonCollectingAggregator(name, aggregationContext, parent, metaData) {
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
final InternalAggregation aggregation = new UnmappedSignificantTerms(name, bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getMinDocCount(), reducers, metaData);
return new NonCollectingAggregator(name, aggregationContext, parent, reducers, metaData) {
@Override
public InternalAggregation buildEmptyAggregation() {
return aggregation;
@ -156,7 +167,8 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
}
@Override
protected Aggregator doCreateInternal(ValuesSource valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
protected Aggregator doCreateInternal(ValuesSource valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, aggregationContext, parent);
}
@ -179,7 +191,8 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
}
}
assert execution != null;
return execution.create(name, factories, valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, this, metaData);
return execution.create(name, factories, valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, this,
reducers, metaData);
}
@ -197,7 +210,8 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
if (includeExclude != null) {
longFilter = includeExclude.convertToLongFilter();
}
return new SignificantLongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), bucketCountThresholds, aggregationContext, parent, this, longFilter, metaData);
return new SignificantLongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(),
bucketCountThresholds, aggregationContext, parent, this, longFilter, reducers, metaData);
}
throw new AggregationExecutionException("sigfnificant_terms aggregation cannot be applied to field [" + config.fieldContext().field() +

View File

@ -24,9 +24,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -56,10 +56,10 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms {
UnmappedSignificantTerms() {} // for serialization
public UnmappedSignificantTerms(String name, int requiredSize, long minDocCount, Map<String, Object> metaData) {
public UnmappedSignificantTerms(String name, int requiredSize, long minDocCount, List<Reducer> reducers, Map<String, Object> metaData) {
//We pass zero for index/subset sizes because for the purpose of significant term analysis
// we assume an unmapped index's size is irrelevant to the proceedings.
super(0, 0, name, requiredSize, minDocCount, JLHScore.INSTANCE, BUCKETS, metaData);
super(0, 0, name, requiredSize, minDocCount, JLHScore.INSTANCE, BUCKETS, reducers, metaData);
}
@Override

View File

@ -22,27 +22,30 @@ package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
abstract class AbstractStringTermsAggregator extends TermsAggregator {
protected final boolean showTermDocCountError;
public AbstractStringTermsAggregator(String name, AggregatorFactories factories,
AggregationContext context, Aggregator parent,
Terms.Order order, BucketCountThresholds bucketCountThresholds,
SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, Map<String, Object> metaData) throws IOException {
super(name, factories, context, parent, bucketCountThresholds, order, subAggCollectMode, metaData);
public AbstractStringTermsAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent,
Terms.Order order, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, context, parent, bucketCountThresholds, order, subAggCollectMode, reducers, metaData);
this.showTermDocCountError = showTermDocCountError;
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), Collections.<InternalTerms.Bucket>emptyList(), showTermDocCountError, 0, 0, metaData());
return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
bucketCountThresholds.getMinDocCount(), Collections.<InternalTerms.Bucket> emptyList(), showTermDocCountError, 0, 0,
reducers(), metaData());
}
}

View File

@ -27,6 +27,7 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
@ -156,8 +157,11 @@ public class DoubleTerms extends InternalTerms {
DoubleTerms() {} // for serialization
public DoubleTerms(String name, Terms.Order order, @Nullable ValueFormatter formatter, int requiredSize, int shardSize, long minDocCount, List<InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, Map<String, Object> metaData) {
super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, metaData);
public DoubleTerms(String name, Terms.Order order, @Nullable ValueFormatter formatter, int requiredSize, int shardSize,
long minDocCount, List<InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount,
List<Reducer> reducers, Map<String, Object> metaData) {
super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, reducers,
metaData);
this.formatter = formatter;
}
@ -167,8 +171,10 @@ public class DoubleTerms extends InternalTerms {
}
@Override
protected InternalTerms newAggregation(String name, List<InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, Map<String, Object> metaData) {
return new DoubleTerms(name, order, formatter, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, metaData);
protected InternalTerms newAggregation(String name, List<InternalTerms.Bucket> buckets, boolean showTermDocCountError,
long docCountError, long otherDocCount, List<Reducer> reducers, Map<String, Object> metaData) {
return new DoubleTerms(name, order, formatter, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError,
otherDocCount, reducers, metaData);
}
@Override

View File

@ -26,6 +26,7 @@ import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
@ -33,6 +34,7 @@ import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
@ -41,8 +43,11 @@ import java.util.Map;
public class DoubleTermsAggregator extends LongTermsAggregator {
public DoubleTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format,
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, IncludeExclude.LongFilter longFilter, Map<String, Object> metaData) throws IOException {
super(name, factories, valuesSource, format, order, bucketCountThresholds, aggregationContext, parent, collectionMode, showTermDocCountError, longFilter, metaData);
Terms.Order order, BucketCountThresholds bucketCountThresholds,
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError,
IncludeExclude.LongFilter longFilter, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, valuesSource, format, order, bucketCountThresholds, aggregationContext, parent, collectionMode,
showTermDocCountError, longFilter, reducers, metaData);
}
@Override
@ -73,7 +78,9 @@ public class DoubleTermsAggregator extends LongTermsAggregator {
for (int i = 0; i < buckets.length; ++i) {
buckets[i] = convertToDouble(buckets[i]);
}
return new DoubleTerms(terms.getName(), terms.order, terms.formatter, terms.requiredSize, terms.shardSize, terms.minDocCount, Arrays.asList(buckets), terms.showTermDocCountError, terms.docCountError, terms.otherDocCount, terms.getMetaData());
return new DoubleTerms(terms.getName(), terms.order, terms.formatter, terms.requiredSize, terms.shardSize, terms.minDocCount,
Arrays.asList(buckets), terms.showTermDocCountError, terms.docCountError, terms.otherDocCount, terms.reducers(),
terms.getMetaData());
}
}

View File

@ -44,11 +44,13 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.bucket.terms.InternalTerms.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
@ -71,8 +73,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
public GlobalOrdinalsStringTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource,
Terms.Order order, BucketCountThresholds bucketCountThresholds,
IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, order, bucketCountThresholds, collectionMode, showTermDocCountError, metaData);
IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, order, bucketCountThresholds, collectionMode, showTermDocCountError, reducers, metaData);
this.valuesSource = valuesSource;
this.includeExclude = includeExclude;
}
@ -196,7 +198,9 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
bucket.docCountError = 0;
}
return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, metaData());
return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, reducers(),
metaData());
}
/**
@ -261,8 +265,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource,
Terms.Order order, BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, AggregationContext aggregationContext,
Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, Map<String, Object> metaData) throws IOException {
super(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, collectionMode, showTermDocCountError, metaData);
Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, collectionMode, showTermDocCountError, reducers, metaData);
bucketOrds = new LongHash(1, aggregationContext.bigArrays());
}
@ -329,8 +333,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
private RandomAccessOrds segmentOrds;
public LowCardinality(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource,
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, Map<String, Object> metaData) throws IOException {
super(name, factories, valuesSource, order, bucketCountThresholds, null, aggregationContext, parent, collectionMode, showTermDocCountError, metaData);
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, valuesSource, order, bucketCountThresholds, null, aggregationContext, parent, collectionMode, showTermDocCountError, reducers, metaData);
assert factories == null || factories.count() == 0;
this.segmentDocCounts = context.bigArrays().newIntArray(1, true);
}
@ -409,7 +413,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
}
final long ord = i - 1; // remember we do +1 when counting
final long globalOrd = mapping == null ? ord : mapping.getGlobalOrd(ord);
incrementBucketDocCount(globalOrd, inc);
incrementBucketDocCount(globalOrd, inc);
}
}
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.util.ArrayList;
@ -121,8 +122,9 @@ public abstract class InternalTerms extends InternalMultiBucketAggregation imple
protected InternalTerms() {} // for serialization
protected InternalTerms(String name, Terms.Order order, int requiredSize, int shardSize, long minDocCount, List<Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, Map<String, Object> metaData) {
super(name, metaData);
protected InternalTerms(String name, Terms.Order order, int requiredSize, int shardSize, long minDocCount, List<Bucket> buckets,
boolean showTermDocCountError, long docCountError, long otherDocCount, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
this.order = order;
this.requiredSize = requiredSize;
this.shardSize = shardSize;
@ -220,9 +222,10 @@ public abstract class InternalTerms extends InternalMultiBucketAggregation imple
} else {
docCountError = aggregations.size() == 1 ? 0 : sumDocCountError;
}
return newAggregation(name, Arrays.asList(list), showTermDocCountError, docCountError, otherDocCount, getMetaData());
return newAggregation(name, Arrays.asList(list), showTermDocCountError, docCountError, otherDocCount, reducers(), getMetaData());
}
protected abstract InternalTerms newAggregation(String name, List<Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, Map<String, Object> metaData);
protected abstract InternalTerms newAggregation(String name, List<Bucket> buckets, boolean showTermDocCountError, long docCountError,
long otherDocCount, List<Reducer> reducers, Map<String, Object> metaData);
}
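
The abstract newAggregation(...) hook above is why the parameter also has to flow through InternalTerms: the reduce step merges shard-level buckets and then asks the concrete subtype (StringTerms, LongTerms, DoubleTerms) to rebuild itself, handing it the same reducers and metadata. A small standalone sketch of that template-method shape follows; the names are hypothetical and the merge is deliberately trivial, not the Elasticsearch implementation.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    interface Reducer {}                                  // stand-in only

    abstract class AbstractTermsLike {
        final String name;
        final List<Reducer> reducers;
        final List<Long> bucketCounts;

        AbstractTermsLike(String name, List<Reducer> reducers, List<Long> bucketCounts) {
            this.name = name;
            this.reducers = reducers;
            this.bucketCounts = bucketCounts;
        }

        // Merge shard results, then let the concrete subtype rebuild itself,
        // forwarding the reducers so the reduced aggregation still carries them.
        AbstractTermsLike reduce(List<AbstractTermsLike> shards) {
            List<Long> merged = new ArrayList<>();
            for (AbstractTermsLike shard : shards) {
                merged.addAll(shard.bucketCounts);
            }
            return newAggregation(name, merged, reducers);
        }

        protected abstract AbstractTermsLike newAggregation(String name, List<Long> buckets, List<Reducer> reducers);
    }

    class LongTermsLike extends AbstractTermsLike {
        LongTermsLike(String name, List<Reducer> reducers, List<Long> buckets) {
            super(name, reducers, buckets);
        }
        @Override
        protected AbstractTermsLike newAggregation(String name, List<Long> buckets, List<Reducer> reducers) {
            return new LongTermsLike(name, reducers, buckets);
        }
    }

    public class NewAggregationSketch {
        public static void main(String[] args) {
            List<Reducer> none = Collections.emptyList();
            AbstractTermsLike s1 = new LongTermsLike("terms", none, Arrays.asList(3L, 5L));
            AbstractTermsLike s2 = new LongTermsLike("terms", none, Arrays.asList(7L));
            System.out.println(s1.reduce(Arrays.asList(s1, s2)).bucketCounts); // [3, 5, 7]
        }
    }
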

View File

@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
@ -155,8 +156,11 @@ public class LongTerms extends InternalTerms {
LongTerms() {} // for serialization
public LongTerms(String name, Terms.Order order, @Nullable ValueFormatter formatter, int requiredSize, int shardSize, long minDocCount, List<InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, Map<String, Object> metaData) {
super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, metaData);
public LongTerms(String name, Terms.Order order, @Nullable ValueFormatter formatter, int requiredSize, int shardSize, long minDocCount,
List<InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount,
List<Reducer> reducers, Map<String, Object> metaData) {
super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, reducers,
metaData);
this.formatter = formatter;
}
@ -166,8 +170,10 @@ public class LongTerms extends InternalTerms {
}
@Override
protected InternalTerms newAggregation(String name, List<InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, Map<String, Object> metaData) {
return new LongTerms(name, order, formatter, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, metaData);
protected InternalTerms newAggregation(String name, List<InternalTerms.Bucket> buckets, boolean showTermDocCountError,
long docCountError, long otherDocCount, List<Reducer> reducers, Map<String, Object> metaData) {
return new LongTerms(name, order, formatter, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError,
otherDocCount, reducers, metaData);
}
@Override

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude.LongFilter;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
@ -39,6 +40,7 @@ import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
@ -53,15 +55,17 @@ public class LongTermsAggregator extends TermsAggregator {
private LongFilter longFilter;
public LongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format,
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, IncludeExclude.LongFilter longFilter, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, bucketCountThresholds, order, subAggCollectMode, metaData);
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent,
SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, IncludeExclude.LongFilter longFilter,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, bucketCountThresholds, order, subAggCollectMode, reducers, metaData);
this.valuesSource = valuesSource;
this.showTermDocCountError = showTermDocCountError;
this.formatter = format != null ? format.formatter() : null;
this.longFilter = longFilter;
bucketOrds = new LongHash(1, aggregationContext.bigArrays());
}
@Override
public boolean needsScores() {
return (valuesSource != null && valuesSource.needsScores()) || super.needsScores();
@ -76,30 +80,30 @@ public class LongTermsAggregator extends TermsAggregator {
final LeafBucketCollector sub) throws IOException {
final SortedNumericDocValues values = getValues(valuesSource, ctx);
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long owningBucketOrdinal) throws IOException {
assert owningBucketOrdinal == 0;
values.setDocument(doc);
final int valuesCount = values.count();
@Override
public void collect(int doc, long owningBucketOrdinal) throws IOException {
assert owningBucketOrdinal == 0;
values.setDocument(doc);
final int valuesCount = values.count();
long previous = Long.MAX_VALUE;
for (int i = 0; i < valuesCount; ++i) {
final long val = values.valueAt(i);
if (previous != val || i == 0) {
if ((longFilter == null) || (longFilter.accept(val))) {
long bucketOrdinal = bucketOrds.add(val);
if (bucketOrdinal < 0) { // already seen
bucketOrdinal = - 1 - bucketOrdinal;
long previous = Long.MAX_VALUE;
for (int i = 0; i < valuesCount; ++i) {
final long val = values.valueAt(i);
if (previous != val || i == 0) {
if ((longFilter == null) || (longFilter.accept(val))) {
long bucketOrdinal = bucketOrds.add(val);
if (bucketOrdinal < 0) { // already seen
bucketOrdinal = - 1 - bucketOrdinal;
collectExistingBucket(sub, doc, bucketOrdinal);
} else {
} else {
collectBucket(sub, doc, bucketOrdinal);
}
}
previous = val;
}
}
}
previous = val;
}
}
}
};
}
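
The reindented collect() above keeps its original logic: the doc values arrive sorted, so a single comparison with the previous value is enough to skip consecutive duplicates before the optional LongFilter check and the bucketOrds lookup (a negative add() return means the term was already seen). The following is a standalone model of just that dedup loop, not the aggregator itself.

    public class DistinctSortedValuesSketch {

        // Values for one document arrive in sorted order, so comparing against the
        // previous value is enough to handle each distinct value exactly once.
        static int countDistinct(long[] sortedValues) {
            int distinct = 0;
            long previous = Long.MAX_VALUE;
            for (int i = 0; i < sortedValues.length; i++) {
                final long val = sortedValues[i];
                if (previous != val || i == 0) {      // same guard as the aggregator's loop
                    distinct++;                       // the aggregator adds to bucketOrds and collects here
                    previous = val;
                }
            }
            return distinct;
        }

        public static void main(String[] args) {
            System.out.println(countDistinct(new long[] {1, 1, 2, 2, 2, 5})); // 3
        }
    }
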
@ -148,7 +152,7 @@ public class LongTermsAggregator extends TermsAggregator {
list[i] = bucket;
otherDocCount -= bucket.docCount;
}
runDeferredCollections(survivingBucketOrds);
//Now build the aggs
@ -156,14 +160,18 @@ public class LongTermsAggregator extends TermsAggregator {
list[i].aggregations = bucketAggregations(list[i].bucketOrd);
list[i].docCountError = 0;
}
return new LongTerms(name, order, formatter, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, metaData());
return new LongTerms(name, order, formatter, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, reducers(),
metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new LongTerms(name, order, formatter, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), Collections.<InternalTerms.Bucket>emptyList(), showTermDocCountError, 0, 0, metaData());
return new LongTerms(name, order, formatter, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
bucketCountThresholds.getMinDocCount(), Collections.<InternalTerms.Bucket> emptyList(), showTermDocCountError, 0, 0,
reducers(), metaData());
}
@Override

View File

@ -27,6 +27,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.ArrayList;
@ -150,8 +151,11 @@ public class StringTerms extends InternalTerms {
StringTerms() {} // for serialization
public StringTerms(String name, Terms.Order order, int requiredSize, int shardSize, long minDocCount, List<InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, Map<String, Object> metaData) {
super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, metaData);
public StringTerms(String name, Terms.Order order, int requiredSize, int shardSize, long minDocCount,
List<InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount,
List<Reducer> reducers, Map<String, Object> metaData) {
super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, reducers,
metaData);
}
@Override
@ -160,8 +164,10 @@ public class StringTerms extends InternalTerms {
}
@Override
protected InternalTerms newAggregation(String name, List<InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, Map<String, Object> metaData) {
return new StringTerms(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, metaData);
protected InternalTerms newAggregation(String name, List<InternalTerms.Bucket> buckets, boolean showTermDocCountError,
long docCountError, long otherDocCount, List<Reducer> reducers, Map<String, Object> metaData) {
return new StringTerms(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError,
otherDocCount, reducers, metaData);
}
@Override

View File

@ -31,11 +31,13 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
@ -49,9 +51,12 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
public StringTermsAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource,
Terms.Order order, BucketCountThresholds bucketCountThresholds,
IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, Map<String, Object> metaData) throws IOException {
IncludeExclude includeExclude, AggregationContext aggregationContext,
Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, order, bucketCountThresholds, collectionMode, showTermDocCountError, metaData);
super(name, factories, aggregationContext, parent, order, bucketCountThresholds, collectionMode, showTermDocCountError, reducers,
metaData);
this.valuesSource = valuesSource;
this.includeExclude = includeExclude;
bucketOrds = new BytesRefHash(1, aggregationContext.bigArrays());
@ -158,7 +163,9 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
bucket.docCountError = 0;
}
return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, metaData());
return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, reducers(),
metaData());
}
@Override

View File

@ -28,11 +28,13 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.InternalOrder.Aggregation;
import org.elasticsearch.search.aggregations.bucket.terms.InternalOrder.CompoundOrder;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.AggregationPath;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -135,8 +137,8 @@ public abstract class TermsAggregator extends BucketsAggregator {
protected final Set<Aggregator> aggsUsedForSorting = new HashSet<>();
protected final SubAggCollectionMode collectMode;
public TermsAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent, BucketCountThresholds bucketCountThresholds, Terms.Order order, SubAggCollectionMode collectMode, Map<String, Object> metaData) throws IOException {
super(name, factories, context, parent, metaData);
public TermsAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent, BucketCountThresholds bucketCountThresholds, Terms.Order order, SubAggCollectionMode collectMode, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, factories, context, parent, reducers, metaData);
this.bucketCountThresholds = bucketCountThresholds;
this.order = InternalOrder.validate(order, this);
this.collectMode = collectMode;

View File

@ -1,4 +1,4 @@
/*
List</*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
@ -29,12 +29,14 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -49,8 +51,10 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
Terms.Order order, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, Map<String, Object> metaData) throws IOException {
return new StringTermsAggregator(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, subAggCollectMode, showTermDocCountError, metaData);
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new StringTermsAggregator(name, factories, valuesSource, order, bucketCountThresholds, includeExclude,
aggregationContext, parent, subAggCollectMode, showTermDocCountError, reducers, metaData);
}
@Override
@ -64,8 +68,8 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
Terms.Order order, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, Map<String, Object> metaData) throws IOException {
return new GlobalOrdinalsStringTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, subAggCollectMode, showTermDocCountError, metaData);
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new GlobalOrdinalsStringTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, subAggCollectMode, showTermDocCountError, reducers, metaData);
}
@Override
@ -79,8 +83,8 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
Terms.Order order, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, Map<String, Object> metaData) throws IOException {
return new GlobalOrdinalsStringTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, subAggCollectMode, showTermDocCountError, metaData);
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new GlobalOrdinalsStringTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, subAggCollectMode, showTermDocCountError, reducers, metaData);
}
@Override
@ -93,11 +97,12 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
Terms.Order order, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, Map<String, Object> metaData) throws IOException {
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
if (includeExclude != null || factories.count() > 0) {
return GLOBAL_ORDINALS.create(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, subAggCollectMode, showTermDocCountError, metaData);
return GLOBAL_ORDINALS.create(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, subAggCollectMode, showTermDocCountError, reducers, metaData);
}
return new GlobalOrdinalsStringTermsAggregator.LowCardinality(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, order, bucketCountThresholds, aggregationContext, parent, subAggCollectMode, showTermDocCountError, metaData);
return new GlobalOrdinalsStringTermsAggregator.LowCardinality(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, order, bucketCountThresholds, aggregationContext, parent, subAggCollectMode, showTermDocCountError, reducers, metaData);
}
@Override
@ -124,7 +129,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
abstract Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource,
Terms.Order order, TermsAggregator.BucketCountThresholds bucketCountThresholds,
IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent,
SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, Map<String, Object> metaData) throws IOException;
SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, List<Reducer> reducers, Map<String, Object> metaData) throws IOException;
abstract boolean needsGlobalOrdinals();
@ -152,9 +157,11 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
final InternalAggregation aggregation = new UnmappedTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), metaData);
return new NonCollectingAggregator(name, aggregationContext, parent, factories, metaData) {
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
final InternalAggregation aggregation = new UnmappedTerms(name, order, bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), reducers, metaData);
return new NonCollectingAggregator(name, aggregationContext, parent, factories, reducers, metaData) {
{
// even in the case of an unmapped aggregator, validate the order
InternalOrder.validate(order, this);
@ -167,7 +174,8 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
}
@Override
protected Aggregator doCreateInternal(ValuesSource valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
protected Aggregator doCreateInternal(ValuesSource valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, aggregationContext, parent);
}
@ -217,7 +225,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
}
assert execution != null;
return execution.create(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, collectMode, showTermDocCountError, metaData);
return execution.create(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext, parent, collectMode, showTermDocCountError, reducers, metaData);
}
if ((includeExclude != null) && (includeExclude.isRegexBased())) {
@ -233,13 +241,14 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
}
return new DoubleTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(),
order, bucketCountThresholds, aggregationContext, parent, collectMode,
showTermDocCountError, longFilter, metaData);
showTermDocCountError, longFilter, reducers,
metaData);
}
if (includeExclude != null) {
longFilter = includeExclude.convertToLongFilter();
}
return new LongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(),
order, bucketCountThresholds, aggregationContext, parent, collectMode, showTermDocCountError, longFilter, metaData);
order, bucketCountThresholds, aggregationContext, parent, collectMode, showTermDocCountError, longFilter, reducers, metaData);
}
throw new AggregationExecutionException("terms aggregation cannot be applied to field [" + config.fieldContext().field() +

View File

@ -23,6 +23,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.Collections;
@ -54,8 +55,9 @@ public class UnmappedTerms extends InternalTerms {
UnmappedTerms() {} // for serialization
public UnmappedTerms(String name, Terms.Order order, int requiredSize, int shardSize, long minDocCount, Map<String, Object> metaData) {
super(name, order, requiredSize, shardSize, minDocCount, BUCKETS, false, 0, 0, metaData);
public UnmappedTerms(String name, Terms.Order order, int requiredSize, int shardSize, long minDocCount, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, order, requiredSize, shardSize, minDocCount, BUCKETS, false, 0, 0, reducers, metaData);
}
@Override
@ -91,7 +93,8 @@ public class UnmappedTerms extends InternalTerms {
}
@Override
protected InternalTerms newAggregation(String name, List<Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, Map<String, Object> metaData) {
protected InternalTerms newAggregation(String name, List<Bucket> buckets, boolean showTermDocCountError, long docCountError,
long otherDocCount, List<Reducer> reducers, Map<String, Object> metaData) {
throw new UnsupportedOperationException("How did you get there?");
}

View File

@ -20,14 +20,16 @@
package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.util.List;
import java.util.Map;
public abstract class InternalMetricsAggregation extends InternalAggregation {
protected InternalMetricsAggregation() {} // for serialization
protected InternalMetricsAggregation(String name, Map<String, Object> metaData) {
super(name, metaData);
protected InternalMetricsAggregation(String name, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
}
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.util.List;
@ -35,8 +36,8 @@ public abstract class InternalNumericMetricsAggregation extends InternalMetricsA
protected SingleValue() {}
protected SingleValue(String name, Map<String, Object> metaData) {
super(name, metaData);
protected SingleValue(String name, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
}
public String getValueAsString() {
@ -64,8 +65,8 @@ public abstract class InternalNumericMetricsAggregation extends InternalMetricsA
protected MultiValue() {}
protected MultiValue(String name, Map<String, Object> metaData) {
super(name, metaData);
protected MultiValue(String name, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
}
public abstract double value(String name);
@ -92,8 +93,8 @@ public abstract class InternalNumericMetricsAggregation extends InternalMetricsA
private InternalNumericMetricsAggregation() {} // for serialization
private InternalNumericMetricsAggregation(String name, Map<String, Object> metaData) {
super(name, metaData);
private InternalNumericMetricsAggregation(String name, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
}
}

View File

@ -22,14 +22,17 @@ package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorBase;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
public abstract class MetricsAggregator extends AggregatorBase {
protected MetricsAggregator(String name, AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, AggregatorFactories.EMPTY, context, parent, metaData);
protected MetricsAggregator(String name, AggregationContext context, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, AggregatorFactories.EMPTY, context, parent, reducers, metaData);
}
}

View File

@ -19,9 +19,11 @@
package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -29,14 +31,16 @@ import java.util.Map;
*/
public abstract class NumericMetricsAggregator extends MetricsAggregator {
private NumericMetricsAggregator(String name, AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
private NumericMetricsAggregator(String name, AggregationContext context, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
}
public static abstract class SingleValue extends NumericMetricsAggregator {
protected SingleValue(String name, AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
protected SingleValue(String name, AggregationContext context, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
}
public abstract double metric(long owningBucketOrd);
@ -44,8 +48,9 @@ public abstract class NumericMetricsAggregator extends MetricsAggregator {
public static abstract class MultiValue extends NumericMetricsAggregator {
protected MultiValue(String name, AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
protected MultiValue(String name, AggregationContext context, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
}
public abstract boolean hasMetric(String name);

View File

@ -30,6 +30,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -37,6 +38,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -51,8 +53,9 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue {
ValueFormatter formatter;
public AvgAggregator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name,context, parent, metaData);
AggregationContext context,
Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
this.valuesSource = valuesSource;
this.formatter = formatter;
if (valuesSource != null) {
@ -72,22 +75,22 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue {
final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
}
final BigArrays bigArrays = context.bigArrays();
final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
return new LeafBucketCollectorBase(sub, values) {
@Override
@Override
public void collect(int doc, long bucket) throws IOException {
counts = bigArrays.grow(counts, bucket + 1);
sums = bigArrays.grow(sums, bucket + 1);
values.setDocument(doc);
final int valueCount = values.count();
values.setDocument(doc);
final int valueCount = values.count();
counts.increment(bucket, valueCount);
double sum = 0;
for (int i = 0; i < valueCount; i++) {
sum += values.valueAt(i);
}
double sum = 0;
for (int i = 0; i < valueCount; i++) {
sum += values.valueAt(i);
}
sums.increment(bucket, sum);
}
};
@ -103,12 +106,12 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue {
if (valuesSource == null || bucket >= sums.size()) {
return buildEmptyAggregation();
}
return new InternalAvg(name, sums.get(bucket), counts.get(bucket), formatter, metaData());
return new InternalAvg(name, sums.get(bucket), counts.get(bucket), formatter, reducers(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalAvg(name, 0.0, 0l, formatter, metaData());
return new InternalAvg(name, 0.0, 0l, formatter, reducers(), metaData());
}
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
@ -118,13 +121,15 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new AvgAggregator(name, null, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new AvgAggregator(name, null, config.formatter(), aggregationContext, parent, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new AvgAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new AvgAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, reducers, metaData);
}
}

View File

@ -25,10 +25,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -56,8 +58,9 @@ public class InternalAvg extends InternalNumericMetricsAggregation.SingleValue i
InternalAvg() {} // for serialization
public InternalAvg(String name, double sum, long count, @Nullable ValueFormatter formatter, Map<String, Object> metaData) {
super(name, metaData);
public InternalAvg(String name, double sum, long count, @Nullable ValueFormatter formatter, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, reducers, metaData);
this.sum = sum;
this.count = count;
this.valueFormatter = formatter;
@ -85,7 +88,7 @@ public class InternalAvg extends InternalNumericMetricsAggregation.SingleValue i
count += ((InternalAvg) aggregation).count;
sum += ((InternalAvg) aggregation).sum;
}
return new InternalAvg(getName(), sum, count, valueFormatter, getMetaData());
return new InternalAvg(getName(), sum, count, valueFormatter, reducers(), getMetaData());
}
@Override
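
For the average, the reduce step above accumulates the per-shard sums and counts and builds a fresh InternalAvg that keeps the formatter, reducers() and metadata; the value itself is the global sum divided by the global count, not an average of shard averages. A standalone sketch of that arithmetic, with hypothetical names:

    import java.util.Arrays;
    import java.util.List;

    public class AvgReduceSketch {

        static final class Partial {                   // one shard's contribution: (sum, count)
            final double sum;
            final long count;
            Partial(double sum, long count) { this.sum = sum; this.count = count; }
        }

        static double reduce(List<Partial> shards) {
            double sum = 0;
            long count = 0;
            for (Partial p : shards) {
                sum += p.sum;
                count += p.count;
            }
            return count == 0 ? Double.NaN : sum / count;
        }

        public static void main(String[] args) {
            // shard 1 holds {1, 2, 3} -> (6, 3); shard 2 holds {10} -> (10, 1)
            System.out.println(reduce(Arrays.asList(new Partial(6, 3), new Partial(10, 1)))); // 4.0
        }
    }
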

View File

@ -42,11 +42,13 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -66,8 +68,8 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
private ValueFormatter formatter;
public CardinalityAggregator(String name, ValuesSource valuesSource, boolean rehash, int precision, @Nullable ValueFormatter formatter,
AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
AggregationContext context, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
this.valuesSource = valuesSource;
this.rehash = rehash;
this.precision = precision;
@ -156,12 +158,12 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
// this Aggregator (and its HLL++ counters) is released.
HyperLogLogPlusPlus copy = new HyperLogLogPlusPlus(precision, BigArrays.NON_RECYCLING_INSTANCE, 1);
copy.merge(0, counts, owningBucketOrdinal);
return new InternalCardinality(name, copy, formatter, metaData());
return new InternalCardinality(name, copy, formatter, reducers(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalCardinality(name, null, formatter, metaData());
return new InternalCardinality(name, null, formatter, reducers(), metaData());
}
@Override

View File

@ -22,12 +22,14 @@ package org.elasticsearch.search.aggregations.metrics.cardinality;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import java.io.IOException;
import java.util.List;
import java.util.Map;
final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource> {
@ -46,16 +48,19 @@ final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory<V
}
@Override
protected Aggregator createUnmapped(AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new CardinalityAggregator(name, null, true, precision(parent), config.formatter(), context, parent, metaData);
protected Aggregator createUnmapped(AggregationContext context, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
return new CardinalityAggregator(name, null, true, precision(parent), config.formatter(), context, parent, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource valuesSource, AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
protected Aggregator doCreateInternal(ValuesSource valuesSource, AggregationContext context, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
if (!(valuesSource instanceof ValuesSource.Numeric) && !rehash) {
throw new AggregationExecutionException("Turning off rehashing for cardinality aggregation [" + name + "] on non-numeric values in not allowed");
}
return new CardinalityAggregator(name, valuesSource, rehash, precision(parent), config.formatter(), context, parent, metaData);
return new CardinalityAggregator(name, valuesSource, rehash, precision(parent), config.formatter(), context, parent, reducers,
metaData);
}
/*

View File

@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
@ -53,8 +54,9 @@ public final class InternalCardinality extends InternalNumericMetricsAggregation
private HyperLogLogPlusPlus counts;
InternalCardinality(String name, HyperLogLogPlusPlus counts, @Nullable ValueFormatter formatter, Map<String, Object> metaData) {
super(name, metaData);
InternalCardinality(String name, HyperLogLogPlusPlus counts, @Nullable ValueFormatter formatter, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, reducers, metaData);
this.counts = counts;
this.valueFormatter = formatter;
}
@ -107,7 +109,7 @@ public final class InternalCardinality extends InternalNumericMetricsAggregation
if (cardinality.counts != null) {
if (reduced == null) {
reduced = new InternalCardinality(name, new HyperLogLogPlusPlus(cardinality.counts.precision(),
BigArrays.NON_RECYCLING_INSTANCE, 1), this.valueFormatter, getMetaData());
BigArrays.NON_RECYCLING_INSTANCE, 1), this.valueFormatter, reducers(), getMetaData());
}
reduced.merge(cardinality);
}

View File

@ -30,12 +30,14 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import java.io.IOException;
import java.util.List;
import java.util.Map;
public final class GeoBoundsAggregator extends MetricsAggregator {
@ -50,8 +52,10 @@ public final class GeoBoundsAggregator extends MetricsAggregator {
DoubleArray negRights;
protected GeoBoundsAggregator(String name, AggregationContext aggregationContext,
Aggregator parent, ValuesSource.GeoPoint valuesSource, boolean wrapLongitude, Map<String, Object> metaData) throws IOException {
super(name, aggregationContext, parent, metaData);
Aggregator parent,
ValuesSource.GeoPoint valuesSource, boolean wrapLongitude, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
super(name, aggregationContext, parent, reducers, metaData);
this.valuesSource = valuesSource;
this.wrapLongitude = wrapLongitude;
if (valuesSource != null) {
@ -149,13 +153,13 @@ public final class GeoBoundsAggregator extends MetricsAggregator {
double posRight = posRights.get(owningBucketOrdinal);
double negLeft = negLefts.get(owningBucketOrdinal);
double negRight = negRights.get(owningBucketOrdinal);
return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, metaData());
return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, reducers(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalGeoBounds(name, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY,
Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, wrapLongitude, metaData());
Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, wrapLongitude, reducers(), metaData());
}
@Override
@ -173,14 +177,16 @@ public final class GeoBoundsAggregator extends MetricsAggregator {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new GeoBoundsAggregator(name, aggregationContext, parent, null, wrapLongitude, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new GeoBoundsAggregator(name, aggregationContext, parent, null, wrapLongitude, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.GeoPoint valuesSource, AggregationContext aggregationContext,
Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new GeoBoundsAggregator(name, aggregationContext, parent, valuesSource, wrapLongitude, metaData);
Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new GeoBoundsAggregator(name, aggregationContext, parent, valuesSource, wrapLongitude, reducers, metaData);
}
}

View File

@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.List;
@ -56,8 +57,9 @@ public class InternalGeoBounds extends InternalMetricsAggregation implements Geo
}
InternalGeoBounds(String name, double top, double bottom, double posLeft, double posRight,
double negLeft, double negRight, boolean wrapLongitude, Map<String, Object> metaData) {
super(name, metaData);
double negLeft, double negRight,
boolean wrapLongitude, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
this.top = top;
this.bottom = bottom;
this.posLeft = posLeft;
@ -103,7 +105,7 @@ public class InternalGeoBounds extends InternalMetricsAggregation implements Geo
negRight = bounds.negRight;
}
}
return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, getMetaData());
return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, reducers(), getMetaData());
}
@Override
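
The geo-bounds reduce above merges per-shard bounding boxes by taking the extreme of each edge and, as everywhere else in this commit, constructs the result with reducers(). A simplified standalone sketch of that merge follows; it collapses the posLeft/posRight/negLeft/negRight longitude-wrapping split the real class keeps into a single left/right pair, and the names are hypothetical.

    import java.util.Arrays;
    import java.util.List;

    public class GeoBoundsReduceSketch {

        static final class Bounds {
            final double top, bottom, left, right;
            Bounds(double top, double bottom, double left, double right) {
                this.top = top; this.bottom = bottom; this.left = left; this.right = right;
            }
        }

        // Merged box is the extreme of each edge across all shard-level boxes:
        // top = max latitude, bottom = min latitude, left = min, right = max.
        static Bounds reduce(List<Bounds> shards) {
            double top = Double.NEGATIVE_INFINITY, bottom = Double.POSITIVE_INFINITY;
            double left = Double.POSITIVE_INFINITY, right = Double.NEGATIVE_INFINITY;
            for (Bounds b : shards) {
                top = Math.max(top, b.top);
                bottom = Math.min(bottom, b.bottom);
                left = Math.min(left, b.left);
                right = Math.max(right, b.right);
            }
            return new Bounds(top, bottom, left, right);
        }

        public static void main(String[] args) {
            Bounds merged = reduce(Arrays.asList(new Bounds(10, 5, -3, 2), new Bounds(12, 7, 0, 8)));
            System.out.println(merged.top + " " + merged.bottom + " " + merged.left + " " + merged.right);
            // 12.0 5.0 -3.0 8.0
        }
    }
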

View File

@ -25,10 +25,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -55,8 +57,8 @@ public class InternalMax extends InternalNumericMetricsAggregation.SingleValue i
InternalMax() {} // for serialization
public InternalMax(String name, double max, @Nullable ValueFormatter formatter, Map<String, Object> metaData) {
super(name, metaData);
public InternalMax(String name, double max, @Nullable ValueFormatter formatter, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
this.valueFormatter = formatter;
this.max = max;
}
@ -81,7 +83,7 @@ public class InternalMax extends InternalNumericMetricsAggregation.SingleValue i
for (InternalAggregation aggregation : reduceContext.aggregations()) {
max = Math.max(max, ((InternalMax) aggregation).max);
}
return new InternalMax(name, max, valueFormatter, getMetaData());
return new InternalMax(name, max, valueFormatter, reducers(), getMetaData());
}
@Override

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -38,6 +39,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -51,8 +53,9 @@ public class MaxAggregator extends NumericMetricsAggregator.SingleValue {
DoubleArray maxes;
public MaxAggregator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
AggregationContext context,
Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
this.valuesSource = valuesSource;
this.formatter = formatter;
if (valuesSource != null) {
@ -71,22 +74,22 @@ public class MaxAggregator extends NumericMetricsAggregator.SingleValue {
final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
}
final BigArrays bigArrays = context.bigArrays();
final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx);
final NumericDoubleValues values = MultiValueMode.MAX.select(allValues, Double.NEGATIVE_INFINITY);
return new LeafBucketCollectorBase(sub, allValues) {
@Override
@Override
public void collect(int doc, long bucket) throws IOException {
if (bucket >= maxes.size()) {
long from = maxes.size();
long from = maxes.size();
maxes = bigArrays.grow(maxes, bucket + 1);
maxes.fill(from, maxes.size(), Double.NEGATIVE_INFINITY);
}
final double value = values.get(doc);
maxes.fill(from, maxes.size(), Double.NEGATIVE_INFINITY);
}
final double value = values.get(doc);
double max = maxes.get(bucket);
max = Math.max(max, value);
max = Math.max(max, value);
maxes.set(bucket, max);
}
@ -103,12 +106,12 @@ public class MaxAggregator extends NumericMetricsAggregator.SingleValue {
if (valuesSource == null || bucket >= maxes.size()) {
return buildEmptyAggregation();
}
return new InternalMax(name, maxes.get(bucket), formatter, metaData());
return new InternalMax(name, maxes.get(bucket), formatter, reducers(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalMax(name, Double.NEGATIVE_INFINITY, formatter, metaData());
return new InternalMax(name, Double.NEGATIVE_INFINITY, formatter, reducers(), metaData());
}
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
@ -118,13 +121,15 @@ public class MaxAggregator extends NumericMetricsAggregator.SingleValue {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new MaxAggregator(name, null, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new MaxAggregator(name, null, config.formatter(), aggregationContext, parent, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new MaxAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new MaxAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, reducers, metaData);
}
}

View File

@ -25,10 +25,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -56,8 +58,8 @@ public class InternalMin extends InternalNumericMetricsAggregation.SingleValue i
InternalMin() {} // for serialization
public InternalMin(String name, double min, @Nullable ValueFormatter formatter, Map<String, Object> metaData) {
super(name, metaData);
public InternalMin(String name, double min, @Nullable ValueFormatter formatter, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
this.min = min;
this.valueFormatter = formatter;
}
@ -82,7 +84,7 @@ public class InternalMin extends InternalNumericMetricsAggregation.SingleValue i
for (InternalAggregation aggregation : reduceContext.aggregations()) {
min = Math.min(min, ((InternalMin) aggregation).min);
}
return new InternalMin(getName(), min, this.valueFormatter, getMetaData());
return new InternalMin(getName(), min, this.valueFormatter, reducers(), getMetaData());
}
@Override

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -38,6 +39,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -51,8 +53,9 @@ public class MinAggregator extends NumericMetricsAggregator.SingleValue {
DoubleArray mins;
public MinAggregator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
AggregationContext context,
Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
this.valuesSource = valuesSource;
if (valuesSource != null) {
mins = context.bigArrays().newDoubleArray(1, false);
@ -71,22 +74,22 @@ public class MinAggregator extends NumericMetricsAggregator.SingleValue {
final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
}
final BigArrays bigArrays = context.bigArrays();
final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx);
final NumericDoubleValues values = MultiValueMode.MIN.select(allValues, Double.POSITIVE_INFINITY);
return new LeafBucketCollectorBase(sub, allValues) {
@Override
@Override
public void collect(int doc, long bucket) throws IOException {
if (bucket >= mins.size()) {
long from = mins.size();
long from = mins.size();
mins = bigArrays.grow(mins, bucket + 1);
mins.fill(from, mins.size(), Double.POSITIVE_INFINITY);
}
final double value = values.get(doc);
mins.fill(from, mins.size(), Double.POSITIVE_INFINITY);
}
final double value = values.get(doc);
double min = mins.get(bucket);
min = Math.min(min, value);
min = Math.min(min, value);
mins.set(bucket, min);
}
@ -103,12 +106,12 @@ public class MinAggregator extends NumericMetricsAggregator.SingleValue {
if (valuesSource == null || bucket >= mins.size()) {
return buildEmptyAggregation();
}
return new InternalMin(name, mins.get(bucket), formatter, metaData());
return new InternalMin(name, mins.get(bucket), formatter, reducers(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalMin(name, Double.POSITIVE_INFINITY, formatter, metaData());
return new InternalMin(name, Double.POSITIVE_INFINITY, formatter, reducers(), metaData());
}
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
@ -118,13 +121,15 @@ public class MinAggregator extends NumericMetricsAggregator.SingleValue {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new MinAggregator(name, null, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new MinAggregator(name, null, config.formatter(), aggregationContext, parent, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new MinAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new MinAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, reducers, metaData);
}
}

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestState;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
@ -44,8 +45,9 @@ abstract class AbstractInternalPercentiles extends InternalNumericMetricsAggrega
AbstractInternalPercentiles() {} // for serialization
public AbstractInternalPercentiles(String name, double[] keys, TDigestState state, boolean keyed, @Nullable ValueFormatter formatter,
List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, metaData);
super(name, reducers, metaData);
this.keys = keys;
this.state = state;
this.keyed = keyed;
@ -70,10 +72,11 @@ abstract class AbstractInternalPercentiles extends InternalNumericMetricsAggrega
}
merged.add(percentiles.state);
}
return createReduced(getName(), keys, merged, keyed, getMetaData());
return createReduced(getName(), keys, merged, keyed, reducers(), getMetaData());
}
protected abstract AbstractInternalPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed, Map<String, Object> metaData);
protected abstract AbstractInternalPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed,
List<Reducer> reducers, Map<String, Object> metaData);
@Override
protected void doReadFrom(StreamInput in) throws IOException {

View File

@ -31,11 +31,13 @@ import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestState;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
public abstract class AbstractPercentilesAggregator extends NumericMetricsAggregator.MultiValue {
@ -53,8 +55,9 @@ public abstract class AbstractPercentilesAggregator extends NumericMetricsAggreg
public AbstractPercentilesAggregator(String name, ValuesSource.Numeric valuesSource, AggregationContext context,
Aggregator parent, double[] keys, double compression, boolean keyed,
@Nullable ValueFormatter formatter, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
@Nullable ValueFormatter formatter, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
this.valuesSource = valuesSource;
this.keyed = keyed;
this.formatter = formatter;

View File

@ -24,10 +24,12 @@ import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestState;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
@ -53,8 +55,9 @@ public class InternalPercentileRanks extends AbstractInternalPercentiles impleme
InternalPercentileRanks() {} // for serialization
public InternalPercentileRanks(String name, double[] cdfValues, TDigestState state, boolean keyed, @Nullable ValueFormatter formatter,
List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, cdfValues, state, keyed, formatter, metaData);
super(name, cdfValues, state, keyed, formatter, reducers, metaData);
}
@Override
@ -77,8 +80,9 @@ public class InternalPercentileRanks extends AbstractInternalPercentiles impleme
return percent(key);
}
protected AbstractInternalPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed, Map<String, Object> metaData) {
return new InternalPercentileRanks(name, keys, merged, keyed, valueFormatter, metaData);
protected AbstractInternalPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed,
List<Reducer> reducers, Map<String, Object> metaData) {
return new InternalPercentileRanks(name, keys, merged, keyed, valueFormatter, reducers, metaData);
}
@Override

View File

@ -24,10 +24,12 @@ import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestState;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
@ -53,8 +55,9 @@ public class InternalPercentiles extends AbstractInternalPercentiles implements
InternalPercentiles() {} // for serialization
public InternalPercentiles(String name, double[] percents, TDigestState state, boolean keyed, @Nullable ValueFormatter formatter,
List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, percents, state, keyed, formatter, metaData);
super(name, percents, state, keyed, formatter, reducers, metaData);
}
@Override
@ -77,8 +80,9 @@ public class InternalPercentiles extends AbstractInternalPercentiles implements
return percentile(key);
}
protected AbstractInternalPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed, Map<String, Object> metaData) {
return new InternalPercentiles(name, keys, merged, keyed, valueFormatter, metaData);
protected AbstractInternalPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed,
List<Reducer> reducers, Map<String, Object> metaData) {
return new InternalPercentiles(name, keys, merged, keyed, valueFormatter, reducers, metaData);
}
@Override

View File

@ -22,6 +22,7 @@ import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestState;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
@ -30,6 +31,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -37,10 +39,10 @@ import java.util.Map;
*/
public class PercentileRanksAggregator extends AbstractPercentilesAggregator {
public PercentileRanksAggregator(String name, Numeric valuesSource, AggregationContext context,
Aggregator parent, double[] percents, double compression, boolean keyed, @Nullable ValueFormatter formatter,
Map<String, Object> metaData) throws IOException {
super(name, valuesSource, context, parent, percents, compression, keyed, formatter, metaData);
public PercentileRanksAggregator(String name, Numeric valuesSource, AggregationContext context, Aggregator parent, double[] percents,
double compression, boolean keyed, @Nullable ValueFormatter formatter, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
super(name, valuesSource, context, parent, percents, compression, keyed, formatter, reducers, metaData);
}
@Override
@ -49,13 +51,13 @@ public class PercentileRanksAggregator extends AbstractPercentilesAggregator {
if (state == null) {
return buildEmptyAggregation();
} else {
return new InternalPercentileRanks(name, keys, state, keyed, formatter, metaData());
return new InternalPercentileRanks(name, keys, state, keyed, formatter, reducers(), metaData());
}
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalPercentileRanks(name, keys, new TDigestState(compression), keyed, formatter, metaData());
return new InternalPercentileRanks(name, keys, new TDigestState(compression), keyed, formatter, reducers(), metaData());
}
@Override
@ -83,15 +85,19 @@ public class PercentileRanksAggregator extends AbstractPercentilesAggregator {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new PercentileRanksAggregator(name, null, aggregationContext, parent, values, compression, keyed, config.formatter(),
reducers,
metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new PercentileRanksAggregator(name, valuesSource, aggregationContext, parent, values, compression,
keyed, config.formatter(), metaData);
keyed,
config.formatter(), reducers, metaData);
}
}
}

View File

@ -22,6 +22,7 @@ import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestState;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
@ -30,6 +31,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -38,9 +40,10 @@ import java.util.Map;
public class PercentilesAggregator extends AbstractPercentilesAggregator {
public PercentilesAggregator(String name, Numeric valuesSource, AggregationContext context,
Aggregator parent, double[] percents, double compression, boolean keyed, @Nullable ValueFormatter formatter,
Aggregator parent, double[] percents,
double compression, boolean keyed, @Nullable ValueFormatter formatter, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
super(name, valuesSource, context, parent, percents, compression, keyed, formatter, metaData);
super(name, valuesSource, context, parent, percents, compression, keyed, formatter, reducers, metaData);
}
@Override
@ -49,7 +52,7 @@ public class PercentilesAggregator extends AbstractPercentilesAggregator {
if (state == null) {
return buildEmptyAggregation();
} else {
return new InternalPercentiles(name, keys, state, keyed, formatter, metaData());
return new InternalPercentiles(name, keys, state, keyed, formatter, reducers(), metaData());
}
}
@ -65,7 +68,7 @@ public class PercentilesAggregator extends AbstractPercentilesAggregator {
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalPercentiles(name, keys, new TDigestState(compression), keyed, formatter, metaData());
return new InternalPercentiles(name, keys, new TDigestState(compression), keyed, formatter, reducers(), metaData());
}
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
@ -83,15 +86,19 @@ public class PercentilesAggregator extends AbstractPercentilesAggregator {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new PercentilesAggregator(name, null, aggregationContext, parent, percents, compression, keyed, config.formatter(),
reducers,
metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new PercentilesAggregator(name, valuesSource, aggregationContext, parent, percents, compression,
keyed, config.formatter(), metaData);
keyed,
config.formatter(), reducers, metaData);
}
}
}

View File

@ -28,6 +28,7 @@ import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.io.IOException;
import java.util.ArrayList;
@ -61,13 +62,13 @@ public class InternalScriptedMetric extends InternalMetricsAggregation implement
private InternalScriptedMetric() {
}
private InternalScriptedMetric(String name, Map<String, Object> metaData) {
super(name, metaData);
private InternalScriptedMetric(String name, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
}
public InternalScriptedMetric(String name, Object aggregation, String scriptLang, ScriptType scriptType, String reduceScript,
Map<String, Object> reduceParams, Map<String, Object> metaData) {
this(name, metaData);
Map<String, Object> reduceParams, List<Reducer> reducers, Map<String, Object> metaData) {
this(name, reducers, metaData);
this.aggregation = aggregation;
this.scriptType = scriptType;
this.reduceScript = reduceScript;
@ -104,7 +105,7 @@ public class InternalScriptedMetric extends InternalMetricsAggregation implement
aggregation = aggregationObjects;
}
return new InternalScriptedMetric(firstAggregation.getName(), aggregation, firstAggregation.scriptLang, firstAggregation.scriptType,
firstAggregation.reduceScript, firstAggregation.reduceParams, getMetaData());
firstAggregation.reduceScript, firstAggregation.reduceParams, reducers(), getMetaData());
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.internal.SearchContext;
@ -57,8 +58,9 @@ public class ScriptedMetricAggregator extends MetricsAggregator {
protected ScriptedMetricAggregator(String name, String scriptLang, ScriptType initScriptType, String initScript,
ScriptType mapScriptType, String mapScript, ScriptType combineScriptType, String combineScript, ScriptType reduceScriptType,
String reduceScript, Map<String, Object> params, Map<String, Object> reduceParams, AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
String reduceScript, Map<String, Object> params, Map<String, Object> reduceParams, AggregationContext context,
Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
this.scriptService = context.searchContext().scriptService();
this.scriptLang = scriptLang;
this.reduceScriptType = reduceScriptType;
@ -112,12 +114,13 @@ public class ScriptedMetricAggregator extends MetricsAggregator {
} else {
aggregation = params.get("_agg");
}
return new InternalScriptedMetric(name, aggregation, scriptLang, reduceScriptType, reduceScript, reduceParams, metaData());
return new InternalScriptedMetric(name, aggregation, scriptLang, reduceScriptType, reduceScript, reduceParams, reducers(),
metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalScriptedMetric(name, null, scriptLang, reduceScriptType, reduceScript, reduceParams, metaData());
return new InternalScriptedMetric(name, null, scriptLang, reduceScriptType, reduceScript, reduceParams, reducers(), metaData());
}
public static class Factory extends AggregatorFactory {
@ -151,7 +154,8 @@ public class ScriptedMetricAggregator extends MetricsAggregator {
}
@Override
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, context, parent);
}
@ -164,7 +168,7 @@ public class ScriptedMetricAggregator extends MetricsAggregator {
reduceParams = deepCopyParams(this.reduceParams, context.searchContext());
}
return new ScriptedMetricAggregator(name, scriptLang, initScriptType, initScript, mapScriptType, mapScript, combineScriptType,
combineScript, reduceScriptType, reduceScript, params, reduceParams, context, parent, metaData);
combineScript, reduceScriptType, reduceScript, params, reduceParams, context, parent, reducers, metaData);
}
@SuppressWarnings({ "unchecked" })

View File

@ -26,10 +26,12 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -69,8 +71,9 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue
protected InternalStats() {} // for serialization
public InternalStats(String name, long count, double sum, double min, double max, @Nullable ValueFormatter formatter,
List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, metaData);
super(name, reducers, metaData);
this.count = count;
this.sum = sum;
this.min = min;
@ -160,7 +163,7 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue
max = Math.max(max, stats.getMax());
sum += stats.getSum();
}
return new InternalStats(name, count, sum, min, max, valueFormatter, getMetaData());
return new InternalStats(name, count, sum, min, max, valueFormatter, reducers(), getMetaData());
}
@Override

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -38,6 +39,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -55,8 +57,9 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
public StatsAggegator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
AggregationContext context,
Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
this.valuesSource = valuesSource;
if (valuesSource != null) {
final BigArrays bigArrays = context.bigArrays();
@ -80,35 +83,35 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
}
final BigArrays bigArrays = context.bigArrays();
final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
return new LeafBucketCollectorBase(sub, values) {
@Override
@Override
public void collect(int doc, long bucket) throws IOException {
if (bucket >= counts.size()) {
final long from = counts.size();
final long from = counts.size();
final long overSize = BigArrays.overSize(bucket + 1);
counts = bigArrays.resize(counts, overSize);
sums = bigArrays.resize(sums, overSize);
mins = bigArrays.resize(mins, overSize);
maxes = bigArrays.resize(maxes, overSize);
mins.fill(from, overSize, Double.POSITIVE_INFINITY);
maxes.fill(from, overSize, Double.NEGATIVE_INFINITY);
}
counts = bigArrays.resize(counts, overSize);
sums = bigArrays.resize(sums, overSize);
mins = bigArrays.resize(mins, overSize);
maxes = bigArrays.resize(maxes, overSize);
mins.fill(from, overSize, Double.POSITIVE_INFINITY);
maxes.fill(from, overSize, Double.NEGATIVE_INFINITY);
}
values.setDocument(doc);
final int valuesCount = values.count();
values.setDocument(doc);
final int valuesCount = values.count();
counts.increment(bucket, valuesCount);
double sum = 0;
double sum = 0;
double min = mins.get(bucket);
double max = maxes.get(bucket);
for (int i = 0; i < valuesCount; i++) {
double value = values.valueAt(i);
sum += value;
min = Math.min(min, value);
max = Math.max(max, value);
}
for (int i = 0; i < valuesCount; i++) {
double value = values.valueAt(i);
sum += value;
min = Math.min(min, value);
max = Math.max(max, value);
}
sums.increment(bucket, sum);
mins.set(bucket, min);
maxes.set(bucket, max);
@ -145,12 +148,12 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
return buildEmptyAggregation();
}
return new InternalStats(name, counts.get(bucket), sums.get(bucket), mins.get(bucket),
maxes.get(bucket), formatter, metaData());
maxes.get(bucket), formatter, reducers(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalStats(name, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, formatter, metaData());
return new InternalStats(name, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, formatter, reducers(), metaData());
}
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
@ -160,13 +163,15 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new StatsAggegator(name, null, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new StatsAggegator(name, null, config.formatter(), aggregationContext, parent, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new StatsAggegator(name, valuesSource, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new StatsAggegator(name, valuesSource, config.formatter(), aggregationContext, parent, reducers, metaData);
}
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -38,6 +39,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -55,10 +57,10 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue
DoubleArray maxes;
DoubleArray sumOfSqrs;
public ExtendedStatsAggregator(String name, ValuesSource.Numeric valuesSource,
@Nullable ValueFormatter formatter, AggregationContext context,
Aggregator parent, double sigma, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
public ExtendedStatsAggregator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
AggregationContext context, Aggregator parent, double sigma, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
super(name, context, parent, reducers, metaData);
this.valuesSource = valuesSource;
this.formatter = formatter;
this.sigma = sigma;
@ -167,16 +169,19 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) {
if (valuesSource == null) {
return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0d, 0d, formatter, metaData());
return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0d, 0d, formatter,
reducers(), metaData());
}
assert owningBucketOrdinal < counts.size();
return new InternalExtendedStats(name, counts.get(owningBucketOrdinal), sums.get(owningBucketOrdinal),
mins.get(owningBucketOrdinal), maxes.get(owningBucketOrdinal), sumOfSqrs.get(owningBucketOrdinal), sigma, formatter, metaData());
mins.get(owningBucketOrdinal), maxes.get(owningBucketOrdinal), sumOfSqrs.get(owningBucketOrdinal), sigma, formatter,
reducers(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0d, 0d, formatter, metaData());
return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0d, 0d, formatter, reducers(),
metaData());
}
@Override
@ -195,13 +200,16 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new ExtendedStatsAggregator(name, null, config.formatter(), aggregationContext, parent, sigma, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new ExtendedStatsAggregator(name, null, config.formatter(), aggregationContext, parent, sigma, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new ExtendedStatsAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, sigma, metaData);
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new ExtendedStatsAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, sigma, reducers,
metaData);
}
}
}

View File

@ -27,9 +27,11 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -67,8 +69,9 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat
InternalExtendedStats() {} // for serialization
public InternalExtendedStats(String name, long count, double sum, double min, double max, double sumOfSqrs,
double sigma, @Nullable ValueFormatter formatter, Map<String, Object> metaData) {
super(name, count, sum, min, max, formatter, metaData);
double sigma,
@Nullable ValueFormatter formatter, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, count, sum, min, max, formatter, reducers, metaData);
this.sumOfSqrs = sumOfSqrs;
this.sigma = sigma;
}
@ -150,7 +153,8 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat
sumOfSqrs += stats.getSumOfSquares();
}
final InternalStats stats = super.doReduce(reduceContext);
return new InternalExtendedStats(name, stats.getCount(), stats.getSum(), stats.getMin(), stats.getMax(), sumOfSqrs, sigma, valueFormatter, getMetaData());
return new InternalExtendedStats(name, stats.getCount(), stats.getSum(), stats.getMin(), stats.getMax(), sumOfSqrs, sigma,
valueFormatter, reducers(), getMetaData());
}
@Override

View File

@ -25,10 +25,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -55,8 +57,8 @@ public class InternalSum extends InternalNumericMetricsAggregation.SingleValue i
InternalSum() {} // for serialization
InternalSum(String name, double sum, @Nullable ValueFormatter formatter, Map<String, Object> metaData) {
super(name, metaData);
InternalSum(String name, double sum, @Nullable ValueFormatter formatter, List<Reducer> reducers, Map<String, Object> metaData) {
super(name, reducers, metaData);
this.sum = sum;
this.valueFormatter = formatter;
}
@ -81,7 +83,7 @@ public class InternalSum extends InternalNumericMetricsAggregation.SingleValue i
for (InternalAggregation aggregation : reduceContext.aggregations()) {
sum += ((InternalSum) aggregation).sum;
}
return new InternalSum(name, sum, valueFormatter, getMetaData());
return new InternalSum(name, sum, valueFormatter, reducers(), getMetaData());
}
@Override

View File

@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -36,6 +37,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -49,8 +51,9 @@ public class SumAggregator extends NumericMetricsAggregator.SingleValue {
DoubleArray sums;
public SumAggregator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
AggregationContext context,
Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
this.valuesSource = valuesSource;
this.formatter = formatter;
if (valuesSource != null) {
@ -68,19 +71,19 @@ public class SumAggregator extends NumericMetricsAggregator.SingleValue {
final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
}
final BigArrays bigArrays = context.bigArrays();
final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
return new LeafBucketCollectorBase(sub, values) {
@Override
@Override
public void collect(int doc, long bucket) throws IOException {
sums = bigArrays.grow(sums, bucket + 1);
values.setDocument(doc);
final int valuesCount = values.count();
double sum = 0;
for (int i = 0; i < valuesCount; i++) {
sum += values.valueAt(i);
}
values.setDocument(doc);
final int valuesCount = values.count();
double sum = 0;
for (int i = 0; i < valuesCount; i++) {
sum += values.valueAt(i);
}
sums.increment(bucket, sum);
}
};
@ -96,12 +99,12 @@ public class SumAggregator extends NumericMetricsAggregator.SingleValue {
if (valuesSource == null || bucket >= sums.size()) {
return buildEmptyAggregation();
}
return new InternalSum(name, sums.get(bucket), formatter, metaData());
return new InternalSum(name, sums.get(bucket), formatter, reducers(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalSum(name, 0.0, formatter, metaData());
return new InternalSum(name, 0.0, formatter, reducers(), metaData());
}
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
@ -111,13 +114,15 @@ public class SumAggregator extends NumericMetricsAggregator.SingleValue {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new SumAggregator(name, null, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new SumAggregator(name, null, config.formatter(), aggregationContext, parent, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new SumAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new SumAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, reducers, metaData);
}
}

View File

@ -36,10 +36,12 @@ import org.elasticsearch.search.aggregations.AggregationInitializationException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
@ -48,6 +50,7 @@ import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.SubSearchContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -68,8 +71,9 @@ public class TopHitsAggregator extends MetricsAggregator {
final SubSearchContext subSearchContext;
final LongObjectPagedHashMap<TopDocsAndLeafCollector> topDocsCollectors;
public TopHitsAggregator(FetchPhase fetchPhase, SubSearchContext subSearchContext, String name, AggregationContext context, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, context, parent, metaData);
public TopHitsAggregator(FetchPhase fetchPhase, SubSearchContext subSearchContext, String name, AggregationContext context,
Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
super(name, context, parent, reducers, metaData);
this.fetchPhase = fetchPhase;
topDocsCollectors = new LongObjectPagedHashMap<>(1, context.bigArrays());
this.subSearchContext = subSearchContext;
@ -82,8 +86,8 @@ public class TopHitsAggregator extends MetricsAggregator {
return sort.needsScores() || subSearchContext.trackScores();
} else {
// sort by score
return true;
}
return true;
}
}
@Override
@ -180,8 +184,9 @@ public class TopHitsAggregator extends MetricsAggregator {
}
@Override
public Aggregator createInternal(AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new TopHitsAggregator(fetchPhase, subSearchContext, name, aggregationContext, parent, metaData);
public Aggregator createInternal(AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new TopHitsAggregator(fetchPhase, subSearchContext, name, aggregationContext, parent, reducers, metaData);
}
@Override

View File

@ -25,9 +25,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -54,8 +56,9 @@ public class InternalValueCount extends InternalNumericMetricsAggregation.Single
InternalValueCount() {} // for serialization
public InternalValueCount(String name, long value, @Nullable ValueFormatter formatter, Map<String, Object> metaData) {
super(name, metaData);
public InternalValueCount(String name, long value, @Nullable ValueFormatter formatter, List<Reducer> reducers,
Map<String, Object> metaData) {
super(name, reducers, metaData);
this.value = value;
this.valueFormatter = formatter;
}
@ -81,7 +84,7 @@ public class InternalValueCount extends InternalNumericMetricsAggregation.Single
for (InternalAggregation aggregation : reduceContext.aggregations()) {
valueCount += ((InternalValueCount) aggregation).value;
}
return new InternalValueCount(name, valueCount, valueFormatter, getMetaData());
return new InternalValueCount(name, valueCount, valueFormatter, reducers(), getMetaData());
}
@Override

View File

@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@ -36,6 +37,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -53,8 +55,9 @@ public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue {
LongArray counts;
public ValueCountAggregator(String name, ValuesSource valuesSource, @Nullable ValueFormatter formatter,
AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, aggregationContext, parent, metaData);
AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData)
throws IOException {
super(name, aggregationContext, parent, reducers, metaData);
this.valuesSource = valuesSource;
this.formatter = formatter;
if (valuesSource != null) {
@ -67,17 +70,17 @@ public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue {
final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
}
final BigArrays bigArrays = context.bigArrays();
final SortedBinaryDocValues values = valuesSource.bytesValues(ctx);
return new LeafBucketCollectorBase(sub, values) {
@Override
@Override
public void collect(int doc, long bucket) throws IOException {
counts = bigArrays.grow(counts, bucket + 1);
values.setDocument(doc);
counts.increment(bucket, values.count());
}
}
};
}
@ -92,12 +95,12 @@ public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue {
if (valuesSource == null || bucket >= counts.size()) {
return buildEmptyAggregation();
}
return new InternalValueCount(name, counts.get(bucket), formatter, metaData());
return new InternalValueCount(name, counts.get(bucket), formatter, reducers(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalValueCount(name, 0l, formatter, metaData());
return new InternalValueCount(name, 0l, formatter, reducers(), metaData());
}
@Override
@ -112,13 +115,15 @@ public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue {
}
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
return new ValueCountAggregator(name, null, config.formatter(), aggregationContext, parent, metaData);
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException {
return new ValueCountAggregator(name, null, config.formatter(), aggregationContext, parent, reducers, metaData);
}
@Override
protected Aggregator doCreateInternal(VS valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
return new ValueCountAggregator(name, valuesSource, config.formatter(), aggregationContext, parent,
protected Aggregator doCreateInternal(VS valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
return new ValueCountAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, reducers,
metaData);
}

View File

@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.reducers;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
public abstract class Reducer {
/**
* Parses the reducer request and creates the appropriate reducer factory
* for it.
*
 * @see ReducerFactory
*/
public static interface Parser {
/**
* @return The reducer type this parser is associated with.
*/
String type();
/**
* Returns the reducer factory with which this parser is associated.
*
* @param reducerName
* The name of the reducer
* @param parser
* The xcontent parser
* @param context
* The search context
* @return The resolved reducer factory
* @throws java.io.IOException
* When parsing fails
*/
ReducerFactory parse(String reducerName, XContentParser parser, SearchContext context) throws IOException;
}
public abstract InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext);
}
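The Reducer contract above is intentionally small: a concrete reducer only needs to override reduce(). As a minimal sketch (the NoOpReducer class below is illustrative only and not part of this commit), an implementation that leaves the aggregation untouched looks like this:

package org.elasticsearch.search.aggregations.reducers;

import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;

/**
 * Illustrative only: a reducer that returns its input unchanged. A real
 * implementation would derive and return a new InternalAggregation from
 * the reduced aggregation tree.
 */
public class NoOpReducer extends Reducer {

    @Override
    public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
        return aggregation;
    }
}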

View File

@ -0,0 +1,88 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.reducers;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.Map;
/**
 * A factory that knows how to create a {@link Reducer} of a specific type.
*/
public abstract class ReducerFactory implements Streamable {
protected String name;
protected String type;
protected Map<String, Object> metaData;
/**
* Constructs a new reducer factory.
*
 * @param name
 *            The reducer name
 * @param type
 *            The reducer type
*/
public ReducerFactory(String name, String type) {
this.name = name;
this.type = type;
}
/**
* Validates the state of this factory (makes sure the factory is properly configured)
*/
public final void validate() {
doValidate();
}
protected abstract Reducer createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
Map<String, Object> metaData) throws IOException;
/**
* Creates the reducer
*
* @param context
* The aggregation context
* @param parent
* The parent aggregator (if this is a top level factory, the
* parent will be {@code null})
 * @param collectsFromSingleBucket
 *            If true then the created reducer will only be collected
* with <tt>0</tt> as a bucket ordinal. Some factories can take
* advantage of this in order to return more optimized
* implementations.
*
 * @return The created reducer
*/
public final Reducer create(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket) throws IOException {
Reducer reducer = createInternal(context, parent, collectsFromSingleBucket, this.metaData);
return reducer;
}
public void doValidate() {
}
public void setMetaData(Map<String, Object> metaData) {
this.metaData = metaData;
}
}
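A concrete factory pairs createInternal() with the Streamable methods inherited through ReducerFactory. Below is a hedged sketch under the same assumptions as the NoOpReducer above (the class name, the "noop" type string, and serializing only the name are illustrative; StreamInput and StreamOutput are the usual Elasticsearch stream types):

package org.elasticsearch.search.aggregations.reducers;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;

import java.io.IOException;
import java.util.Map;

/**
 * Illustrative only: a factory that always produces the hypothetical NoOpReducer.
 */
public class NoOpReducerFactory extends ReducerFactory {

    public NoOpReducerFactory(String name) {
        super(name, "noop");
    }

    @Override
    protected Reducer createInternal(AggregationContext context, Aggregator parent,
            boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
        return new NoOpReducer();
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        // A real factory would also read its own settings here.
        name = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // A real factory would also write its own settings here.
        out.writeString(name);
    }
}

A Reducer.Parser implementation would typically return such a factory from its parse() method once it has consumed the reducer's XContent body.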

View File

@ -18,10 +18,16 @@
*/
package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.AggregationInitializationException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
@ -49,12 +55,13 @@ public abstract class ValuesSourceAggregatorFactory<VS extends ValuesSource> ext
}
@Override
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException {
public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
if (config.unmapped()) {
return createUnmapped(context, parent, metaData);
return createUnmapped(context, parent, reducers, metaData);
}
VS vs = context.valuesSource(config);
return doCreateInternal(vs, context, parent, collectsFromSingleBucket, metaData);
return doCreateInternal(vs, context, parent, collectsFromSingleBucket, reducers, metaData);
}
@Override
@ -64,9 +71,11 @@ public abstract class ValuesSourceAggregatorFactory<VS extends ValuesSource> ext
}
}
protected abstract Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException;
protected abstract Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
Map<String, Object> metaData) throws IOException;
protected abstract Aggregator doCreateInternal(VS valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, Map<String, Object> metaData) throws IOException;
protected abstract Aggregator doCreateInternal(VS valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException;
private void resolveValuesSourceConfigFromAncestors(String aggName, AggregatorFactory parent, Class<VS> requiredValuesSourceType) {
ValuesSourceConfig config;

View File

@ -30,7 +30,16 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.*;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchTestCase;
@ -41,11 +50,16 @@ import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
/**
*
@ -96,13 +110,15 @@ public class SignificanceHeuristicTests extends ElasticsearchTestCase {
if (randomBoolean()) {
BytesRef term = new BytesRef("123.0");
buckets.add(new SignificantLongTerms.Bucket(1, 2, 3, 4, 123, InternalAggregations.EMPTY, null));
sTerms[0] = new SignificantLongTerms(10, 20, "some_name", null, 1, 1, heuristic, buckets, null);
sTerms[0] = new SignificantLongTerms(10, 20, "some_name", null, 1, 1, heuristic, buckets,
(List<Reducer>) Collections.EMPTY_LIST, null);
sTerms[1] = new SignificantLongTerms();
} else {
BytesRef term = new BytesRef("someterm");
buckets.add(new SignificantStringTerms.Bucket(term, 1, 2, 3, 4, InternalAggregations.EMPTY));
sTerms[0] = new SignificantStringTerms(10, 20, "some_name", 1, 1, heuristic, buckets, null);
sTerms[0] = new SignificantStringTerms(10, 20, "some_name", 1, 1, heuristic, buckets, (List<Reducer>) Collections.EMPTY_LIST,
null);
sTerms[1] = new SignificantStringTerms();
}
return sTerms;
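One aside on the test wiring above: the (List<Reducer>) Collections.EMPTY_LIST cast compiles only with an unchecked warning. An equivalent, warning-free spelling (a suggestion, not what the commit uses) is:

// Typed empty list; avoids the unchecked cast from the raw Collections.EMPTY_LIST.
List<Reducer> noReducers = Collections.<Reducer> emptyList();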