Fix some indentation issues.

Adrien Grand 2015-04-29 15:06:58 +02:00
parent bf9739d0f0
commit 891dfee0d6
32 changed files with 186 additions and 198 deletions

SingleBucketAggregator.java

@@ -33,7 +33,7 @@ import java.util.Map;
public abstract class SingleBucketAggregator extends BucketsAggregator {

    protected SingleBucketAggregator(String name, AggregatorFactories factories,
            AggregationContext aggregationContext, Aggregator parent,
            List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
        super(name, factories, aggregationContext, parent, reducers, metaData);
    }

ParentToChildrenAggregator.java

@@ -65,7 +65,7 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {

    public ParentToChildrenAggregator(String name, AggregatorFactories factories, AggregationContext aggregationContext,
            Aggregator parent, String parentType, Filter childFilter, Filter parentFilter,
            ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource,
            long maxOrd, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
        super(name, factories, aggregationContext, parent, reducers, metaData);
        this.parentType = parentType;

FilterAggregator.java

@@ -48,7 +48,7 @@ public class FilterAggregator extends SingleBucketAggregator {
                            org.apache.lucene.search.Filter filter,
                            AggregatorFactories factories,
                            AggregationContext aggregationContext,
                            Aggregator parent, List<Reducer> reducers,
                            Map<String, Object> metaData) throws IOException {
        super(name, factories, aggregationContext, parent, reducers, metaData);
        this.filter = filter;

@@ -61,12 +61,12 @@ public class FilterAggregator extends SingleBucketAggregator {
        // no need to provide deleted docs to the filter
        final Bits bits = DocIdSets.asSequentialAccessBits(ctx.reader().maxDoc(), filter.getDocIdSet(ctx, null));
        return new LeafBucketCollectorBase(sub, null) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                if (bits.get(doc)) {
                    collectBucket(sub, doc, bucket);
                }
            }
        };
    }
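
The collector above tests membership with bits.get(doc). A minimal sketch of the sequential-access contract that DocIdSets.asSequentialAccessBits provides: get(doc) only has to be correct for non-decreasing doc ids, which matches how collect(doc, bucket) is invoked. The int[]-backed class below is a simplified stand-in, not the real implementation.

    public class SequentialBitsSketch {
        private final int[] matchingDocs; // ascending doc ids produced by the filter
        private int cursor = 0;

        SequentialBitsSketch(int... matchingDocs) {
            this.matchingDocs = matchingDocs;
        }

        boolean get(int doc) {
            while (cursor < matchingDocs.length && matchingDocs[cursor] < doc) {
                cursor++; // docs are asked for in order, so the cursor never rewinds
            }
            return cursor < matchingDocs.length && matchingDocs[cursor] == doc;
        }

        public static void main(String[] args) {
            SequentialBitsSketch bits = new SequentialBitsSketch(2, 5, 9);
            System.out.println(bits.get(2) + " " + bits.get(3) + " " + bits.get(5)); // true false true
        }
    }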

FiltersAggregator.java

@@ -61,7 +61,7 @@ public class FiltersAggregator extends BucketsAggregator {
    private final boolean keyed;

    public FiltersAggregator(String name, AggregatorFactories factories, List<KeyedFilter> filters, boolean keyed, AggregationContext aggregationContext,
            Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData)
            throws IOException {
        super(name, factories, aggregationContext, parent, reducers, metaData);
        this.keyed = keyed;

@@ -78,14 +78,14 @@ public class FiltersAggregator extends BucketsAggregator {
            bits[i] = DocIdSets.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].filter.getDocIdSet(ctx, null));
        }
        return new LeafBucketCollectorBase(sub, null) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                for (int i = 0; i < bits.length; i++) {
                    if (bits[i].get(doc)) {
                        collectBucket(sub, doc, bucketOrd(bucket, i));
                    }
                }
            }
        };
    }
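
Each matching doc is funneled into bucketOrd(bucket, i). That helper is not part of this hunk; a sketch of the dense sub-bucket ordinal scheme it presumably implements (one slot per filter per owning bucket, so array-backed counters can be indexed directly) is:

    public class BucketOrdSketch {
        static final int NUM_FILTERS = 3; // stands in for filters.length

        static long bucketOrd(long owningBucket, int filterIndex) {
            return owningBucket * NUM_FILTERS + filterIndex;
        }

        public static void main(String[] args) {
            // owning bucket 2, filter 1 -> ordinal 7
            System.out.println(bucketOrd(2, 1));
        }
    }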

GeoHashGridAggregator.java

@@ -51,9 +51,8 @@ public class GeoHashGridAggregator extends BucketsAggregator {
    private final LongHash bucketOrds;

    public GeoHashGridAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource,
            int requiredSize, int shardSize, AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {
        super(name, factories, aggregationContext, parent, reducers, metaData);
        this.valuesSource = valuesSource;
        this.requiredSize = requiredSize;

GlobalAggregator.java

@@ -53,7 +53,7 @@ public class GlobalAggregator extends SingleBucketAggregator {
            public void collect(int doc, long bucket) throws IOException {
                assert bucket == 0 : "global aggregator can only be a top level aggregator";
                collectBucket(sub, doc, bucket);
            }
        };
    }

HistogramAggregator.java

@@ -57,14 +57,12 @@ public class HistogramAggregator extends BucketsAggregator {
    private final InternalHistogram.Factory histogramFactory;
    private final LongHash bucketOrds;
    private SortedNumericDocValues values;

    public HistogramAggregator(String name, AggregatorFactories factories, Rounding rounding, InternalOrder order,
            boolean keyed, long minDocCount, @Nullable ExtendedBounds extendedBounds,
            @Nullable ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
            InternalHistogram.Factory<?> histogramFactory, AggregationContext aggregationContext,
            Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
        super(name, factories, aggregationContext, parent, reducers, metaData);
        this.rounding = rounding;

InternalHistogram.java

@@ -191,7 +191,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
        public ValueFormatter getFormatter() {
            return formatter;
        }

        public boolean getKeyed() {
            return keyed;

MissingAggregator.java

@@ -44,8 +44,8 @@ public class MissingAggregator extends SingleBucketAggregator {
    private final ValuesSource valuesSource;

    public MissingAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource,
            AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {
        super(name, factories, aggregationContext, parent, reducers, metaData);
        this.valuesSource = valuesSource;
    }

NestedAggregator.java

@@ -68,58 +68,58 @@ public class NestedAggregator extends SingleBucketAggregator {
        this.parentFilter = null;
        // In ES if a parent is deleted, then its children are also deleted. Therefore acceptedDocs can also be null here.
        DocIdSet childDocIdSet = childFilter.getDocIdSet(ctx, null);
        if (DocIdSets.isEmpty(childDocIdSet)) {
            childDocs = null;
        } else {
            childDocs = childDocIdSet.iterator();
        }
        return new LeafBucketCollectorBase(sub, null) {
            @Override
            public void collect(int parentDoc, long bucket) throws IOException {
                // here we translate the parent doc to a list of its nested docs, and then call super.collect for every one of them so they'll be collected
                // if parentDoc is 0 then this means that this parent doesn't have child docs (b/c these always appear before the parent doc), so we can skip:
                if (parentDoc == 0 || childDocs == null) {
                    return;
                }
                if (parentFilter == null) {
                    // The aggs are instantiated in reverse, first the most inner nested aggs and lastly the top level aggs
                    // So at the time a nested 'nested' aggs is parsed its closest parent nested aggs hasn't been constructed.
                    // So the trick is to set it at the last moment, just before it is needed, and we can use its child filter as the
                    // parent filter.

                    // Additional NOTE: Before, this logic was performed in the setNextReader(...) method, but the assumption
                    // that aggs instances are constructed in reverse doesn't hold when buckets are constructed lazily during
                    // aggs execution
                    Filter parentFilterNotCached = findClosestNestedPath(parent());
                    if (parentFilterNotCached == null) {
                        parentFilterNotCached = NonNestedDocsFilter.INSTANCE;
                    }
                    parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilterNotCached);
                    BitDocIdSet parentSet = parentFilter.getDocIdSet(ctx);
                    if (DocIdSets.isEmpty(parentSet)) {
                        // There are no parentDocs in the segment, so return and set childDocs to null, so we exit early for future invocations.
                        childDocs = null;
                        return;
                    } else {
                        parentDocs = parentSet.bits();
                    }
                }

                final int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
                int childDocId = childDocs.docID();
                if (childDocId <= prevParentDoc) {
                    childDocId = childDocs.advance(prevParentDoc + 1);
                }

                for (; childDocId < parentDoc; childDocId = childDocs.nextDoc()) {
                    collectBucket(sub, childDocId, bucket);
                }
            }
        };
    }

    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
        return new InternalNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), reducers(),
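
The prevSetBit/advance dance above relies on Lucene's block-join doc-id layout: nested child docs are indexed immediately before their parent, so the children of parent P are exactly the doc ids between the previous parent and P. A minimal sketch of that layout, with plain arrays standing in for Lucene's BitSet and DocIdSetIterator:

    public class BlockJoinSketch {
        public static void main(String[] args) {
            // docs 0-1 are children of parent 2; docs 3-5 are children of parent 6
            boolean[] parentDocs = {false, false, true, false, false, false, true};
            int parentDoc = 6;
            int prevParentDoc = prevSetBit(parentDocs, parentDoc - 1); // -> 2
            for (int child = prevParentDoc + 1; child < parentDoc; child++) {
                System.out.println("child of " + parentDoc + ": " + child); // 3, 4, 5
            }
        }

        static int prevSetBit(boolean[] bits, int from) {
            for (int i = from; i >= 0; i--) {
                if (bits[i]) return i;
            }
            return -1;
        }
    }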

ReverseNestedAggregator.java

@@ -72,29 +72,29 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
        // must belong to parent docs that are alive. For this reason acceptedDocs can be null here.
        BitDocIdSet docIdSet = parentFilter.getDocIdSet(ctx);
        final BitSet parentDocs;
        if (DocIdSets.isEmpty(docIdSet)) {
            return LeafBucketCollector.NO_OP_COLLECTOR;
        } else {
            parentDocs = docIdSet.bits();
        }
        final LongIntOpenHashMap bucketOrdToLastCollectedParentDoc = new LongIntOpenHashMap(32);
        return new LeafBucketCollectorBase(sub, null) {
            @Override
            public void collect(int childDoc, long bucket) throws IOException {
                // fast forward to retrieve the parentDoc this childDoc belongs to
                final int parentDoc = parentDocs.nextSetBit(childDoc);
                assert childDoc <= parentDoc && parentDoc != DocIdSetIterator.NO_MORE_DOCS;
                if (bucketOrdToLastCollectedParentDoc.containsKey(bucket)) {
                    int lastCollectedParentDoc = bucketOrdToLastCollectedParentDoc.lget();
                    if (parentDoc > lastCollectedParentDoc) {
                        collectBucket(sub, parentDoc, bucket);
                        bucketOrdToLastCollectedParentDoc.lset(parentDoc);
                    }
                } else {
                    collectBucket(sub, parentDoc, bucket);
                    bucketOrdToLastCollectedParentDoc.put(bucket, parentDoc);
                }
            }
        };
    }
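
Many child docs map to the same parent, so the collector remembers, per bucket, the last parent it emitted and collects each parent only once. A HashMap-based sketch of that dedup (standing in for the LongIntOpenHashMap above):

    import java.util.HashMap;
    import java.util.Map;

    public class ReverseNestedDedupSketch {
        private final Map<Long, Integer> lastCollectedParent = new HashMap<>();

        void collect(int parentDoc, long bucket) {
            Integer last = lastCollectedParent.get(bucket);
            if (last == null || parentDoc > last) {
                System.out.println("collect parent " + parentDoc + " into bucket " + bucket);
                lastCollectedParent.put(bucket, parentDoc);
            }
        }

        public static void main(String[] args) {
            ReverseNestedDedupSketch c = new ReverseNestedDedupSketch();
            c.collect(6, 0); // collected
            c.collect(6, 0); // suppressed: same parent already seen for this bucket
            c.collect(9, 0); // collected
        }
    }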

RangeAggregator.java

@@ -105,7 +105,7 @@ public class RangeAggregator extends BucketsAggregator {
                           List<Range> ranges,
                           boolean keyed,
                           AggregationContext aggregationContext,
                           Aggregator parent, List<Reducer> reducers,
                           Map<String, Object> metaData) throws IOException {
        super(name, factories, aggregationContext, parent, reducers, metaData);

@@ -140,15 +140,15 @@ public class RangeAggregator extends BucketsAggregator {
                                                final LeafBucketCollector sub) throws IOException {
        final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
        return new LeafBucketCollectorBase(sub, values) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                values.setDocument(doc);
                final int valuesCount = values.count();
                for (int i = 0, lo = 0; i < valuesCount; ++i) {
                    final double value = values.valueAt(i);
                    lo = collect(doc, value, bucket, lo);
                }
            }
        };
    }

    private int collect(int doc, double value, long owningBucketOrdinal, int lowBound) throws IOException {
        int lo = lowBound, hi = ranges.length - 1; // all candidates are between these indexes

@@ -267,7 +267,7 @@ public class RangeAggregator extends BucketsAggregator {
                           ValueFormat format,
                           AggregationContext context,
                           Aggregator parent,
                           InternalRange.Factory factory, List<Reducer> reducers,
                           Map<String, Object> metaData) throws IOException {
        super(name, context, parent, reducers, metaData);
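
The lo that collect(doc, value, bucket, lo) threads through the loop above exploits the fact that doubleValues returns each document's values in ascending order: once a value has matched range lo, later values cannot fall into an earlier range, so the search restarts from lo instead of 0. A simplified sketch under assumed sorted, non-overlapping ranges (linear scan for brevity, where the real code binary-searches between lowBound and ranges.length - 1):

    public class RangeCollectSketch {
        // hypothetical {from, to} pairs, sorted by from
        static final double[][] RANGES = { {0, 10}, {10, 50}, {50, 100} };

        public static void main(String[] args) {
            double[] sortedDocValues = {3.0, 12.0, 42.0, 99.0};
            int lo = 0;
            for (double v : sortedDocValues) {
                lo = collect(v, lo); // rising low bound, never rescans earlier ranges
            }
        }

        // returns the index of the matching range, which becomes the next low bound
        static int collect(double value, int lowBound) {
            for (int i = lowBound; i < RANGES.length; i++) {
                if (value >= RANGES[i][0] && value < RANGES[i][1]) {
                    System.out.println(value + " -> range " + i);
                    return i;
                }
            }
            return lowBound;
        }
    }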

GlobalOrdinalsSignificantTermsAggregator.java

@@ -50,7 +50,7 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri

    public GlobalOrdinalsSignificantTermsAggregator(String name, AggregatorFactories factories,
            ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, BucketCountThresholds bucketCountThresholds,
            IncludeExclude.OrdinalsFilter includeExclude, AggregationContext aggregationContext, Aggregator parent,
            SignificantTermsAggregatorFactory termsAggFactory, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
        super(name, factories, valuesSource, null, bucketCountThresholds, includeExclude, aggregationContext, parent,

SignificantLongTermsAggregator.java

@@ -46,10 +46,9 @@ import java.util.Map;
public class SignificantLongTermsAggregator extends LongTermsAggregator {

    public SignificantLongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format,
            BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext,
            Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, IncludeExclude.LongFilter includeExclude,
            List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
        super(name, factories, valuesSource, format, null, bucketCountThresholds, aggregationContext, parent,
                SubAggCollectionMode.DEPTH_FIRST, false, includeExclude, reducers, metaData);

SignificantStringTermsAggregator.java

@@ -50,7 +50,7 @@ public class SignificantStringTermsAggregator extends StringTermsAggregator {

    public SignificantStringTermsAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource,
            BucketCountThresholds bucketCountThresholds,
            IncludeExclude.StringFilter includeExclude, AggregationContext aggregationContext, Aggregator parent,
            SignificantTermsAggregatorFactory termsAggFactory, List<Reducer> reducers, Map<String, Object> metaData)
            throws IOException {
        super(name, factories, valuesSource, null, bucketCountThresholds, includeExclude, aggregationContext, parent,

DoubleTermsAggregator.java

@@ -43,7 +43,7 @@ import java.util.Map;
public class DoubleTermsAggregator extends LongTermsAggregator {

    public DoubleTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format,
            Terms.Order order, BucketCountThresholds bucketCountThresholds,
            AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError,
            IncludeExclude.LongFilter longFilter, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
        super(name, factories, valuesSource, format, order, bucketCountThresholds, aggregationContext, parent, collectionMode,

LongTermsAggregator.java

@@ -65,7 +65,7 @@ public class LongTermsAggregator extends TermsAggregator {
        this.longFilter = longFilter;
        bucketOrds = new LongHash(1, aggregationContext.bigArrays());
    }

    @Override
    public boolean needsScores() {
        return (valuesSource != null && valuesSource.needsScores()) || super.needsScores();

@@ -80,30 +80,30 @@ public class LongTermsAggregator extends TermsAggregator {
            final LeafBucketCollector sub) throws IOException {
        final SortedNumericDocValues values = getValues(valuesSource, ctx);
        return new LeafBucketCollectorBase(sub, values) {
            @Override
            public void collect(int doc, long owningBucketOrdinal) throws IOException {
                assert owningBucketOrdinal == 0;
                values.setDocument(doc);
                final int valuesCount = values.count();

                long previous = Long.MAX_VALUE;
                for (int i = 0; i < valuesCount; ++i) {
                    final long val = values.valueAt(i);
                    if (previous != val || i == 0) {
                        if ((longFilter == null) || (longFilter.accept(val))) {
                            long bucketOrdinal = bucketOrds.add(val);
                            if (bucketOrdinal < 0) { // already seen
                                bucketOrdinal = -1 - bucketOrdinal;
                                collectExistingBucket(sub, doc, bucketOrdinal);
                            } else {
                                collectBucket(sub, doc, bucketOrdinal);
                            }
                        }
                        previous = val;
                    }
                }
            }
        };
    }

@@ -152,7 +152,7 @@ public class LongTermsAggregator extends TermsAggregator {
            list[i] = bucket;
            otherDocCount -= bucket.docCount;
        }

        runDeferredCollections(survivingBucketOrds);

        // Now build the aggs

@@ -160,13 +160,12 @@ public class LongTermsAggregator extends TermsAggregator {
            list[i].aggregations = bucketAggregations(list[i].bucketOrd);
            list[i].docCountError = 0;
        }

        return new LongTerms(name, order, formatter, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
                bucketCountThresholds.getMinDocCount(), Arrays.asList(list), showTermDocCountError, 0, otherDocCount, reducers(),
                metaData());
    }

    @Override
    public InternalAggregation buildEmptyAggregation() {
        return new LongTerms(name, order, formatter, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(),
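
The bucketOrdinal < 0 branch in the collector above uses LongHash's packed return convention: add(key) hands back a fresh ordinal for a new key and -1 - existingOrdinal for a key it has already seen, so a single call both inserts and deduplicates. A HashMap-based sketch of that convention (not the real org.elasticsearch.common.util.LongHash):

    import java.util.HashMap;
    import java.util.Map;

    public class LongHashSketch {
        private final Map<Long, Long> ords = new HashMap<>();

        public long add(long key) {
            Long existing = ords.get(key);
            if (existing != null) {
                return -1 - existing; // already seen: caller decodes with -1 - result
            }
            long ord = ords.size();
            ords.put(key, ord);
            return ord;
        }

        public static void main(String[] args) {
            LongHashSketch hash = new LongHashSketch();
            System.out.println(hash.add(42)); // 0  -> new bucket, collectBucket(...)
            System.out.println(hash.add(42)); // -1 -> existing, collectExistingBucket(...)
        }
    }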

StringTermsAggregator.java

@@ -51,7 +51,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {

    public StringTermsAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource,
            Terms.Order order, BucketCountThresholds bucketCountThresholds,
            IncludeExclude.StringFilter includeExclude, AggregationContext aggregationContext,
            Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {

TermsAggregatorFactory.java

@@ -243,8 +243,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
            }
            return new DoubleTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(),
                    order, bucketCountThresholds, aggregationContext, parent, collectMode,
                    showTermDocCountError, longFilter, reducers, metaData);
        }
        if (includeExclude != null) {
            longFilter = includeExclude.convertToLongFilter();

AvgAggregator.java

@@ -53,8 +53,7 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue {
    ValueFormatter formatter;

    public AvgAggregator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
            AggregationContext context, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
        super(name, context, parent, reducers, metaData);
        this.valuesSource = valuesSource;
        this.formatter = formatter;

@@ -75,22 +74,22 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue {
            final LeafBucketCollector sub) throws IOException {
        if (valuesSource == null) {
            return LeafBucketCollector.NO_OP_COLLECTOR;
        }
        final BigArrays bigArrays = context.bigArrays();
        final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
        return new LeafBucketCollectorBase(sub, values) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                counts = bigArrays.grow(counts, bucket + 1);
                sums = bigArrays.grow(sums, bucket + 1);
                values.setDocument(doc);
                final int valueCount = values.count();
                counts.increment(bucket, valueCount);
                double sum = 0;
                for (int i = 0; i < valueCount; i++) {
                    sum += values.valueAt(i);
                }
                sums.increment(bucket, sum);
            }
        };
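
As the collector shows, the aggregator never stores an average: it keeps only per-bucket counts and sums, growing both arrays lazily to bucket + 1, and the average is derived as sum/count when the aggregation is built. A plain-array sketch of that layout (arrays stand in for BigArrays' LongArray and DoubleArray):

    public class AvgSketch {
        static long[] counts = new long[4];
        static double[] sums = new double[4];

        static void collect(int bucket, double[] docValues) {
            counts[bucket] += docValues.length; // one doc may carry several values
            for (double v : docValues) {
                sums[bucket] += v;
            }
        }

        public static void main(String[] args) {
            collect(0, new double[] {1.0, 2.0});
            collect(0, new double[] {3.0});
            System.out.println(sums[0] / counts[0]); // 2.0, computed at build time
        }
    }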

GeoBoundsAggregator.java

@@ -51,10 +51,9 @@ public final class GeoBoundsAggregator extends MetricsAggregator {
    DoubleArray negLefts;
    DoubleArray negRights;

    protected GeoBoundsAggregator(String name, AggregationContext aggregationContext, Aggregator parent,
            ValuesSource.GeoPoint valuesSource, boolean wrapLongitude, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {
        super(name, aggregationContext, parent, reducers, metaData);
        this.valuesSource = valuesSource;
        this.wrapLongitude = wrapLongitude;

@@ -184,8 +183,7 @@ public final class GeoBoundsAggregator extends MetricsAggregator {
        @Override
        protected Aggregator doCreateInternal(ValuesSource.GeoPoint valuesSource, AggregationContext aggregationContext,
                Aggregator parent, boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
            return new GeoBoundsAggregator(name, aggregationContext, parent, valuesSource, wrapLongitude, reducers, metaData);
        }

InternalGeoBounds.java

@@ -57,8 +57,8 @@ public class InternalGeoBounds extends InternalMetricsAggregation implements Geo
    }

    InternalGeoBounds(String name, double top, double bottom, double posLeft, double posRight,
            double negLeft, double negRight, boolean wrapLongitude,
            List<Reducer> reducers, Map<String, Object> metaData) {
        super(name, reducers, metaData);
        this.top = top;
        this.bottom = bottom;

MaxAggregator.java

@@ -53,8 +53,8 @@ public class MaxAggregator extends NumericMetricsAggregator.SingleValue {
    DoubleArray maxes;

    public MaxAggregator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
            AggregationContext context, Aggregator parent, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {
        super(name, context, parent, reducers, metaData);
        this.valuesSource = valuesSource;
        this.formatter = formatter;

@@ -80,16 +80,16 @@ public class MaxAggregator extends NumericMetricsAggregator.SingleValue {
        final NumericDoubleValues values = MultiValueMode.MAX.select(allValues, Double.NEGATIVE_INFINITY);
        return new LeafBucketCollectorBase(sub, allValues) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                if (bucket >= maxes.size()) {
                    long from = maxes.size();
                    maxes = bigArrays.grow(maxes, bucket + 1);
                    maxes.fill(from, maxes.size(), Double.NEGATIVE_INFINITY);
                }
                final double value = values.get(doc);
                double max = maxes.get(bucket);
                max = Math.max(max, value);
                maxes.set(bucket, max);
            }

MinAggregator.java

@@ -53,8 +53,8 @@ public class MinAggregator extends NumericMetricsAggregator.SingleValue {
    DoubleArray mins;

    public MinAggregator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
            AggregationContext context, Aggregator parent, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {
        super(name, context, parent, reducers, metaData);
        this.valuesSource = valuesSource;
        if (valuesSource != null) {

@@ -74,22 +74,22 @@ public class MinAggregator extends NumericMetricsAggregator.SingleValue {
            final LeafBucketCollector sub) throws IOException {
        if (valuesSource == null) {
            return LeafBucketCollector.NO_OP_COLLECTOR;
        }
        final BigArrays bigArrays = context.bigArrays();
        final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx);
        final NumericDoubleValues values = MultiValueMode.MIN.select(allValues, Double.POSITIVE_INFINITY);
        return new LeafBucketCollectorBase(sub, allValues) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                if (bucket >= mins.size()) {
                    long from = mins.size();
                    mins = bigArrays.grow(mins, bucket + 1);
                    mins.fill(from, mins.size(), Double.POSITIVE_INFINITY);
                }
                final double value = values.get(doc);
                double min = mins.get(bucket);
                min = Math.min(min, value);
                mins.set(bucket, min);
            }

AbstractPercentilesAggregator.java

@@ -55,8 +55,8 @@ public abstract class AbstractPercentilesAggregator extends NumericMetricsAggreg

    public AbstractPercentilesAggregator(String name, ValuesSource.Numeric valuesSource, AggregationContext context,
            Aggregator parent, double[] keys, double compression, boolean keyed,
            @Nullable ValueFormatter formatter, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {
        super(name, context, parent, reducers, metaData);
        this.valuesSource = valuesSource;
        this.keyed = keyed;

PercentileRanksAggregator.java

@@ -96,8 +96,7 @@ public class PercentileRanksAggregator extends AbstractPercentilesAggregator {
        protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
                boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
            return new PercentileRanksAggregator(name, valuesSource, aggregationContext, parent, values, compression,
                    keyed, config.formatter(), reducers, metaData);
        }
    }
}

PercentilesAggregator.java

@@ -40,7 +40,7 @@ import java.util.Map;
public class PercentilesAggregator extends AbstractPercentilesAggregator {

    public PercentilesAggregator(String name, Numeric valuesSource, AggregationContext context,
            Aggregator parent, double[] percents,
            double compression, boolean keyed, @Nullable ValueFormatter formatter, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {
        super(name, valuesSource, context, parent, percents, compression, keyed, formatter, reducers, metaData);

@@ -97,8 +97,7 @@ public class PercentilesAggregator extends AbstractPercentilesAggregator {
        protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
                boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
            return new PercentilesAggregator(name, valuesSource, aggregationContext, parent, percents, compression,
                    keyed, config.formatter(), reducers, metaData);
        }
    }
}

InternalScriptedMetric.java

@@ -106,7 +106,7 @@ public class InternalScriptedMetric extends InternalMetricsAggregation implement
            aggregation = aggregationObjects;
        }
        return new InternalScriptedMetric(firstAggregation.getName(), aggregation, firstAggregation.scriptLang, firstAggregation.scriptType,
                firstAggregation.reduceScript, firstAggregation.reduceParams, reducers(), getMetaData());
    }

StatsAggegator.java

@@ -57,8 +57,8 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue {

    public StatsAggegator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
            AggregationContext context, Aggregator parent, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {
        super(name, context, parent, reducers, metaData);
        this.valuesSource = valuesSource;
        if (valuesSource != null) {

@@ -83,35 +83,35 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
            final LeafBucketCollector sub) throws IOException {
        if (valuesSource == null) {
            return LeafBucketCollector.NO_OP_COLLECTOR;
        }
        final BigArrays bigArrays = context.bigArrays();
        final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
        return new LeafBucketCollectorBase(sub, values) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                if (bucket >= counts.size()) {
                    final long from = counts.size();
                    final long overSize = BigArrays.overSize(bucket + 1);
                    counts = bigArrays.resize(counts, overSize);
                    sums = bigArrays.resize(sums, overSize);
                    mins = bigArrays.resize(mins, overSize);
                    maxes = bigArrays.resize(maxes, overSize);
                    mins.fill(from, overSize, Double.POSITIVE_INFINITY);
                    maxes.fill(from, overSize, Double.NEGATIVE_INFINITY);
                }
                values.setDocument(doc);
                final int valuesCount = values.count();
                counts.increment(bucket, valuesCount);
                double sum = 0;
                double min = mins.get(bucket);
                double max = maxes.get(bucket);
                for (int i = 0; i < valuesCount; i++) {
                    double value = values.valueAt(i);
                    sum += value;
                    min = Math.min(min, value);
                    max = Math.max(max, value);
                }
                sums.increment(bucket, sum);
                mins.set(bucket, min);
                maxes.set(bucket, max);
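
Unlike the grow(..., bucket + 1) calls elsewhere, this collector resizes four arrays at once, so it over-allocates via BigArrays.overSize for amortized O(1) growth and fills the new tail with identity values (positive infinity for mins, negative infinity for maxes) so untouched buckets stay neutral. A plain double[] sketch of the pattern, with a simplified power-of-two heuristic rather than the real BigArrays.overSize:

    import java.util.Arrays;

    public class GrowSketch {
        static double[] mins = new double[0];

        static double[] growForBucket(double[] arr, long bucket, double identity) {
            if (bucket < arr.length) {
                return arr;
            }
            // over-allocate so repeated single-bucket grows don't copy every time
            int overSize = Math.max(8, Integer.highestOneBit((int) bucket) * 2);
            double[] bigger = Arrays.copyOf(arr, overSize);
            Arrays.fill(bigger, arr.length, overSize, identity); // neutral fill for new slots
            return bigger;
        }

        public static void main(String[] args) {
            mins = growForBucket(mins, 10, Double.POSITIVE_INFINITY);
            System.out.println(mins.length + " " + mins[10]); // 16 Infinity
        }
    }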

InternalExtendedStats.java

@@ -69,8 +69,7 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat
    InternalExtendedStats() {} // for serialization

    public InternalExtendedStats(String name, long count, double sum, double min, double max, double sumOfSqrs,
            double sigma, @Nullable ValueFormatter formatter, List<Reducer> reducers, Map<String, Object> metaData) {
        super(name, count, sum, min, max, formatter, reducers, metaData);
        this.sumOfSqrs = sumOfSqrs;
        this.sigma = sigma;

SumAggregator.java

@@ -51,8 +51,8 @@ public class SumAggregator extends NumericMetricsAggregator.SingleValue {
    DoubleArray sums;

    public SumAggregator(String name, ValuesSource.Numeric valuesSource, @Nullable ValueFormatter formatter,
            AggregationContext context, Aggregator parent, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {
        super(name, context, parent, reducers, metaData);
        this.valuesSource = valuesSource;
        this.formatter = formatter;

@@ -71,19 +71,19 @@ public class SumAggregator extends NumericMetricsAggregator.SingleValue {
            final LeafBucketCollector sub) throws IOException {
        if (valuesSource == null) {
            return LeafBucketCollector.NO_OP_COLLECTOR;
        }
        final BigArrays bigArrays = context.bigArrays();
        final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
        return new LeafBucketCollectorBase(sub, values) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                sums = bigArrays.grow(sums, bucket + 1);
                values.setDocument(doc);
                final int valuesCount = values.count();
                double sum = 0;
                for (int i = 0; i < valuesCount; i++) {
                    sum += values.valueAt(i);
                }
                sums.increment(bucket, sum);
            }
        };

ValueCountAggregator.java

@@ -70,17 +70,17 @@ public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue {
            final LeafBucketCollector sub) throws IOException {
        if (valuesSource == null) {
            return LeafBucketCollector.NO_OP_COLLECTOR;
        }
        final BigArrays bigArrays = context.bigArrays();
        final SortedBinaryDocValues values = valuesSource.bytesValues(ctx);
        return new LeafBucketCollectorBase(sub, values) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                counts = bigArrays.grow(counts, bucket + 1);
                values.setDocument(doc);
                counts.increment(bucket, values.count());
            }
        };
    }