Deduplicate Empty InternalAggregations (#58386) (#59032)

Working through a heap dump for an unrelated issue, I found that we can easily rack up
tens of MBs of duplicate empty InternalAggregations instances in some cases.
I moved construction behind a static factory method that returns a shared EMPTY instance,
guarding against that in all cases.
Armin Braun 2020-07-04 14:02:16 +02:00 committed by GitHub
parent 2fdd8f3a2c
commit 071d8b2c1c
51 changed files with 128 additions and 144 deletions
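
In condensed form, the change replaces the public list constructor with a static factory that hands back a single shared instance for the empty case. A minimal, self-contained sketch of that pattern, using a hypothetical ImmutableAggs class (the real InternalAggregations changes follow in the hunks below):

import java.util.Collections;
import java.util.List;

// Hypothetical, simplified illustration of the pattern this commit applies.
final class ImmutableAggs {
    // One shared instance covers every empty result instead of a fresh allocation each time.
    static final ImmutableAggs EMPTY = new ImmutableAggs(Collections.emptyList());

    private final List<String> aggregations;

    // Private constructor forces every caller through the factory below.
    private ImmutableAggs(List<String> aggregations) {
        this.aggregations = aggregations;
    }

    static ImmutableAggs from(List<String> aggregations) {
        return aggregations.isEmpty() ? EMPTY : new ImmutableAggs(aggregations);
    }
}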


@ -49,7 +49,7 @@ public class TransportNoopSearchAction extends HandledTransportAction<SearchRequ
listener.onResponse(new SearchResponse(new InternalSearchResponse(
new SearchHits(
new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f),
new InternalAggregations(Collections.emptyList()),
InternalAggregations.EMPTY,
new Suggest(Collections.emptyList()),
new SearchProfileShardResults(Collections.emptyMap()), false, false, 1),
"", 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY));


@ -706,7 +706,7 @@ public final class SearchPhaseController {
InternalAggregations reduced =
InternalAggregations.topLevelReduce(aggs, aggReduceContextBuilder.forPartialReduction());
reducedAggs = aggsBuffer[0] = DelayableWriteable.referencing(reduced)
.asSerialized(InternalAggregations::new, namedWriteableRegistry);
.asSerialized(InternalAggregations::readFrom, namedWriteableRegistry);
long previousBufferSize = aggsCurrentBufferSize;
aggsMaxBufferSize = Math.max(aggsMaxBufferSize, aggsCurrentBufferSize);
aggsCurrentBufferSize = aggsBuffer[0].ramBytesUsed();
@ -729,7 +729,7 @@ public final class SearchPhaseController {
}
final int i = index++;
if (hasAggs) {
aggsBuffer[i] = querySearchResult.consumeAggs().asSerialized(InternalAggregations::new, namedWriteableRegistry);
aggsBuffer[i] = querySearchResult.consumeAggs().asSerialized(InternalAggregations::readFrom, namedWriteableRegistry);
aggsCurrentBufferSize += aggsBuffer[i].ramBytesUsed();
}
if (hasTopDocs) {


@ -259,7 +259,7 @@ public abstract class AggregatorBase extends Aggregator {
for (Aggregator aggregator : subAggregators) {
aggs.add(aggregator.buildEmptyAggregation());
}
return new InternalAggregations(aggs);
return InternalAggregations.from(aggs);
}
@Override


@ -69,7 +69,7 @@ public final class InternalAggregations extends Aggregations implements Writeabl
/**
* Constructs a new aggregation.
*/
public InternalAggregations(List<InternalAggregation> aggregations) {
private InternalAggregations(List<InternalAggregation> aggregations) {
super(aggregations);
this.pipelineTreeForBwcSerialization = null;
}
@ -85,19 +85,26 @@ public final class InternalAggregations extends Aggregations implements Writeabl
this.pipelineTreeForBwcSerialization = pipelineTreeSource;
}
public InternalAggregations(StreamInput in) throws IOException {
super(in.readList(stream -> in.readNamedWriteable(InternalAggregation.class)));
if (in.getVersion().before(Version.V_7_8_0) && in.getVersion().onOrAfter(Version.V_6_7_0)) {
in.readNamedWriteableList(PipelineAggregator.class);
public static InternalAggregations from(List<InternalAggregation> aggregations) {
if (aggregations.isEmpty()) {
return EMPTY;
}
/*
* Setting the pipeline tree source to null is here is correct but
* only because we don't immediately pass the InternalAggregations
* off to another node. Instead, we always reduce together with
* many aggregations and that always adds the tree read from the
* current request.
*/
pipelineTreeForBwcSerialization = null;
return new InternalAggregations(aggregations);
}
public static InternalAggregations readFrom(StreamInput in) throws IOException {
final InternalAggregations res = from(in.readList(stream -> in.readNamedWriteable(InternalAggregation.class)));
if (in.getVersion().before(Version.V_7_8_0) && in.getVersion().onOrAfter(Version.V_6_7_0)) {
/*
* Setting the pipeline tree source to null is here is correct but
* only because we don't immediately pass the InternalAggregations
* off to another node. Instead, we always reduce together with
* many aggregations and that always adds the tree read from the
* current request.
*/
in.readNamedWriteableList(PipelineAggregator.class);
}
return res;
}
@Override
@ -206,10 +213,10 @@ public final class InternalAggregations extends Aggregations implements Writeabl
for (PipelineAggregator pipelineAggregator : context.pipelineTreeRoot().aggregators()) {
SiblingPipelineAggregator sib = (SiblingPipelineAggregator) pipelineAggregator;
InternalAggregation newAgg = sib.doReduce(new InternalAggregations(reducedInternalAggs), context);
InternalAggregation newAgg = sib.doReduce(from(reducedInternalAggs), context);
reducedInternalAggs.add(newAgg);
}
return new InternalAggregations(reducedInternalAggs);
return from(reducedInternalAggs);
}
return reduced;
}
@ -258,7 +265,6 @@ public final class InternalAggregations extends Aggregations implements Writeabl
* Version of {@link #reduce(List, ReduceContext, Function)} for nodes inside the aggregation tree.
*/
public static InternalAggregations reduce(List<InternalAggregations> aggregationsList, ReduceContext context) {
return reduce(aggregationsList, context, InternalAggregations::new);
return reduce(aggregationsList, context, InternalAggregations::from);
}
}
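
Every other file in this diff switches its call sites from the removed public constructors to the new factories. A hedged usage sketch, assuming only the InternalAggregations members introduced above (EMPTY, from, readFrom); the identity check simply demonstrates the sharing:

import java.util.Collections;

import org.elasticsearch.search.aggregations.InternalAggregations;

// Usage sketch only; not part of the commit.
class EmptyAggsUsageSketch {
    static InternalAggregations empty() {
        InternalAggregations aggs = InternalAggregations.from(Collections.emptyList());
        // from(...) returns the shared constant for an empty list, so identity holds.
        assert aggs == InternalAggregations.EMPTY;
        return aggs;
    }
}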


@ -192,7 +192,7 @@ public abstract class InternalMultiBucketAggregation<A extends InternalMultiBuck
PipelineTree subTree = pipelineTree.subTree(agg.getName());
aggs.add(((InternalAggregation)agg).reducePipelines((InternalAggregation)agg, reduceContext, subTree));
}
reducedBuckets.add(createBucket(new InternalAggregations(aggs), bucket));
reducedBuckets.add(createBucket(InternalAggregations.from(aggs), bucket));
}
return reducedBuckets;
}


@ -178,7 +178,7 @@ public abstract class BucketsAggregator extends AggregatorBase {
slice[i] = aggregations[i][ord];
}
final int thisOrd = ord;
result[ord] = new InternalAggregations(new AbstractList<InternalAggregation>() {
result[ord] = InternalAggregations.from(new AbstractList<InternalAggregation>() {
@Override
public InternalAggregation get(int index) {
return aggregations[index][thisOrd];
@ -353,17 +353,6 @@ public abstract class BucketsAggregator extends AggregatorBase {
InternalAggregation build(long owninigBucketOrd, List<B> buckets);
}
/**
* Utility method to build empty aggregations of the sub aggregators.
*/
protected final InternalAggregations bucketEmptyAggregations() {
final InternalAggregation[] aggregations = new InternalAggregation[subAggregators.length];
for (int i = 0; i < subAggregators.length; i++) {
aggregations[i] = subAggregators[i].buildEmptyAggregation();
}
return new InternalAggregations(Arrays.asList(aggregations));
}
@Override
public final void close() {
try (Releasable releasable = docCounts) {


@ -64,7 +64,7 @@ public abstract class InternalSingleBucketAggregation extends InternalAggregatio
protected InternalSingleBucketAggregation(StreamInput in) throws IOException {
super(in);
docCount = in.readVLong();
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override
@ -128,7 +128,7 @@ public abstract class InternalSingleBucketAggregation extends InternalAggregatio
PipelineTree subTree = pipelineTree.subTree(agg.getName());
aggs.add(((InternalAggregation)agg).reducePipelines((InternalAggregation)agg, reduceContext, subTree));
}
InternalAggregations reducedSubAggs = new InternalAggregations(aggs);
InternalAggregations reducedSubAggs = InternalAggregations.from(aggs);
reduced = create(reducedSubAggs);
}
return super.reducePipelines(reduced, reduceContext, pipelineTree);


@ -56,7 +56,7 @@ public class InternalAdjacencyMatrix
public InternalBucket(StreamInput in) throws IOException {
key = in.readOptionalString();
docCount = in.readVLong();
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override


@ -301,7 +301,7 @@ public class InternalComposite
InternalBucket(StreamInput in, List<String> sourceNames, List<DocValueFormat> formats, int[] reverseMuls) throws IOException {
this.key = new CompositeKey(in);
this.docCount = in.readVLong();
this.aggregations = new InternalAggregations(in);
this.aggregations = InternalAggregations.readFrom(in);
this.reverseMuls = reverseMuls;
this.sourceNames = sourceNames;
this.formats = formats;


@ -56,7 +56,7 @@ public class InternalFilters extends InternalMultiBucketAggregation<InternalFilt
this.keyed = keyed;
key = in.readOptionalString();
docCount = in.readVLong();
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override


@ -50,7 +50,7 @@ public abstract class InternalGeoGridBucket<B extends InternalGeoGridBucket>
public InternalGeoGridBucket(StreamInput in) throws IOException {
hashAsLong = in.readLong();
docCount = in.readVLong();
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override


@ -72,7 +72,7 @@ public final class InternalAutoDateHistogram extends
this.format = format;
key = in.readLong();
docCount = in.readVLong();
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override
@ -163,7 +163,7 @@ public final class InternalAutoDateHistogram extends
roundingInfos[i] = new RoundingInfo(in);
}
roundingIdx = in.readVInt();
emptySubAggregations = new InternalAggregations(in);
emptySubAggregations = InternalAggregations.readFrom(in);
}
void writeTo(StreamOutput out) throws IOException {


@ -76,7 +76,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<
this.keyed = keyed;
key = in.readLong();
docCount = in.readVLong();
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override
@ -174,7 +174,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<
EmptyBucketInfo(StreamInput in) throws IOException {
rounding = Rounding.read(in);
subAggregations = new InternalAggregations(in);
subAggregations = InternalAggregations.readFrom(in);
bounds = in.readOptionalWriteable(ExtendedBounds::new);
}


@ -72,7 +72,7 @@ public final class InternalHistogram extends InternalMultiBucketAggregation<Inte
this.keyed = keyed;
key = in.readDouble();
docCount = in.readVLong();
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override
@ -166,7 +166,7 @@ public final class InternalHistogram extends InternalMultiBucketAggregation<Inte
}
EmptyBucketInfo(StreamInput in) throws IOException {
this(in.readDouble(), in.readDouble(), in.readDouble(), in.readDouble(), new InternalAggregations(in));
this(in.readDouble(), in.readDouble(), in.readDouble(), in.readDouble(), InternalAggregations.readFrom(in));
}
public void writeTo(StreamOutput out) throws IOException {


@ -105,7 +105,7 @@ public class InternalVariableWidthHistogram
centroid = in.readDouble();
docCount = in.readVLong();
bounds = new BucketBounds(in);
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override
@ -207,7 +207,7 @@ public class InternalVariableWidthHistogram
}
EmptyBucketInfo(StreamInput in) throws IOException {
this(new InternalAggregations(in));
this(InternalAggregations.readFrom(in));
}
public void writeTo(StreamOutput out) throws IOException {


@ -81,7 +81,7 @@ public final class InternalBinaryRange
BytesRef from = in.readBoolean() ? in.readBytesRef() : null;
BytesRef to = in.readBoolean() ? in.readBytesRef() : null;
long docCount = in.readLong();
InternalAggregations aggregations = new InternalAggregations(in);
InternalAggregations aggregations = InternalAggregations.readFrom(in);
return new Bucket(format, keyed, key, from, to, docCount, aggregations);
}


@ -37,7 +37,7 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket, I
public Bucket(String key, double from, double to, long docCount, List<InternalAggregation> aggregations, boolean keyed,
DocValueFormat formatter) {
super(key, from, to, docCount, new InternalAggregations(aggregations), keyed, formatter);
super(key, from, to, docCount, InternalAggregations.from(aggregations), keyed, formatter);
}
public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed,


@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValueType;
@ -35,10 +34,6 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
static class Bucket extends InternalRange.Bucket {
Bucket(String key, double from, double to, long docCount, List<InternalAggregation> aggregations, boolean keyed) {
this(key, from, to, docCount, new InternalAggregations(aggregations), keyed);
}
Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed) {
super(key, from, to, docCount, aggregations, keyed, DocValueFormat.RAW);
}


@ -248,7 +248,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
? in.readString()
: in.readOptionalString();
ranges.add(getFactory().createBucket(key, in.readDouble(), in.readDouble(), in.readVLong(),
new InternalAggregations(in), keyed, format));
InternalAggregations.readFrom(in), keyed, format));
}
this.ranges = ranges;
}


@ -68,7 +68,7 @@ public abstract class InternalRareTerms<A extends InternalRareTerms<A, B>, B ext
protected Bucket(StreamInput in, DocValueFormat formatter) throws IOException {
this.format = formatter;
docCount = in.readVLong();
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override


@ -84,7 +84,7 @@ public abstract class InternalTerms<A extends InternalTerms<A, B>, B extends Int
if (showDocCountError) {
docCountError = in.readLong();
}
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override


@ -53,7 +53,7 @@ public class SignificantLongTerms extends InternalMappedSignificantTerms<Signifi
supersetDf = in.readVLong();
term = in.readLong();
score = in.readDouble();
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override


@ -57,7 +57,7 @@ public class SignificantStringTerms extends InternalMappedSignificantTerms<Signi
subsetDf = in.readVLong();
supersetDf = in.readVLong();
score = in.readDouble();
aggregations = new InternalAggregations(in);
aggregations = InternalAggregations.readFrom(in);
}
@Override


@ -117,7 +117,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
InternalSimpleValue simpleValue = new InternalSimpleValue(name(), returned.doubleValue(), formatter, metadata());
aggs.add(simpleValue);
InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs),
InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(InternalAggregations.from(aggs),
bucket);
newBuckets.add(newBucket);
}


@ -87,7 +87,7 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator {
.map((p) -> (InternalAggregation) p)
.collect(Collectors.toList());
aggs.add(new InternalSimpleValue(name(), sum, formatter, metadata()));
Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs));
Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
newBuckets.add(newBucket);
}
return factory.createAggregation(newBuckets);


@ -98,7 +98,7 @@ public class DerivativePipelineAggregator extends PipelineAggregator {
return (InternalAggregation) p;
}).collect(Collectors.toList());
aggs.add(new InternalDerivative(name(), gradient, xDiff, formatter, metadata()));
Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs));
Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
newBuckets.add(newBucket);
} else {
newBuckets.add(bucket);


@ -127,7 +127,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
.map((p) -> (InternalAggregation) p)
.collect(Collectors.toList());
aggs.add(new InternalSimpleValue(name(), movavg, formatter, metadata()));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
}
if (predict > 0) {
@ -158,7 +158,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
.collect(Collectors.toList());
aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, metadata()));
Bucket newBucket = factory.createBucket(newKey, bucket.getDocCount(), new InternalAggregations(aggs));
Bucket newBucket = factory.createBucket(newKey, bucket.getDocCount(), InternalAggregations.from(aggs));
// Overwrite the existing bucket with the new version
newBuckets.set(lastValidPosition + i + 1, newBucket);
@ -168,7 +168,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
aggs = new ArrayList<>();
aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, metadata()));
Bucket newBucket = factory.createBucket(newKey, 0, new InternalAggregations(aggs));
Bucket newBucket = factory.createBucket(newKey, 0, InternalAggregations.from(aggs));
// Since this is a new bucket, simply append it
newBuckets.add(newBucket);


@ -157,7 +157,7 @@ public class MovFnPipelineAggregator extends PipelineAggregator {
.map(InternalAggregation.class::cast)
.collect(Collectors.toList());
aggs.add(new InternalSimpleValue(name(), movavg, formatter, metadata()));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
index++;
}
newBuckets.add(newBucket);


@ -114,7 +114,7 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator {
List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map(
(p) -> (InternalAggregation) p).collect(Collectors.toList());
aggs.add(new InternalSimpleValue(name(), diff, formatter, metadata()));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
}
newBuckets.add(newBucket);


@ -46,7 +46,7 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator {
return aggregation.copyWithRewritenBuckets(aggregations -> {
List<InternalAggregation> aggs = aggregations.copyResults();
aggs.add(doReduce(aggregations, reduceContext));
return new InternalAggregations(aggs);
return InternalAggregations.from(aggs);
});
}


@ -52,7 +52,7 @@ public class InternalSearchResponse extends SearchResponseSections implements Wr
public InternalSearchResponse(StreamInput in) throws IOException {
super(
new SearchHits(in),
in.readBoolean() ? new InternalAggregations(in) : null,
in.readBoolean() ? InternalAggregations.readFrom(in) : null,
in.readBoolean() ? new Suggest(in) : null,
in.readBoolean(),
in.readOptionalBoolean(),


@ -319,7 +319,7 @@ public final class QuerySearchResult extends SearchPhaseResult {
setTopDocs(readTopDocs(in));
if (in.getVersion().before(Version.V_7_7_0)) {
if (hasAggs = in.readBoolean()) {
aggregations = DelayableWriteable.referencing(new InternalAggregations(in));
aggregations = DelayableWriteable.referencing(InternalAggregations.readFrom(in));
}
if (in.getVersion().before(Version.V_7_2_0)) {
// The list of PipelineAggregators is sent by old versions. We don't need it anyway.
@ -327,7 +327,7 @@ public final class QuerySearchResult extends SearchPhaseResult {
}
} else {
if (hasAggs = in.readBoolean()) {
aggregations = DelayableWriteable.delayed(InternalAggregations::new, in);
aggregations = DelayableWriteable.delayed(InternalAggregations::readFrom, in);
}
}
if (in.readBoolean()) {


@ -396,7 +396,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE));
result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN),
new DocValueFormat[0]);
InternalAggregations aggs = new InternalAggregations(singletonList(new InternalMax("test", 1.0D, DocValueFormat.RAW, emptyMap())));
InternalAggregations aggs = InternalAggregations.from(singletonList(new InternalMax("test", 1.0D, DocValueFormat.RAW, emptyMap())));
result.aggregations(aggs);
result.setShardIndex(0);
consumer.consumeResult(result);
@ -405,7 +405,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE));
result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN),
new DocValueFormat[0]);
aggs = new InternalAggregations(singletonList(new InternalMax("test", 3.0D, DocValueFormat.RAW, emptyMap())));
aggs = InternalAggregations.from(singletonList(new InternalMax("test", 3.0D, DocValueFormat.RAW, emptyMap())));
result.aggregations(aggs);
result.setShardIndex(2);
consumer.consumeResult(result);
@ -414,7 +414,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE));
result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN),
new DocValueFormat[0]);
aggs = new InternalAggregations(singletonList(new InternalMax("test", 2.0D, DocValueFormat.RAW, emptyMap())));
aggs = InternalAggregations.from(singletonList(new InternalMax("test", 2.0D, DocValueFormat.RAW, emptyMap())));
result.aggregations(aggs);
result.setShardIndex(1);
consumer.consumeResult(result);
@ -483,7 +483,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
result.topDocs(new TopDocsAndMaxScore(
new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(0, number)}), number),
new DocValueFormat[0]);
InternalAggregations aggs = new InternalAggregations(Collections.singletonList(new InternalMax("test", (double) number,
InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(new InternalMax("test", (double) number,
DocValueFormat.RAW, Collections.emptyMap())));
result.aggregations(aggs);
result.setShardIndex(id);
@ -526,7 +526,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE));
result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), number),
new DocValueFormat[0]);
InternalAggregations aggs = new InternalAggregations(Collections.singletonList(new InternalMax("test", (double) number,
InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(new InternalMax("test", (double) number,
DocValueFormat.RAW, Collections.emptyMap())));
result.aggregations(aggs);
result.setShardIndex(i);
@ -878,7 +878,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
result.topDocs(new TopDocsAndMaxScore(
new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[]{new ScoreDoc(0, number)}), number),
new DocValueFormat[0]);
InternalAggregations aggs = new InternalAggregations(Collections.singletonList(
InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(
new InternalMax("test", (double) number, DocValueFormat.RAW, Collections.emptyMap())));
result.aggregations(aggs);
result.setShardIndex(id);


@ -370,7 +370,7 @@ public class SearchResponseMergerTests extends ESTestCase {
InternalDateRange.Bucket bucket = factory.createBucket("bucket", 0, 10000, count, InternalAggregations.EMPTY,
false, DocValueFormat.RAW);
InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap());
InternalAggregations aggs = new InternalAggregations(Arrays.asList(range, max));
InternalAggregations aggs = InternalAggregations.from(Arrays.asList(range, max));
SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN);
InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, aggs, null, null, false, null, 1);
SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, 1, 1, 0, randomLong(),


@ -303,6 +303,6 @@ public class AggregationsTests extends ESTestCase {
}
aggs.add(testCase.createTestInstance());
}
return new InternalAggregations(aggs);
return InternalAggregations.from(aggs);
}
}


@ -63,7 +63,7 @@ public class InternalAggregationsTests extends ESTestCase {
public void testNonFinalReduceTopLevelPipelineAggs() {
InternalAggregation terms = new StringTerms("name", BucketOrder.key(true),
10, 1, Collections.emptyMap(), DocValueFormat.RAW, 25, false, 10, Collections.emptyList(), 0);
List<InternalAggregations> aggs = singletonList(new InternalAggregations(Collections.singletonList(terms)));
List<InternalAggregations> aggs = singletonList(InternalAggregations.from(Collections.singletonList(terms)));
InternalAggregations reducedAggs = InternalAggregations.topLevelReduce(aggs, maxBucketReduceContext().forPartialReduction());
assertEquals(1, reducedAggs.getTopLevelPipelineAggregators().size());
assertEquals(1, reducedAggs.aggregations.size());
@ -73,7 +73,7 @@ public class InternalAggregationsTests extends ESTestCase {
InternalAggregation terms = new StringTerms("name", BucketOrder.key(true),
10, 1, Collections.emptyMap(), DocValueFormat.RAW, 25, false, 10, Collections.emptyList(), 0);
InternalAggregations aggs = new InternalAggregations(Collections.singletonList(terms));
InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(terms));
InternalAggregations reducedAggs = InternalAggregations.topLevelReduce(Collections.singletonList(aggs),
maxBucketReduceContext().forFinalReduction());
assertEquals(0, reducedAggs.getTopLevelPipelineAggregators().size());
@ -145,7 +145,7 @@ public class InternalAggregationsTests extends ESTestCase {
aggregations.writeTo(out);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytesRef().bytes), registry)) {
in.setVersion(version);
InternalAggregations deserialized = new InternalAggregations(in);
InternalAggregations deserialized = InternalAggregations.readFrom(in);
assertEquals(aggregations.aggregations, deserialized.aggregations);
if (iteration < 2) {
/*


@ -42,7 +42,7 @@ public class InternalMultiBucketAggregationTests extends ESTestCase {
AggregationPath path = AggregationPath.parse("the_avg");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg));
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
@ -55,7 +55,7 @@ public class InternalMultiBucketAggregationTests extends ESTestCase {
AggregationPath path = AggregationPath.parse("the_avg.value");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg));
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
@ -68,7 +68,7 @@ public class InternalMultiBucketAggregationTests extends ESTestCase {
AggregationPath path = AggregationPath.parse("foo.value");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg));
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
@ -82,7 +82,7 @@ public class InternalMultiBucketAggregationTests extends ESTestCase {
AggregationPath path = AggregationPath.parse("the_avg.unknown");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg));
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
@ -96,7 +96,7 @@ public class InternalMultiBucketAggregationTests extends ESTestCase {
AggregationPath path = AggregationPath.parse("_bucket_count");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg));
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
@ -109,7 +109,7 @@ public class InternalMultiBucketAggregationTests extends ESTestCase {
AggregationPath path = AggregationPath.parse("_count");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg));
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
@ -122,7 +122,7 @@ public class InternalMultiBucketAggregationTests extends ESTestCase {
AggregationPath path = AggregationPath.parse("_key");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg));
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
@ -136,14 +136,14 @@ public class InternalMultiBucketAggregationTests extends ESTestCase {
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalStringAggs = new InternalAggregations(Collections.singletonList(agg));
InternalAggregations internalStringAggs = InternalAggregations.from(Collections.singletonList(agg));
List<StringTerms.Bucket> stringBuckets = Collections.singletonList(new StringTerms.Bucket(
new BytesRef("foo".getBytes(StandardCharsets.UTF_8), 0, "foo".getBytes(StandardCharsets.UTF_8).length), 1,
internalStringAggs, false, 0, DocValueFormat.RAW));
InternalTerms termsAgg = new StringTerms("string_terms", BucketOrder.count(false), 1, 0,
Collections.emptyMap(), DocValueFormat.RAW, 1, false, 0, stringBuckets, 0);
InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(termsAgg));
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(termsAgg));
LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
@ -156,14 +156,14 @@ public class InternalMultiBucketAggregationTests extends ESTestCase {
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalStringAggs = new InternalAggregations(Collections.singletonList(agg));
InternalAggregations internalStringAggs = InternalAggregations.from(Collections.singletonList(agg));
List<StringTerms.Bucket> stringBuckets = Collections.singletonList(new StringTerms.Bucket(
new BytesRef("foo".getBytes(StandardCharsets.UTF_8), 0, "foo".getBytes(StandardCharsets.UTF_8).length), 1,
internalStringAggs, false, 0, DocValueFormat.RAW));
InternalTerms termsAgg = new StringTerms("string_terms", BucketOrder.count(false), 1, 0,
Collections.emptyMap(), DocValueFormat.RAW, 1, false, 0, stringBuckets, 0);
InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(termsAgg));
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(termsAgg));
LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);


@ -27,6 +27,7 @@ import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregatio
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -65,7 +66,7 @@ public class InternalFilterTests extends InternalSingleBucketAggregationTestCase
InternalFilter dummy = createTestInstance();
InternalFilter inner = createTestInstance();
InternalAggregations sub = new InternalAggregations(singletonList(inner));
InternalAggregations sub = InternalAggregations.from(Collections.singletonList(inner));
InternalFilter test = createTestInstance("test", randomNonNegativeLong(), sub, emptyMap());
PipelineAggregator mockPipeline = new PipelineAggregator(null, null, null) {
@Override


@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Pipelin
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -132,7 +133,7 @@ public class InternalFiltersTests extends InternalMultiBucketAggregationTestCase
InternalFilters dummy = createTestInstance();
InternalFilters inner = createTestInstance();
InternalAggregations sub = new InternalAggregations(singletonList(inner));
InternalAggregations sub = InternalAggregations.from(Collections.singletonList(inner));
InternalFilters test = createTestInstance("test", emptyMap(), sub);
PipelineAggregator mockPipeline = new PipelineAggregator(null, null, null) {
@Override


@ -37,7 +37,6 @@ import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -73,8 +72,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
long key = startingDate + (intervalMillis * i);
buckets.add(i, new InternalAutoDateHistogram.Bucket(key, randomIntBetween(1, 100), format, aggregations));
}
InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList());
BucketInfo bucketInfo = new BucketInfo(roundingInfos, roundingIndex, subAggregations);
BucketInfo bucketInfo = new BucketInfo(roundingInfos, roundingIndex, InternalAggregations.EMPTY);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, metadata, 1);
}
@ -360,7 +358,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
ReduceTestBuilder bucket(String key, long docCount) {
buckets.add(new InternalAutoDateHistogram.Bucket(
utcMillis(key), docCount, FORMAT, new InternalAggregations(emptyList())));
utcMillis(key), docCount, FORMAT, InternalAggregations.EMPTY));
return this;
}
@ -376,7 +374,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
assertThat("rounding [" + whichRounding + "] should be in " + Arrays.toString(roundings), roundingIdx, greaterThan(-1));
assertTrue(Arrays.toString(roundings[roundingIdx].innerIntervals) + " must contain " + innerInterval,
Arrays.binarySearch(roundings[roundingIdx].innerIntervals, innerInterval) >= 0);
BucketInfo bucketInfo = new BucketInfo(roundings, roundingIdx, new InternalAggregations(emptyList()));
BucketInfo bucketInfo = new BucketInfo(roundings, roundingIdx, InternalAggregations.EMPTY);
results.add(new InternalAutoDateHistogram("test", new ArrayList<>(buckets), targetBuckets, bucketInfo,
FORMAT, emptyMap(), innerInterval));
buckets.clear();


@ -62,7 +62,7 @@ public abstract class InternalSingleBucketAggregationTestCase<T extends Internal
if (hasInternalMin) {
aggs.add(new InternalMin("min", randomDouble(), randomNumericDocValueFormat(), emptyMap()));
}
return new InternalAggregations(aggs);
return InternalAggregations.from(aggs);
};
}
@ -93,7 +93,7 @@ public abstract class InternalSingleBucketAggregationTestCase<T extends Internal
List<InternalAggregation> aggs = new ArrayList<>();
aggs.add(new InternalMax("new_max", randomDouble(), randomNumericDocValueFormat(), emptyMap()));
aggs.add(new InternalMin("new_min", randomDouble(), randomNumericDocValueFormat(), emptyMap()));
aggregations = new InternalAggregations(aggs);
aggregations = InternalAggregations.from(aggs);
break;
case 3:
default:


@ -78,7 +78,7 @@ public abstract class InternalMultiBucketAggregationTestCase<T extends InternalA
for (int i = 0; i < numAggregations; i++) {
aggs.add(createTestInstance(randomAlphaOfLength(5), emptyMap(), InternalAggregations.EMPTY));
}
return new InternalAggregations(aggs);
return InternalAggregations.from(aggs);
};
}
}


@ -83,7 +83,7 @@ public class CumulativeCardinalityPipelineAggregator extends PipelineAggregator
.map((p) -> (InternalAggregation) p)
.collect(Collectors.toList());
aggs.add(new InternalSimpleLongValue(name(), cardinality, formatter, metadata()));
Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs));
Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
newBuckets.add(newBucket);
}
return factory.createAggregation(newBuckets);


@ -106,7 +106,7 @@ public class MovingPercentilesPipelineAggregator extends PipelineAggregator {
.map((p) -> (InternalAggregation) p)
.collect(Collectors.toList());
aggs.add(new InternalTDigestPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata()));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
}
newBuckets.add(newBucket);
index++;
@ -152,7 +152,7 @@ public class MovingPercentilesPipelineAggregator extends PipelineAggregator {
.map((p) -> (InternalAggregation) p)
.collect(Collectors.toList());
aggs.add(new InternalHDRPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata()));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs));
newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
}
newBuckets.add(newBucket);
index++;


@ -67,7 +67,7 @@ public class NormalizePipelineAggregator extends PipelineAggregator {
.map((p) -> (InternalAggregation) p)
.collect(Collectors.toList());
aggs.add(new InternalSimpleValue(name(), normalizedBucketValue, formatter, metadata()));
InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs), bucket);
InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(InternalAggregations.from(aggs), bucket);
newBuckets.add(newBucket);
}


@ -146,12 +146,12 @@ public class AsyncSearchTaskTests extends ESTestCase {
AsyncSearchTask task = createAsyncSearchTask();
task.getSearchProgressActionListener().onListShards(Collections.emptyList(), Collections.emptyList(),
SearchResponse.Clusters.EMPTY, false);
InternalAggregations aggs = new InternalAggregations(Collections.singletonList(new StringTerms("name", BucketOrder.key(true), 1, 1,
InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(new StringTerms("name", BucketOrder.key(true), 1, 1,
Collections.emptyMap(), DocValueFormat.RAW, 1, false, 1, Collections.emptyList(), 0)));
//providing an empty named writeable registry will make the expansion fail, hence the delayed reduction will fail too
//causing an exception when executing getResponse as part of the completion listener callback
DelayableWriteable.Serialized<InternalAggregations> serializedAggs = DelayableWriteable.referencing(aggs)
.asSerialized(InternalAggregations::new, new NamedWriteableRegistry(Collections.emptyList()));
.asSerialized(InternalAggregations::readFrom, new NamedWriteableRegistry(Collections.emptyList()));
task.getSearchProgressActionListener().onPartialReduce(Collections.emptyList(), new TotalHits(0, TotalHits.Relation.EQUAL_TO),
serializedAggs, 1);
AtomicReference<AsyncSearchResponse> response = new AtomicReference<>();
@ -184,12 +184,12 @@ public class AsyncSearchTaskTests extends ESTestCase {
AsyncSearchTask task = createAsyncSearchTask();
task.getSearchProgressActionListener().onListShards(Collections.emptyList(), Collections.emptyList(),
SearchResponse.Clusters.EMPTY, false);
InternalAggregations aggs = new InternalAggregations(Collections.singletonList(new StringTerms("name", BucketOrder.key(true), 1, 1,
InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(new StringTerms("name", BucketOrder.key(true), 1, 1,
Collections.emptyMap(), DocValueFormat.RAW, 1, false, 1, Collections.emptyList(), 0)));
//providing an empty named writeable registry will make the expansion fail, hence the delayed reduction will fail too
//causing an exception when executing getResponse as part of the completion listener callback
DelayableWriteable.Serialized<InternalAggregations> serializedAggs = DelayableWriteable.referencing(aggs)
.asSerialized(InternalAggregations::new, new NamedWriteableRegistry(Collections.emptyList()));
.asSerialized(InternalAggregations::readFrom, new NamedWriteableRegistry(Collections.emptyList()));
task.getSearchProgressActionListener().onPartialReduce(Collections.emptyList(), new TotalHits(0, TotalHits.Relation.EQUAL_TO),
serializedAggs, 1);
task.getSearchProgressActionListener().onFailure(new CircuitBreakingException("boom", CircuitBreaker.Durability.TRANSIENT));


@ -107,7 +107,7 @@ public class InferencePipelineAggregator extends PipelineAggregator {
InternalInferenceAggregation aggResult = new InternalInferenceAggregation(name(), metadata(), inference);
aggs.add(aggResult);
InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs), bucket);
InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(InternalAggregations.from(aggs), bucket);
newBuckets.add(newBucket);
}


@ -5,8 +5,6 @@
*/
package org.elasticsearch.xpack.rollup;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.search.MultiSearchResponse;
@ -41,7 +39,6 @@ import org.elasticsearch.xpack.core.rollup.RollupField;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -57,8 +54,6 @@ import java.util.stream.Collectors;
*/
public class RollupResponseTranslator {
private static final Logger logger = LogManager.getLogger(RollupResponseTranslator.class);
/**
* Verifies a live-only search response. Essentially just checks for failure then returns
* the response since we have no work to do
@ -272,7 +267,7 @@ public class RollupResponseTranslator {
// The combination process returns a tree that is identical to the non-rolled
// which means we can use aggregation's reduce method to combine, just as if
// it was a result from another shard
InternalAggregations currentTree = new InternalAggregations(Collections.emptyList());
InternalAggregations currentTree = InternalAggregations.EMPTY;
InternalAggregation.ReduceContext finalReduceContext = InternalAggregation.ReduceContext.forFinalReduction(
reduceContext.bigArrays(), reduceContext.scriptService(), b -> {}, PipelineTree.EMPTY);
for (SearchResponse rolledResponse : rolledResponses) {
@ -291,7 +286,7 @@ public class RollupResponseTranslator {
// Iteratively merge in each new set of unrolled aggs, so that we can identify/fix overlapping doc_counts
// in the next round of unrolling
InternalAggregations finalUnrolledAggs = new InternalAggregations(unrolledAggs);
InternalAggregations finalUnrolledAggs = InternalAggregations.from(unrolledAggs);
currentTree = InternalAggregations.reduce(Arrays.asList(currentTree, finalUnrolledAggs), finalReduceContext);
}
@ -505,7 +500,7 @@ public class RollupResponseTranslator {
*/
private static InternalAggregations unrollSubAggsFromMulti(InternalBucket bucket, InternalBucket original, InternalBucket currentTree) {
// Iterate over the subAggs in each bucket
return new InternalAggregations(bucket.getAggregations()
return InternalAggregations.from(bucket.getAggregations()
.asList().stream()
// Avoid any rollup count metrics, as that's not a true "sub-agg" but rather agg
// added by the rollup for accounting purposes (e.g. doc_count)


@ -162,11 +162,11 @@ public class RollupResponseTranslationTests extends AggregatorTestCase {
when(count.getType()).thenReturn(SumAggregationBuilder.NAME);
subaggs.add(count);
when(filter.getAggregations()).thenReturn(new InternalAggregations(subaggs));
when(filter.getAggregations()).thenReturn(InternalAggregations.from(subaggs));
when(filter.getName()).thenReturn("filter_foo");
aggTree.add(filter);
Aggregations mockAggsWithout = new InternalAggregations(aggTree);
Aggregations mockAggsWithout = InternalAggregations.from(aggTree);
when(responseWithout.getAggregations()).thenReturn(mockAggsWithout);
MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{
@ -186,8 +186,7 @@ public class RollupResponseTranslationTests extends AggregatorTestCase {
SearchResponse responseWithout = mock(SearchResponse.class);
when(responseWithout.getTook()).thenReturn(new TimeValue(100));
Aggregations mockAggsWithout = new InternalAggregations(Collections.emptyList());
when(responseWithout.getAggregations()).thenReturn(mockAggsWithout);
when(responseWithout.getAggregations()).thenReturn(InternalAggregations.EMPTY);
MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{
new MultiSearchResponse.Item(responseWithout, null)};
@ -258,11 +257,11 @@ public class RollupResponseTranslationTests extends AggregatorTestCase {
when(count.getType()).thenReturn(SumAggregationBuilder.NAME);
subaggs.add(count);
when(filter.getAggregations()).thenReturn(new InternalAggregations(subaggs));
when(filter.getAggregations()).thenReturn(InternalAggregations.from(subaggs));
when(filter.getName()).thenReturn("filter_foo");
aggTree.add(filter);
Aggregations mockAggs = new InternalAggregations(aggTree);
Aggregations mockAggs = InternalAggregations.from(aggTree);
when(response.getAggregations()).thenReturn(mockAggs);
MultiSearchResponse.Item item = new MultiSearchResponse.Item(response, null);
@ -298,7 +297,7 @@ public class RollupResponseTranslationTests extends AggregatorTestCase {
InternalMax protoMax = mock(InternalMax.class);
when(protoMax.getName()).thenReturn("foo");
protoAggTree.add(protoMax);
Aggregations protoMockAggs = new InternalAggregations(protoAggTree);
Aggregations protoMockAggs = InternalAggregations.from(protoAggTree);
when(protoResponse.getAggregations()).thenReturn(protoMockAggs);
MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null);
@ -307,7 +306,7 @@ public class RollupResponseTranslationTests extends AggregatorTestCase {
InternalMax max = mock(InternalMax.class);
when(max.getName()).thenReturn("bizzbuzz");
aggTreeWithoutFilter.add(max);
Aggregations mockAggsWithout = new InternalAggregations(aggTreeWithoutFilter);
Aggregations mockAggsWithout = InternalAggregations.from(aggTreeWithoutFilter);
when(responseWithout.getAggregations()).thenReturn(mockAggsWithout);
MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null);
@ -328,7 +327,7 @@ public class RollupResponseTranslationTests extends AggregatorTestCase {
InternalMax protoMax = mock(InternalMax.class);
when(protoMax.getName()).thenReturn("foo");
protoAggTree.add(protoMax);
Aggregations protoMockAggs = new InternalAggregations(protoAggTree);
Aggregations protoMockAggs = InternalAggregations.from(protoAggTree);
when(protoResponse.getAggregations()).thenReturn(protoMockAggs);
MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null);
@ -336,7 +335,7 @@ public class RollupResponseTranslationTests extends AggregatorTestCase {
List<InternalAggregation> aggTreeWithoutFilter = new ArrayList<>(1);
InternalMax max = new InternalMax("filter_foo", 0, DocValueFormat.RAW, null);
aggTreeWithoutFilter.add(max);
Aggregations mockAggsWithout = new InternalAggregations(aggTreeWithoutFilter);
Aggregations mockAggsWithout = InternalAggregations.from(aggTreeWithoutFilter);
when(responseWithout.getAggregations()).thenReturn(mockAggsWithout);
MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null);
@ -358,7 +357,7 @@ public class RollupResponseTranslationTests extends AggregatorTestCase {
List<InternalAggregation> protoAggTree = new ArrayList<>(1);
InternalAvg internalAvg = new InternalAvg("foo", 10, 2, DocValueFormat.RAW, null);
protoAggTree.add(internalAvg);
Aggregations protoMockAggs = new InternalAggregations(protoAggTree);
Aggregations protoMockAggs = InternalAggregations.from(protoAggTree);
when(protoResponse.getAggregations()).thenReturn(protoMockAggs);
MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null);
@ -386,11 +385,11 @@ public class RollupResponseTranslationTests extends AggregatorTestCase {
when(count.getType()).thenReturn(SumAggregationBuilder.NAME);
subaggs.add(count);
when(filter.getAggregations()).thenReturn(new InternalAggregations(subaggs));
when(filter.getAggregations()).thenReturn(InternalAggregations.from(subaggs));
when(filter.getName()).thenReturn("filter_foo");
aggTree.add(filter);
Aggregations mockAggsWithout = new InternalAggregations(aggTree);
Aggregations mockAggsWithout = InternalAggregations.from(aggTree);
when(responseWithout.getAggregations()).thenReturn(mockAggsWithout);
MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null);
@ -495,11 +494,11 @@ public class RollupResponseTranslationTests extends AggregatorTestCase {
// TODO SearchResponse.Clusters is not public, using null for now. Should fix upstream.
MultiSearchResponse.Item unrolledItem = new MultiSearchResponse.Item(new SearchResponse(
new InternalSearchResponse(null,
new InternalAggregations(Collections.singletonList(responses.get(0))), null, null, false, false, 1),
InternalAggregations.from(Collections.singletonList(responses.get(0))), null, null, false, false, 1),
null, 1, 1, 0, 10, null, null), null);
MultiSearchResponse.Item rolledItem = new MultiSearchResponse.Item(new SearchResponse(
new InternalSearchResponse(null,
new InternalAggregations(Collections.singletonList(responses.get(1))), null, null, false, false, 1),
InternalAggregations.from(Collections.singletonList(responses.get(1))), null, null, false, false, 1),
null, 1, 1, 0, 10, null, null), null);
MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{unrolledItem, rolledItem};


@ -657,11 +657,11 @@ public class SearchActionTests extends ESTestCase {
when(count.getType()).thenReturn(SumAggregationBuilder.NAME);
subaggs.add(count);
when(filter.getAggregations()).thenReturn(new InternalAggregations(subaggs));
when(filter.getAggregations()).thenReturn(InternalAggregations.from(subaggs));
when(filter.getName()).thenReturn("filter_foo");
aggTree.add(filter);
Aggregations mockAggs = new InternalAggregations(aggTree);
Aggregations mockAggs = InternalAggregations.from(aggTree);
when(response.getAggregations()).thenReturn(mockAggs);
MultiSearchResponse.Item item = new MultiSearchResponse.Item(response, null);
MultiSearchResponse msearchResponse = new MultiSearchResponse(new MultiSearchResponse.Item[]{item}, 1);
@ -747,7 +747,7 @@ public class SearchActionTests extends ESTestCase {
List<InternalAggregation> protoAggTree = new ArrayList<>(1);
InternalAvg internalAvg = new InternalAvg("foo", 10, 2, DocValueFormat.RAW, null);
protoAggTree.add(internalAvg);
Aggregations protoMockAggs = new InternalAggregations(protoAggTree);
Aggregations protoMockAggs = InternalAggregations.from(protoAggTree);
when(protoResponse.getAggregations()).thenReturn(protoMockAggs);
MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null);
@ -775,11 +775,11 @@ public class SearchActionTests extends ESTestCase {
when(count.getType()).thenReturn(SumAggregationBuilder.NAME);
subaggs.add(count);
when(filter.getAggregations()).thenReturn(new InternalAggregations(subaggs));
when(filter.getAggregations()).thenReturn(InternalAggregations.from(subaggs));
when(filter.getName()).thenReturn("filter_foo");
aggTree.add(filter);
Aggregations mockAggsWithout = new InternalAggregations(aggTree);
Aggregations mockAggsWithout = InternalAggregations.from(aggTree);
when(responseWithout.getAggregations()).thenReturn(mockAggsWithout);
MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null);


@ -634,7 +634,7 @@ public class RollupIndexerStateTests extends ESTestCase {
@Override
public Aggregations getAggregations() {
return new InternalAggregations(Collections.emptyList());
return InternalAggregations.EMPTY;
}
@Override
@ -747,7 +747,7 @@ public class RollupIndexerStateTests extends ESTestCase {
@Override
public Aggregations getAggregations() {
return new InternalAggregations(Collections.emptyList());
return InternalAggregations.EMPTY;
}
@Override
@ -904,7 +904,7 @@ public class RollupIndexerStateTests extends ESTestCase {
@Override
public Aggregations getAggregations() {
return new InternalAggregations(Collections.emptyList());
return InternalAggregations.EMPTY;
}
@Override