Add list of deferred aggregations to the profiler (backport of #56208) (#56682)

This adds a few things to the `breakdown` of the profiler:
* `histogram` aggregations now contain `total_buckets` which is the
  count of buckets that they collected. This could be useful when
  debugging a histogram inside of another bucketing agg that is fairly
  selective.
* All bucketing aggs that can delay their sub-aggregations will now add
  a list of delayed sub-aggregations. This is useful because we
  sometimes have fairly involved logic around which sub-aggregations get
  delayed and this will save you from having to guess.
* Aggregations wrapped in the `MultiBucketAggregatorWrapper` can't
  accurately add anything to the breakdown. Instead, the wrapper adds a
  marker entry `"wrapped_in_multi_bucket_aggregator": true` so we can
  quickly pick out such aggregations when debugging.

It also fixes a bug where `_count` breakdown entries were contributing
to the overall `time_in_nanos`. They didn't add a large amount of time
so it is unlikely that this caused a big problem, but I fixed it while
I was there.

To support the arbitrary breakdown data this reworks the profiler so
that the `breakdown` can contain any data that is supported by
`StreamOutput#writeGenericValue(Object)` and
`XContentBuilder#value(Object)`.
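
As an illustration of the new extension point, here is a minimal sketch. It is not code from this commit: the subclass and its counter are hypothetical, but the `collectDebugInfo` hook and the serialization constraint are the ones added below.

```java
import java.util.function.BiConsumer;

import org.elasticsearch.search.aggregations.Aggregator;

// Hypothetical aggregator publishing one debug entry. Only add values
// that StreamOutput#writeGenericValue(Object) and
// XContentBuilder#value(Object) can both handle.
abstract class SketchAggregator extends Aggregator {
    private long totalBuckets; // hypothetical counter maintained while collecting

    @Override
    public void collectDebugInfo(BiConsumer<String, Object> add) {
        super.collectDebugInfo(add); // keep anything superclasses added
        add.accept("total_buckets", totalBuckets);
    }
}
```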
Nik Everett 2020-05-13 16:33:22 -04:00 committed by GitHub
parent 1ad83c37c4
commit 126619ae3c
19 changed files with 533 additions and 357 deletions

View File

@ -794,6 +794,9 @@ This yields the following aggregation profile output:
"reduce_count" : 0,
"collect" : 91456,
"collect_count" : 4
},
"debug": {
"total_buckets": 4
}
},
{
@ -823,7 +826,10 @@ This yields the following aggregation profile output:
"initialize_count" : 1,
"reduce_count" : 0,
"collect" : 94052,
"collect_count" : 4
"collect_count" : 4,
},
"debug": {
"total_buckets": 4
}
}
]
@ -848,10 +854,15 @@ The `time_in_nanos` field shows the time executed by each aggregation, and is
inclusive of all children. While the overall time is useful, the `breakdown`
field will give detailed stats about how the time was spent.
Some aggregations may return expert `debug` information that describes
features of the underlying execution of the aggregation. This is useful
for folks that hack on aggregations, but we don't expect it to be
otherwise useful. It can vary wildly between versions, aggregations,
and aggregation execution strategies.
===== Timing Breakdown
The `breakdown` component lists detailed statistics about low-level execution:
[source,js]
--------------------------------------------------

View File

@ -4,6 +4,7 @@ setup:
index: test_1
body:
settings:
number_of_shards: 1
number_of_replicas: 0
mappings:
properties:
@ -777,7 +778,66 @@ setup:
index: test_1
body: { "size" : 0, "aggs" : { "no_field_terms" : { "terms" : { "size": 1 } } } }
---
"profiler":
- skip:
version: " - 7.8.99"
reason: debug information added in 7.9.0
- do:
bulk:
index: test_1
refresh: true
body: |
{ "index": {} }
{ "str": "sheep", "number": 1 }
{ "index": {} }
{ "str": "sheep", "number": 3 }
{ "index": {} }
{ "str": "cow", "number": 1 }
{ "index": {} }
{ "str": "pig", "number": 1 }
- do:
search:
index: test_1
body:
profile: true
size: 0
aggs:
str_terms:
terms:
field: str
collect_mode: breadth_first
aggs:
max_number:
max:
field: number
- match: { aggregations.str_terms.buckets.0.key: sheep }
- match: { aggregations.str_terms.buckets.0.max_number.value: 3 }
- match: { aggregations.str_terms.buckets.1.key: cow }
- match: { aggregations.str_terms.buckets.1.max_number.value: 1 }
- match: { aggregations.str_terms.buckets.2.key: pig }
- match: { aggregations.str_terms.buckets.2.max_number.value: 1 }
- match: { profile.shards.0.aggregations.0.type: GlobalOrdinalsStringTermsAggregator }
- match: { profile.shards.0.aggregations.0.description: str_terms }
- match: { profile.shards.0.aggregations.0.breakdown.collect_count: 4 }
- match: { profile.shards.0.aggregations.0.debug.deferred_aggregators: [ max_number ] }
- match: { profile.shards.0.aggregations.0.children.0.type: MaxAggregator }
- match: { profile.shards.0.aggregations.0.children.0.description: max_number }
- do:
search:
index: test_1
body:
profile: true
size: 0
aggs:
n_terms:
terms:
field: number
- match: { aggregations.n_terms.buckets.0.key: 1 }
- match: { aggregations.n_terms.buckets.1.key: 3 }
- match: { profile.shards.0.aggregations.0.type: LongTermsAggregator }
- match: { profile.shards.0.aggregations.0.description: n_terms }
- match: { profile.shards.0.aggregations.0.breakdown.collect_count: 4 }
- match: { profile.shards.0.aggregations.0.debug.total_buckets: 2 }

View File

@ -32,6 +32,7 @@ import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@ -48,7 +49,17 @@ import static org.hamcrest.Matchers.notNullValue;
@ESIntegTestCase.SuiteScopeTestCase
public class AggregationProfilerIT extends ESIntegTestCase {
private static final String COLLECT = AggregationTimingType.COLLECT.toString();
private static final String INITIALIZE = AggregationTimingType.INITIALIZE.toString();
private static final String BUILD_AGGREGATION = AggregationTimingType.BUILD_AGGREGATION.toString();
private static final String REDUCE = AggregationTimingType.REDUCE.toString();
private static final Set<String> BREAKDOWN_KEYS = org.elasticsearch.common.collect.Set.of(
COLLECT, INITIALIZE, BUILD_AGGREGATION, REDUCE,
COLLECT + "_count", INITIALIZE + "_count", BUILD_AGGREGATION + "_count", REDUCE + "_count");
private static final String TOTAL_BUCKETS = "total_buckets";
private static final String WRAPPED = "wrapped_in_multi_bucket_aggregator";
private static final Object DEFERRED = "deferred_aggregators";
private static final String NUMBER_FIELD = "number";
private static final String TAG_FIELD = "tag";
@ -81,7 +92,6 @@ public class AggregationProfilerIT extends ESIntegTestCase {
indexRandom(true, builders);
createIndex("idx_unmapped");
ensureSearchable();
}
public void testSimpleProfile() {
@ -107,15 +117,15 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(histoAggResult.getTime(), greaterThan(0L));
Map<String, Long> breakdown = histoAggResult.getTimeBreakdown();
assertThat(breakdown, notNullValue());
assertThat(breakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(breakdown.get(INITIALIZE), greaterThan(0L));
assertThat(breakdown.get(COLLECT), greaterThan(0L));
assertThat(breakdown.get(BUILD_AGGREGATION).longValue(), greaterThan(0L));
assertThat(breakdown.get(REDUCE), equalTo(0L));
Map<String, Object> debug = histoAggResult.getDebugInfo();
assertThat(debug, notNullValue());
assertThat(debug.keySet(), equalTo(org.elasticsearch.common.collect.Set.of(TOTAL_BUCKETS)));
assertThat(((Number) debug.get(TOTAL_BUCKETS)).longValue(), greaterThan(0L));
}
}
@ -151,14 +161,15 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(histoAggResult.getTime(), greaterThan(0L));
Map<String, Long> histoBreakdown = histoAggResult.getTimeBreakdown();
assertThat(histoBreakdown, notNullValue());
assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(histoBreakdown.get(COLLECT), greaterThan(0L));
assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(histoBreakdown.get(REDUCE), equalTo(0L));
Map<String, Object> histoDebugInfo = histoAggResult.getDebugInfo();
assertThat(histoDebugInfo, notNullValue());
assertThat(histoDebugInfo.keySet(), equalTo(org.elasticsearch.common.collect.Set.of(TOTAL_BUCKETS)));
assertThat(((Number) histoDebugInfo.get(TOTAL_BUCKETS)).longValue(), greaterThan(0L));
assertThat(histoAggResult.getProfiledChildren().size(), equalTo(1));
ProfileResult termsAggResult = histoAggResult.getProfiledChildren().get(0);
@ -168,14 +179,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(termsAggResult.getTime(), greaterThan(0L));
Map<String, Long> termsBreakdown = termsAggResult.getTimeBreakdown();
assertThat(termsBreakdown, notNullValue());
assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(termsBreakdown.get(COLLECT), greaterThan(0L));
assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(termsBreakdown.get(REDUCE), equalTo(0L));
assertThat(termsAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of(WRAPPED, true)));
assertThat(termsAggResult.getProfiledChildren().size(), equalTo(1));
ProfileResult avgAggResult = termsAggResult.getProfiledChildren().get(0);
@ -185,14 +194,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(avgAggResult.getTime(), greaterThan(0L));
Map<String, Long> avgBreakdown = avgAggResult.getTimeBreakdown();
assertThat(avgBreakdown, notNullValue());
assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0));
}
}
@ -221,14 +228,15 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(histoAggResult.getTime(), greaterThan(0L));
Map<String, Long> histoBreakdown = histoAggResult.getTimeBreakdown();
assertThat(histoBreakdown, notNullValue());
assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(histoBreakdown.get(COLLECT), greaterThan(0L));
assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(histoBreakdown.get(REDUCE), equalTo(0L));
Map<String, Object> histoDebugInfo = histoAggResult.getDebugInfo();
assertThat(histoDebugInfo, notNullValue());
assertThat(histoDebugInfo.keySet(), equalTo(org.elasticsearch.common.collect.Set.of(TOTAL_BUCKETS)));
assertThat(((Number) histoDebugInfo.get(TOTAL_BUCKETS)).longValue(), greaterThan(0L));
assertThat(histoAggResult.getProfiledChildren().size(), equalTo(1));
ProfileResult termsAggResult = histoAggResult.getProfiledChildren().get(0);
@ -238,14 +246,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(termsAggResult.getTime(), greaterThan(0L));
Map<String, Long> termsBreakdown = termsAggResult.getTimeBreakdown();
assertThat(termsBreakdown, notNullValue());
assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(termsBreakdown.get(COLLECT), greaterThan(0L));
assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(termsBreakdown.get(REDUCE), equalTo(0L));
assertThat(termsAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of(WRAPPED, true)));
assertThat(termsAggResult.getProfiledChildren().size(), equalTo(1));
ProfileResult avgAggResult = termsAggResult.getProfiledChildren().get(0);
@ -253,16 +259,14 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator"));
assertThat(avgAggResult.getLuceneDescription(), equalTo("avg"));
assertThat(avgAggResult.getTime(), greaterThan(0L));
Map<String, Long> avgBreakdown = avgAggResult.getTimeBreakdown();
assertThat(avgBreakdown, notNullValue());
assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0));
}
}
@ -289,16 +293,15 @@ public class AggregationProfilerIT extends ESIntegTestCase {
equalTo(DiversifiedOrdinalsSamplerAggregator.class.getSimpleName()));
assertThat(diversifyAggResult.getLuceneDescription(), equalTo("diversify"));
assertThat(diversifyAggResult.getTime(), greaterThan(0L));
Map<String, Long> diversifyBreakdown = diversifyAggResult.getTimeBreakdown();
assertThat(diversifyBreakdown, notNullValue());
assertThat(diversifyBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L));
assertThat(diversifyBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(diversifyBreakdown.get(REDUCE), equalTo(0L));
assertThat(diversifyAggResult.getDebugInfo(), equalTo(
org.elasticsearch.common.collect.Map.of(DEFERRED, org.elasticsearch.common.collect.List.of("max"))));
assertThat(diversifyAggResult.getProfiledChildren().size(), equalTo(1));
ProfileResult maxAggResult = diversifyAggResult.getProfiledChildren().get(0);
@ -306,16 +309,14 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator"));
assertThat(maxAggResult.getLuceneDescription(), equalTo("max"));
assertThat(maxAggResult.getTime(), greaterThan(0L));
Map<String, Long> maxBreakdown = maxAggResult.getTimeBreakdown();
assertThat(maxBreakdown, notNullValue());
assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0));
}
}
@ -352,14 +353,15 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(histoAggResult.getTime(), greaterThan(0L));
Map<String, Long> histoBreakdown = histoAggResult.getTimeBreakdown();
assertThat(histoBreakdown, notNullValue());
assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(histoBreakdown.get(COLLECT), greaterThan(0L));
assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(histoBreakdown.get(REDUCE), equalTo(0L));
Map<String, Object> histoDebugInfo = histoAggResult.getDebugInfo();
assertThat(histoDebugInfo, notNullValue());
assertThat(histoDebugInfo.keySet(), equalTo(org.elasticsearch.common.collect.Set.of(TOTAL_BUCKETS)));
assertThat(((Number) histoDebugInfo.get(TOTAL_BUCKETS)).longValue(), greaterThan(0L));
assertThat(histoAggResult.getProfiledChildren().size(), equalTo(2));
Map<String, ProfileResult> histoAggResultSubAggregations = histoAggResult.getProfiledChildren().stream()
@ -371,14 +373,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(tagsAggResult.getTime(), greaterThan(0L));
Map<String, Long> tagsBreakdown = tagsAggResult.getTimeBreakdown();
assertThat(tagsBreakdown, notNullValue());
assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L));
assertThat(tagsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(tagsBreakdown.get(REDUCE), equalTo(0L));
assertThat(tagsAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of(WRAPPED, true)));
assertThat(tagsAggResult.getProfiledChildren().size(), equalTo(2));
Map<String, ProfileResult> tagsAggResultSubAggregations = tagsAggResult.getProfiledChildren().stream()
@ -388,32 +388,28 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(avgAggResult, notNullValue());
assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator"));
assertThat(avgAggResult.getTime(), greaterThan(0L));
Map<String, Long> avgBreakdown = avgAggResult.getTimeBreakdown();
assertThat(avgBreakdown, notNullValue());
assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0));
ProfileResult maxAggResult = tagsAggResultSubAggregations.get("max");
assertThat(maxAggResult, notNullValue());
assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator"));
assertThat(maxAggResult.getTime(), greaterThan(0L));
Map<String, Long> maxBreakdown = maxAggResult.getTimeBreakdown();
assertThat(maxBreakdown, notNullValue());
assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0));
ProfileResult stringsAggResult = histoAggResultSubAggregations.get("strings");
@ -422,14 +418,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(stringsAggResult.getTime(), greaterThan(0L));
Map<String, Long> stringsBreakdown = stringsAggResult.getTimeBreakdown();
assertThat(stringsBreakdown, notNullValue());
assertThat(stringsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(stringsBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(stringsBreakdown.get(COLLECT), greaterThan(0L));
assertThat(stringsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(stringsBreakdown.get(REDUCE), equalTo(0L));
assertThat(stringsAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of(WRAPPED, true)));
assertThat(stringsAggResult.getProfiledChildren().size(), equalTo(3));
Map<String, ProfileResult> stringsAggResultSubAggregations = stringsAggResult.getProfiledChildren().stream()
@ -439,32 +433,28 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(avgAggResult, notNullValue());
assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator"));
assertThat(avgAggResult.getTime(), greaterThan(0L));
avgBreakdown = avgAggResult.getTimeBreakdown();
assertThat(avgBreakdown, notNullValue());
assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0));
maxAggResult = stringsAggResultSubAggregations.get("max");
assertThat(maxAggResult, notNullValue());
assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator"));
assertThat(maxAggResult.getTime(), greaterThan(0L));
maxBreakdown = maxAggResult.getTimeBreakdown();
assertThat(maxBreakdown, notNullValue());
assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0));
tagsAggResult = stringsAggResultSubAggregations.get("tags");
@ -474,14 +464,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(tagsAggResult.getTime(), greaterThan(0L));
tagsBreakdown = tagsAggResult.getTimeBreakdown();
assertThat(tagsBreakdown, notNullValue());
assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L));
assertThat(tagsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(tagsBreakdown.get(REDUCE), equalTo(0L));
assertThat(tagsAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of(WRAPPED, true)));
assertThat(tagsAggResult.getProfiledChildren().size(), equalTo(2));
tagsAggResultSubAggregations = tagsAggResult.getProfiledChildren().stream()
@ -491,32 +479,28 @@ public class AggregationProfilerIT extends ESIntegTestCase {
assertThat(avgAggResult, notNullValue());
assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator"));
assertThat(avgAggResult.getTime(), greaterThan(0L));
avgBreakdown = avgAggResult.getTimeBreakdown();
assertThat(avgBreakdown, notNullValue());
assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0));
maxAggResult = tagsAggResultSubAggregations.get("max");
assertThat(maxAggResult, notNullValue());
assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator"));
assertThat(maxAggResult.getTime(), greaterThan(0L));
maxBreakdown = maxAggResult.getTimeBreakdown();
assertThat(maxBreakdown, notNullValue());
assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0));
}
}

View File

@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationPath;
@ -34,12 +35,14 @@ import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.Iterator;
import java.util.function.BiConsumer;
/**
* An Aggregator.
* <p>
* Be <strong>careful</strong> when adding methods to this class. If possible
* make sure they have sensible default implementations.
*/
public abstract class Aggregator extends BucketCollector implements Releasable {
/**
@ -176,6 +179,19 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
*/
public abstract InternalAggregation buildEmptyAggregation();
/**
* Collect debug information to add to the profiling results. This will
* only be called if the aggregation is being profiled.
* <p>
* Well behaved implementations will always call the superclass
* implementation just in case it has something interesting. They will
* also only add objects which can be serialized with
* {@link StreamOutput#writeGenericValue(Object)} and
* {@link XContentBuilder#value(Object)}. And they'll have an integration
* test.
*/
public void collectDebugInfo(BiConsumer<String, Object> add) {}
/** Aggregation mode for sub aggregations. */
public enum SubAggCollectionMode implements Writeable {

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException;
import java.util.Map;
import java.util.function.BiConsumer;
import static org.elasticsearch.search.aggregations.support.AggregationUsageService.OTHER_SUBTYPE;
@ -178,6 +179,18 @@ public abstract class AggregatorFactory {
public void close() {
Releasables.close(aggregators, collectors);
}
@Override
public void collectDebugInfo(BiConsumer<String, Object> add) {
/*
* There isn't really a sane way to give our delegates a way to
* add entries because we'd have to merge them. So we just *don't*
* and leave a marker of our own. This ain't great, but we plan
* to cut down on usage of this wrapper in the future.
*/
add.accept("wrapped_in_multi_bucket_aggregator", true);
super.collectDebugInfo(add);
}
}
protected final String name;

View File

@ -30,6 +30,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
public abstract class DeferableBucketAggregator extends BucketsAggregator {
/**
@ -37,6 +38,7 @@ public abstract class DeferableBucketAggregator extends BucketsAggregator {
* been deferred.
*/
private DeferringBucketCollector recordingWrapper;
private List<String> deferredAggregationNames;
protected DeferableBucketAggregator(String name, AggregatorFactories factories, SearchContext context, Aggregator parent,
Map<String, Object> metadata) throws IOException {
@ -45,21 +47,24 @@ public abstract class DeferableBucketAggregator extends BucketsAggregator {
@Override
protected void doPreCollection() throws IOException {
List<BucketCollector> collectors = new ArrayList<>(subAggregators.length);
List<BucketCollector> deferredAggregations = null;
for (int i = 0; i < subAggregators.length; ++i) {
if (shouldDefer(subAggregators[i])) {
if (recordingWrapper == null) {
recordingWrapper = getDeferringCollector();
deferredAggregations = new ArrayList<>(subAggregators.length);
deferredAggregationNames = new ArrayList<>(subAggregators.length);
}
deferredAggregations.add(subAggregators[i]);
deferredAggregationNames.add(subAggregators[i].name());
subAggregators[i] = recordingWrapper.wrap(subAggregators[i]);
} else {
collectors.add(subAggregators[i]);
}
}
if (recordingWrapper != null) {
recordingWrapper.setDeferredCollector(deferredAggregations);
collectors.add(recordingWrapper);
}
collectableSubAggregators = MultiBucketCollector.wrap(collectors);
@ -100,4 +105,12 @@ public abstract class DeferableBucketAggregator extends BucketsAggregator {
recordingWrapper.prepareSelectedBuckets(ordsToCollect);
}
}
@Override
public void collectDebugInfo(BiConsumer<String, Object> add) {
if (deferredAggregationNames != null) {
add.accept("deferred_aggregators", deferredAggregationNames);
}
super.collectDebugInfo(add);
}
}
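
A standalone sketch of the bookkeeping above (hypothetical class and driver, not part of this diff): deferred child names are recorded lazily and reported only when something was actually deferred.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;

// Mirrors DeferableBucketAggregator's bookkeeping in miniature.
class DeferralSketch {
    private List<String> deferredAggregationNames;

    void preCollect(String[] subAggregatorNames, boolean[] shouldDefer) {
        for (int i = 0; i < subAggregatorNames.length; i++) {
            if (shouldDefer[i]) {
                if (deferredAggregationNames == null) {
                    // allocated lazily, exactly like the real aggregator
                    deferredAggregationNames = new ArrayList<>(subAggregatorNames.length);
                }
                deferredAggregationNames.add(subAggregatorNames[i]);
            }
        }
    }

    void collectDebugInfo(BiConsumer<String, Object> add) {
        if (deferredAggregationNames != null) {
            add.accept("deferred_aggregators", deferredAggregationNames);
        }
    }

    public static void main(String[] args) {
        DeferralSketch sketch = new DeferralSketch();
        sketch.preCollect(new String[] {"max_number"}, new boolean[] {true});
        sketch.collectDebugInfo((k, v) -> System.out.println(k + " = " + v));
        // prints: deferred_aggregators = [max_number]
    }
}
```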

View File

@ -41,6 +41,7 @@ import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.function.BiConsumer;
/**
* An aggregator for numeric values. For a given {@code interval},
@ -160,4 +161,10 @@ public class NumericHistogramAggregator extends BucketsAggregator {
public void doClose() {
Releasables.close(bucketOrds);
}
@Override
public void collectDebugInfo(BiConsumer<String, Object> add) {
add.accept("total_buckets", bucketOrds.size());
super.collectDebugInfo(add);
}
}

View File

@ -51,6 +51,11 @@ public abstract class LongKeyedBucketOrds implements Releasable {
*/
public abstract long bucketsInOrd(long owningBucketOrd);
/**
* The number of collected buckets.
*/
public abstract long size();
/**
* Build an iterator for buckets inside {@code owningBucketOrd}.
* <p>
@ -113,6 +118,11 @@ public abstract class LongKeyedBucketOrds implements Releasable {
return ords.size();
}
@Override
public long size() {
return ords.size();
}
@Override
public BucketOrdsEnum ordsEnum(long owningBucketOrd) {
assert owningBucketOrd == 0;
@ -218,6 +228,11 @@ public abstract class LongKeyedBucketOrds implements Releasable {
return buckets.valueToThisBucketOrd.size();
}
@Override
public long size() {
return lastGlobalOrd + 1;
}
@Override
public BucketOrdsEnum ordsEnum(long owningBucketOrd) {
if (owningBucketOrd >= owningOrdToBuckets.size()) {

View File

@ -40,6 +40,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import static java.util.Collections.emptyList;
@ -186,4 +187,10 @@ public class LongTermsAggregator extends TermsAggregator {
super.doClose();
Releasables.close(bucketOrds);
}
@Override
public void collectDebugInfo(BiConsumer<String, Object> add) {
super.collectDebugInfo(add);
add.accept("total_buckets", bucketOrds.size());
}
}

View File

@ -26,11 +26,10 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.List;
import java.util.Map;
public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBreakdown<?>, E> {
protected ArrayList<PB> breakdowns;
/** Maps the Query to its list of children. This is basically the dependency tree */
protected ArrayList<ArrayList<Integer>> tree;
/** A list of the original queries, keyed by index position */
@ -42,7 +41,7 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
private int currentToken = 0;
public AbstractInternalProfileTree() {
breakdowns = new ArrayList<>(10);
stack = new ArrayDeque<>(10);
tree = new ArrayList<>(10);
elements = new ArrayList<>(10);
@ -94,14 +93,14 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
* Helper method to add a new node to the dependency tree.
*
* Initializes a new list in the dependency tree, saves the query and
* generates a new {@link AbstractProfileBreakdown} to track the timings
* of this element.
*
* @param element
* The element to profile
* @param token
* The assigned token for this element
* @return A {@link AbstractProfileBreakdown} to profile this element
*/
private PB addDependencyNode(E element, int token) {
@ -111,9 +110,9 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
// Save our query for lookup later
elements.add(element);
PB breakdown = createProfileBreakdown();
breakdowns.add(token, breakdown);
return breakdown;
}
protected abstract PB createProfileBreakdown();
@ -126,14 +125,14 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
}
/**
* After the element has been run and profiled, we need to merge the flat timing map
* with the dependency graph to build a data structure that mirrors the original
* query tree
*
* @return a hierarchical representation of the profiled query tree
*/
public List<ProfileResult> getTree() {
ArrayList<ProfileResult> results = new ArrayList<>(roots.size());
for (Integer root : roots) {
results.add(doGetTree(root));
}
@ -147,8 +146,7 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
*/
private ProfileResult doGetTree(int token) {
E element = elements.get(token);
PB breakdown = breakdowns.get(token);
List<Integer> children = tree.get(token);
List<ProfileResult> childrenProfileResults = Collections.emptyList();
@ -164,7 +162,8 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
// calculating the same times over and over...but worth the effort?
String type = getTypeFromElement(element);
String description = getDescriptionFromElement(element);
return new ProfileResult(type, description, breakdown.toBreakdownMap(), breakdown.toDebugMap(),
breakdown.toNodeTime(), childrenProfileResults);
}
protected abstract String getTypeFromElement(E element);

View File

@ -23,6 +23,8 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.emptyMap;
/**
* A record of timings for the various operations that may happen during query execution.
* A node's time may be composed of several internal attributes (rewriting, weighting,
@ -53,13 +55,30 @@ public abstract class AbstractProfileBreakdown<T extends Enum<T>> {
timings[timing.ordinal()] = timer;
}
/**
* Build a timing count breakdown.
*/
public final Map<String, Long> toBreakdownMap() {
Map<String, Long> map = new HashMap<>(timings.length * 2);
for (T timingType : timingTypes) {
map.put(timingType.toString(), timings[timingType.ordinal()].getApproximateTiming());
map.put(timingType.toString() + "_count", timings[timingType.ordinal()].getCount());
}
return Collections.unmodifiableMap(map);
}
/**
* Fetch extra debugging information.
*/
protected Map<String, Object> toDebugMap() {
return emptyMap();
}
public final long toNodeTime() {
long total = 0;
for (T timingType : timingTypes) {
total += timings[timingType.ordinal()].getApproximateTiming();
}
return total;
}
}
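
A hedged illustration of the map `toBreakdownMap()` builds for aggregations and of the corrected node-time accounting; the class and the numbers are invented:

```java
import java.util.HashMap;
import java.util.Map;

class BreakdownShape {
    public static void main(String[] args) {
        // One timing entry plus one _count entry per AggregationTimingType.
        Map<String, Long> breakdown = new HashMap<>();
        breakdown.put("initialize", 1234L);
        breakdown.put("initialize_count", 1L);
        breakdown.put("collect", 91456L);
        breakdown.put("collect_count", 4L);
        breakdown.put("build_aggregation", 567L);
        breakdown.put("build_aggregation_count", 1L);
        breakdown.put("reduce", 0L);
        breakdown.put("reduce_count", 0L);
        // toNodeTime() sums only the timings; the *_count entries no
        // longer leak into time_in_nanos, which is the bug fixed here.
        long nodeTime = breakdown.entrySet().stream()
            .filter(e -> false == e.getKey().endsWith("_count"))
            .mapToLong(Map.Entry::getValue)
            .sum();
        System.out.println(nodeTime); // 93257
    }
}
```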

View File

@ -19,56 +19,60 @@
package org.elasticsearch.search.profile;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.InstantiatingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* This class is the internal representation of a profiled Query, corresponding
* to a single node in the query tree. It is built after the query has finished executing
* and is merely a structured representation, rather than the entity that collects the timing
* profile (see InternalProfiler for that)
*
* <p>
* Each InternalProfileResult has a List of InternalProfileResults, which will contain
* "children" queries if applicable
*/
public final class ProfileResult implements Writeable, ToXContentObject {
static final ParseField TYPE = new ParseField("type");
static final ParseField DESCRIPTION = new ParseField("description");
static final ParseField BREAKDOWN = new ParseField("breakdown");
static final ParseField DEBUG = new ParseField("debug");
static final ParseField NODE_TIME = new ParseField("time");
static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos");
static final ParseField CHILDREN = new ParseField("children");
private final String type;
private final String description;
private final Map<String, Long> breakdown;
private final Map<String, Object> debug;
private final long nodeTime;
private final List<ProfileResult> children;
public ProfileResult(String type, String description, Map<String, Long> breakdown, Map<String, Object> debug,
long nodeTime, List<ProfileResult> children) {
this.type = type;
this.description = description;
this.breakdown = Objects.requireNonNull(breakdown, "required breakdown argument missing");
this.debug = debug == null ? org.elasticsearch.common.collect.Map.of() : debug;
this.children = children == null ? org.elasticsearch.common.collect.List.of() : children;
this.nodeTime = nodeTime;
}
/**
@ -78,19 +82,13 @@ public final class ProfileResult implements Writeable, ToXContentObject {
this.type = in.readString();
this.description = in.readString();
this.nodeTime = in.readLong();
breakdown = in.readMap(StreamInput::readString, StreamInput::readLong);
if (in.getVersion().onOrAfter(Version.V_7_9_0)) {
debug = in.readMap(StreamInput::readString, StreamInput::readGenericValue);
} else {
debug = org.elasticsearch.common.collect.Map.of();
}
children = in.readList(ProfileResult::new);
}
@Override
@ -98,15 +96,11 @@ public final class ProfileResult implements Writeable, ToXContentObject {
out.writeString(type);
out.writeString(description);
out.writeLong(nodeTime); // not Vlong because can be negative
out.writeMap(breakdown, StreamOutput::writeString, StreamOutput::writeLong);
if (out.getVersion().onOrAfter(Version.V_7_9_0)) {
out.writeMap(debug, StreamOutput::writeString, StreamOutput::writeGenericValue);
}
out.writeList(children);
}
/**
@ -117,17 +111,24 @@ public final class ProfileResult implements Writeable, ToXContentObject {
}
/**
* Retrieve the name of the entry (e.g. "TermQuery" or "LongTermsAggregator")
*/
public String getQueryName() {
return type;
}
/**
* The timing breakdown for this node.
*/
public Map<String, Long> getTimeBreakdown() {
return Collections.unmodifiableMap(breakdown);
}
/**
* The debug information about the profiled execution.
*/
public Map<String, Object> getDebugInfo() {
return Collections.unmodifiableMap(debug);
}
/**
@ -155,79 +156,35 @@ public final class ProfileResult implements Writeable, ToXContentObject {
builder.field(NODE_TIME.getPreferredName(), new TimeValue(getTime(), TimeUnit.NANOSECONDS).toString());
}
builder.field(NODE_TIME_RAW.getPreferredName(), getTime());
builder.field(BREAKDOWN.getPreferredName(), breakdown);
if (false == debug.isEmpty()) {
builder.field(DEBUG.getPreferredName(), debug);
}
if (false == children.isEmpty()) {
builder.startArray(CHILDREN.getPreferredName());
for (ProfileResult child : children) {
builder = child.toXContent(builder, params);
}
builder.endArray();
}
return builder.endObject();
}
private static final InstantiatingObjectParser<ProfileResult, Void> PARSER;
static {
InstantiatingObjectParser.Builder<ProfileResult, Void> parser =
InstantiatingObjectParser.builder("profile_result", true, ProfileResult.class);
parser.declareString(constructorArg(), TYPE);
parser.declareString(constructorArg(), DESCRIPTION);
parser.declareObject(constructorArg(), (p, c) -> p.map(), BREAKDOWN);
parser.declareObject(optionalConstructorArg(), (p, c) -> p.map(), DEBUG);
parser.declareLong(constructorArg(), NODE_TIME_RAW);
parser.declareObjectArray(optionalConstructorArg(), (p, c) -> fromXContent(p), CHILDREN);
PARSER = parser.build();
}
/**
* @param timings a map of breakdown timing for the node
* @return The total time at this node
*/
private static long getTotalTime(Map<String, Long> timings) {
long nodeTime = 0;
for (long time : timings.values()) {
nodeTime += time;
}
return nodeTime;
public static ProfileResult fromXContent(XContentParser p) throws IOException {
return PARSER.parse(p, null);
}
}
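With the hand-rolled parsing loop replaced by an `InstantiatingObjectParser`, the declared fields are bound, in declaration order, to a matching `ProfileResult` constructor. A hedged usage sketch; the wrapper class and JSON payload are invented for illustration, and the Elasticsearch types assume a 7.9-era classpath:

[source,java]
--------------------------------------------------
import java.io.IOException;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.profile.ProfileResult;

public class ProfileResultParseExample {
    public static void main(String[] args) throws IOException {
        String json = "{\"type\":\"LongTermsAggregator\",\"description\":\"str_terms\","
            + "\"time_in_nanos\":123,\"breakdown\":{\"collect\":100},"
            + "\"debug\":{\"total_buckets\":4}}";
        try (XContentParser p = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            ProfileResult result = ProfileResult.fromXContent(p);
            // The debug section comes back as a plain Map<String, Object>.
            System.out.println(result.getDebugInfo().get("total_buckets")); // 4
        }
    }
}
--------------------------------------------------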

View File

@@ -21,10 +21,30 @@ package org.elasticsearch.search.profile.aggregation;
import org.elasticsearch.search.profile.AbstractProfileBreakdown;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.unmodifiableMap;
/**
* {@linkplain AbstractProfileBreakdown} customized to work with aggregations.
*/
public class AggregationProfileBreakdown extends AbstractProfileBreakdown<AggregationTimingType> {
private final Map<String, Object> extra = new HashMap<>();
public AggregationProfileBreakdown() {
super(AggregationTimingType.class);
}
/**
* Add extra debugging information about the aggregation.
*/
public void addDebugInfo(String key, Object value) {
extra.put(key, value);
}
@Override
protected Map<String, Object> toDebugMap() {
return unmodifiableMap(extra);
}
}
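A small usage sketch (the keys and values here are invented): repeated `addDebugInfo` calls use `HashMap` semantics, so the last write wins per key, and the profiler reads the result back through the protected `toDebugMap` hook:

[source,java]
--------------------------------------------------
// Fragment; assumes the profiler wiring shown in the next file.
AggregationProfileBreakdown breakdown = new AggregationProfileBreakdown();
breakdown.addDebugInfo("total_buckets", 4L);
breakdown.addDebugInfo("deferred_aggregators", java.util.Collections.singletonList("max_number"));
// toDebugMap() is protected; ProfilingAggregator reads it when assembling the ProfileResult tree.
--------------------------------------------------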

View File

@@ -91,6 +91,7 @@ public class ProfilingAggregator extends Aggregator {
return delegate.buildAggregations(owningBucketOrds);
} finally {
timer.stop();
delegate.collectDebugInfo(profileBreakdown::addDebugInfo);
}
}
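After timing `buildAggregations`, the wrapper drains the delegate's debug output into the breakdown via `collectDebugInfo(profileBreakdown::addDebugInfo)`. An aggregator opts in by overriding that hook; a hedged sketch of a typical override, where the `bucketOrds` field is assumed, in the spirit of the histogram's `total_buckets` entry:

[source,java]
--------------------------------------------------
// Hypothetical aggregator-side override; `add` is profileBreakdown::addDebugInfo above.
@Override
public void collectDebugInfo(java.util.function.BiConsumer<String, Object> add) {
    super.collectDebugInfo(add);
    add.accept("total_buckets", bucketOrds.size()); // how many buckets this agg collected
}
--------------------------------------------------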

View File

@@ -54,6 +54,7 @@ public class LongKeyedBucketOrdsTests extends ESTestCase {
Set<Long> seen = new HashSet<>();
seen.add(0L);
seen.add(1000L);
assertThat(ords.size(), equalTo(2L));
long[] values = new long[scaledRandomIntBetween(1, 10000)];
for (int i = 0; i < values.length; i++) {
values[i] = randomValueOtherThanMany(seen::contains, ESTestCase::randomLong);
@@ -61,6 +62,7 @@
}
for (int i = 0; i < values.length; i++) {
assertThat(ords.add(0, values[i]), equalTo(i + 2L));
assertThat(ords.size(), equalTo(i + 3L));
if (randomBoolean()) {
assertThat(ords.add(0, 0), equalTo(-1L));
}
@@ -102,6 +104,7 @@
assertThat(ords.add(1, 0), equalTo(1L));
assertThat(ords.add(0, 0), equalTo(-1L));
assertThat(ords.add(1, 0), equalTo(-2L));
assertThat(ords.size(), equalTo(2L));
// And some random values
Set<OwningBucketOrdAndValue> seen = new HashSet<>();
@@ -116,6 +119,7 @@
}
for (int i = 0; i < values.length; i++) {
assertThat(ords.add(values[i].owningBucketOrd, values[i].value), equalTo(i + 2L));
assertThat(ords.size(), equalTo(i + 3L));
if (randomBoolean()) {
assertThat(ords.add(0, 0), equalTo(-1L));
}
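The new assertions pin `size()` to the `add` contract: a first-time `add` returns a fresh ordinal and grows `size()` by one, while a duplicate returns the existing ordinal encoded as `-1 - ord` and leaves `size()` untouched. A recap inferred from the assertions above, with `ords` constructed as in the surrounding test (outside this hunk):

[source,java]
--------------------------------------------------
long ord = ords.add(0, 42L); // new value: returns a fresh ordinal, size() grows by one
long dup = ords.add(0, 42L); // duplicate: size() unchanged
assert dup == -1 - ord;      // duplicates hand back the existing ordinal, encoded negatively
--------------------------------------------------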

View File

@@ -30,8 +30,8 @@ import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
@@ -46,22 +46,27 @@ public class ProfileResultTests extends ESTestCase {
public static ProfileResult createTestItem(int depth) {
String type = randomAlphaOfLengthBetween(5, 10);
String description = randomAlphaOfLengthBetween(5, 10);
int timingsSize = randomIntBetween(0, 5);
Map<String, Long> timings = new HashMap<>(timingsSize);
for (int i = 0; i < timingsSize; i++) {
long time = randomNonNegativeLong() / timingsSize;
int breakdownsSize = randomIntBetween(0, 5);
Map<String, Long> breakdown = new HashMap<>(breakdownsSize);
while (breakdown.size() < breakdownsSize) {
long value = randomNonNegativeLong();
if (randomBoolean()) {
// also often use "small" values in tests
time = randomNonNegativeLong() % 10000;
value = value % 10000;
}
timings.put(randomAlphaOfLengthBetween(5, 10), time); // don't overflow Long.MAX_VALUE;
breakdown.put(randomAlphaOfLengthBetween(5, 10), value);
}
int debugSize = randomIntBetween(0, 5);
Map<String, Object> debug = new HashMap<>(debugSize);
while (debug.size() < debugSize) {
debug.put(randomAlphaOfLength(5), randomAlphaOfLength(4));
}
int childrenSize = depth > 0 ? randomIntBetween(0, 1) : 0;
List<ProfileResult> children = new ArrayList<>(childrenSize);
for (int i = 0; i < childrenSize; i++) {
children.add(createTestItem(depth - 1));
}
return new ProfileResult(type, description, timings, children);
return new ProfileResult(type, description, breakdown, debug, randomNonNegativeLong(), children);
}
public void testFromXContent() throws IOException {
@@ -83,8 +88,9 @@
BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
BytesReference mutated;
if (addRandomFields) {
// "breakdown" just consists of key/value pairs, so we shouldn't add anything random there
Predicate<String> excludeFilter = (s) -> s.endsWith(ProfileResult.BREAKDOWN.getPreferredName());
// "breakdown" and "debug" just consist of key/value pairs, so we shouldn't add anything random there
Predicate<String> excludeFilter = (s) ->
s.endsWith(ProfileResult.BREAKDOWN.getPreferredName()) || s.endsWith(ProfileResult.DEBUG.getPreferredName());
mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
} else {
mutated = originalBytes;
@@ -102,12 +108,28 @@
public void testToXContent() throws IOException {
List<ProfileResult> children = new ArrayList<>();
children.add(new ProfileResult("child1", "desc1", Collections.singletonMap("key1", 100L), Collections.emptyList()));
children.add(new ProfileResult("child2", "desc2", Collections.singletonMap("key1", 123356L), Collections.emptyList()));
Map<String, Long> timings3 = new HashMap<>();
timings3.put("key1", 123456L);
timings3.put("key2", 100000L);
ProfileResult result = new ProfileResult("someType", "some description", timings3, children);
children.add(new ProfileResult(
"child1",
"desc1",
org.elasticsearch.common.collect.Map.of("key1", 100L),
org.elasticsearch.common.collect.Map.of(),
100L,
org.elasticsearch.common.collect.List.of())
);
children.add(new ProfileResult(
"child2",
"desc2",
org.elasticsearch.common.collect.Map.of("key1", 123356L),
org.elasticsearch.common.collect.Map.of(),
123356L,
org.elasticsearch.common.collect.List.of()));
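// LinkedHashMap keeps insertion order, so the expected pretty-printed JSON below is deterministic.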
Map<String, Long> breakdown = new LinkedHashMap<>();
breakdown.put("key1", 123456L);
breakdown.put("stuff", 10000L);
Map<String, Object> debug = new LinkedHashMap<>();
debug.put("a", "foo");
debug.put("b", "bar");
ProfileResult result = new ProfileResult("someType", "some description", breakdown, debug, 223456L, children);
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
@@ -116,7 +138,11 @@
" \"time_in_nanos\" : 223456,\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 123456,\n" +
" \"key2\" : 100000\n" +
" \"stuff\" : 10000\n" +
" },\n" +
" \"debug\" : {\n" +
" \"a\" : \"foo\",\n" +
" \"b\" : \"bar\"\n" +
" },\n" +
" \"children\" : [\n" +
" {\n" +
@@ -147,7 +173,11 @@
" \"time_in_nanos\" : 223456,\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 123456,\n" +
" \"key2\" : 100000\n" +
" \"stuff\" : 10000\n" +
" },\n" +
" \"debug\" : {\n" +
" \"a\" : \"foo\",\n" +
" \"b\" : \"bar\"\n" +
" },\n" +
" \"children\" : [\n" +
" {\n" +
@@ -171,7 +201,14 @@
" ]\n" +
"}", Strings.toString(builder));
result = new ProfileResult("profileName", "some description", Collections.singletonMap("key1", 12345678L), Collections.emptyList());
result = new ProfileResult(
"profileName",
"some description",
org.elasticsearch.common.collect.Map.of("key1", 12345678L),
org.elasticsearch.common.collect.Map.of(),
12345678L,
org.elasticsearch.common.collect.List.of()
);
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
@@ -184,8 +221,14 @@
" }\n" +
"}", Strings.toString(builder));
result = new ProfileResult("profileName", "some description", Collections.singletonMap("key1", 1234567890L),
Collections.emptyList());
result = new ProfileResult(
"profileName",
"some description",
org.elasticsearch.common.collect.Map.of("key1", 1234567890L),
org.elasticsearch.common.collect.Map.of(),
1234567890L,
org.elasticsearch.common.collect.List.of()
);
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +

View File

@@ -80,7 +80,9 @@ public class SearchProfileShardResultsTests extends ESTestCase {
if (addRandomFields) {
// The ProfileResults "breakdown" section just consists of key/value pairs, so we shouldn't add anything random there
// also we don't want to insert into the root object here, since it's just the PROFILE_FIELD itself
Predicate<String> excludeFilter = (s) -> (s.isEmpty() || s.endsWith(ProfileResult.BREAKDOWN.getPreferredName()));
Predicate<String> excludeFilter = (s) -> s.isEmpty()
|| s.endsWith(ProfileResult.BREAKDOWN.getPreferredName())
|| s.endsWith(ProfileResult.DEBUG.getPreferredName());
mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
} else {
mutated = originalBytes;

View File

@@ -31,7 +31,7 @@ import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -69,10 +69,13 @@ public class AggregationProfileShardResultTests extends ESTestCase {
public void testToXContent() throws IOException {
List<ProfileResult> profileResults = new ArrayList<>();
Map<String, Long> timings = new HashMap<>();
timings.put("timing1", 2000L);
timings.put("timing2", 4000L);
ProfileResult profileResult = new ProfileResult("someType", "someDescription", timings, Collections.emptyList());
Map<String, Long> breakdown = new LinkedHashMap<>();
breakdown.put("timing1", 2000L);
breakdown.put("timing2", 4000L);
Map<String, Object> debug = new LinkedHashMap<>();
debug.put("stuff", "stuff");
debug.put("other_stuff", org.elasticsearch.common.collect.List.of("foo", "bar"));
ProfileResult profileResult = new ProfileResult("someType", "someDescription", breakdown, debug, 6000L, Collections.emptyList());
profileResults.add(profileResult);
AggregationProfileShardResult aggProfileResults = new AggregationProfileShardResult(profileResults);
BytesReference xContent = toXContent(aggProfileResults, XContentType.JSON, false);
@@ -80,7 +83,8 @@
+ "{\"type\":\"someType\","
+ "\"description\":\"someDescription\","
+ "\"time_in_nanos\":6000,"
+ "\"breakdown\":{\"timing1\":2000,\"timing2\":4000}"
+ "\"breakdown\":{\"timing1\":2000,\"timing2\":4000},"
+ "\"debug\":{\"stuff\":\"stuff\",\"other_stuff\":[\"foo\",\"bar\"]}"
+ "}"
+ "]}", xContent.utf8ToString());
@@ -90,7 +94,8 @@
+ "\"description\":\"someDescription\","
+ "\"time\":\"6micros\","
+ "\"time_in_nanos\":6000,"
+ "\"breakdown\":{\"timing1\":2000,\"timing2\":4000}"
+ "\"breakdown\":{\"timing1\":2000,\"timing2\":4000},"
+ "\"debug\":{\"stuff\":\"stuff\",\"other_stuff\":[\"foo\",\"bar\"]}"
+ "}"
+ "]}", xContent.utf8ToString());
}

View File

@@ -111,19 +111,19 @@ public class QueryProfilerTests extends ESTestCase {
List<ProfileResult> results = profiler.getTree();
assertEquals(1, results.size());
Map<String, Long> breakdown = results.get(0).getTimeBreakdown();
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString()), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count"), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count"), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count"), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count"), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count"), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count"), equalTo(0L));
long rewriteTime = profiler.getRewriteTime();
assertThat(rewriteTime, greaterThan(0L));
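Dropping `.longValue()` is safe because the breakdown map already yields boxed `Long` values and the Hamcrest matchers compare `Comparable`s directly, so the explicit unboxing bought nothing. A standalone illustration (class and values invented):

[source,java]
--------------------------------------------------
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;

public class BoxedLongComparisonExample {
    public static void main(String[] args) {
        Long boxed = 5L;
        assertThat(boxed, greaterThan(0L));             // Matcher<Long> compares the boxed value
        assertThat(boxed.longValue(), greaterThan(0L)); // the same check, just noisier
    }
}
--------------------------------------------------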
@@ -137,19 +137,19 @@
List<ProfileResult> results = profiler.getTree();
assertEquals(1, results.size());
Map<String, Long> breakdown = results.get(0).getTimeBreakdown();
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString()), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString()), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count"), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count"), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count"), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count"), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count"), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count"), equalTo(0L));
long rewriteTime = profiler.getRewriteTime();
assertThat(rewriteTime, greaterThan(0L));
@@ -175,19 +175,19 @@
List<ProfileResult> results = profiler.getTree();
assertEquals(1, results.size());
Map<String, Long> breakdown = results.get(0).getTimeBreakdown();
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString()).longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString()), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString()), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count"), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count"), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count"), greaterThan(0L));
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count"), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count"), equalTo(0L));
assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count"), greaterThan(0L));
long rewriteTime = profiler.getRewriteTime();
assertThat(rewriteTime, greaterThan(0L));