This adds a few things to the `breakdown` of the profiler:

* `histogram` aggregations now contain `total_buckets`, which is the count of buckets that they collected. This could be useful when debugging a histogram inside of another bucketing agg that is fairly selective.
* All bucketing aggs that can delay their sub-aggregations will now add a list of delayed sub-aggregations. This is useful because we sometimes have fairly involved logic around which sub-aggregations get delayed, and this will save you from having to guess.
* Aggregations wrapped in the `MultiBucketAggregatorWrapper` can't accurately add anything to the breakdown. Instead, the wrapper adds a marker entry `"wrapped_in_multi_bucket_aggregator": true` so we can quickly pick out such aggregations when debugging.

It also fixes a bug where `_count` breakdown entries were contributing to the overall `time_in_nanos`. They didn't add a large amount of time, so it is unlikely that this caused a big problem, but I fixed it while I was there.

To support the arbitrary breakdown data, this reworks the profiler so that the `breakdown` can contain any data that is supported by `StreamOutput#writeGenericValue(Object)` and `XContentBuilder#value(Object)`.
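To make that concrete, here is a rough hand-written sketch of how the new entries surface in a profile response; the aggregation names are invented for illustration and the `breakdown` bodies are elided:

    "aggregations" : [
      {
        "type" : "NumericHistogramAggregator",
        "description" : "my_histo",
        "time_in_nanos" : 4280,
        "breakdown" : { ... },
        "debug" : {
          "total_buckets" : 4
        },
        "children" : [
          {
            "type" : "GlobalOrdinalsStringTermsAggregator",
            "description" : "my_terms",
            "breakdown" : { ... },
            "debug" : {
              "deferred_aggregators" : [ "my_max" ]
            }
          }
        ]
      }
    ]

An aggregation that runs inside the `MultiBucketAggregatorWrapper` reports the marker entry instead of per-aggregator debug values.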
@@ -794,6 +794,9 @@ This yields the following aggregation profile output:
           "reduce_count" : 0,
           "collect" : 91456,
           "collect_count" : 4
+        },
+        "debug": {
+          "total_buckets": 4
         }
       },
       {
@@ -823,7 +826,10 @@ This yields the following aggregation profile output:
           "initialize_count" : 1,
           "reduce_count" : 0,
           "collect" : 94052,
           "collect_count" : 4
+        },
+        "debug": {
+          "total_buckets": 4
         }
       }
     ]
@@ -848,10 +854,15 @@ The `time_in_nanos` field shows the time executed by each aggregation, and is
 inclusive of all children. While the overall time is useful, the `breakdown`
 field will give detailed stats about how the time was spent.
 
+Some aggregations may return expert `debug` information that describes features
+of the underlying execution of the aggregation that are useful for folks that
+hack on aggregations but that we don't expect to be otherwise useful. They can
+vary wildly between versions, aggregations, and aggregation execution
+strategies.
+
 ===== Timing Breakdown
 
-The `breakdown` component lists detailed timing statistics about low-level
-Lucene execution:
+The `breakdown` component lists detailed statistics about low-level execution:
 
 [source,js]
 --------------------------------------------------
@@ -4,6 +4,7 @@ setup:
         index: test_1
         body:
           settings:
+            number_of_shards: 1
             number_of_replicas: 0
           mappings:
             properties:
@@ -777,7 +778,66 @@ setup:
         index: test_1
         body: { "size" : 0, "aggs" : { "no_field_terms" : { "terms" : { "size": 1 } } } }
 
+---
+"profiler":
+  - skip:
+      version: " - 7.8.99"
+      reason: debug information added in 7.9.0
+
+  - do:
+      bulk:
+        index: test_1
+        refresh: true
+        body: |
+          { "index": {} }
+          { "str": "sheep", "number": 1 }
+          { "index": {} }
+          { "str": "sheep", "number": 3 }
+          { "index": {} }
+          { "str": "cow", "number": 1 }
+          { "index": {} }
+          { "str": "pig", "number": 1 }
+
+  - do:
+      search:
+        index: test_1
+        body:
+          profile: true
+          size: 0
+          aggs:
+            str_terms:
+              terms:
+                field: str
+                collect_mode: breadth_first
+              aggs:
+                max_number:
+                  max:
+                    field: number
+  - match: { aggregations.str_terms.buckets.0.key: sheep }
+  - match: { aggregations.str_terms.buckets.0.max_number.value: 3 }
+  - match: { aggregations.str_terms.buckets.1.key: cow }
+  - match: { aggregations.str_terms.buckets.1.max_number.value: 1 }
+  - match: { aggregations.str_terms.buckets.2.key: pig }
+  - match: { aggregations.str_terms.buckets.2.max_number.value: 1 }
+  - match: { profile.shards.0.aggregations.0.type: GlobalOrdinalsStringTermsAggregator }
+  - match: { profile.shards.0.aggregations.0.description: str_terms }
+  - match: { profile.shards.0.aggregations.0.breakdown.collect_count: 4 }
+  - match: { profile.shards.0.aggregations.0.debug.deferred_aggregators: [ max_number ] }
+  - match: { profile.shards.0.aggregations.0.children.0.type: MaxAggregator }
+  - match: { profile.shards.0.aggregations.0.children.0.description: max_number }
+
+  - do:
+      search:
+        index: test_1
+        body:
+          profile: true
+          size: 0
+          aggs:
+            n_terms:
+              terms:
+                field: number
+  - match: { aggregations.n_terms.buckets.0.key: 1 }
+  - match: { aggregations.n_terms.buckets.1.key: 3 }
+  - match: { profile.shards.0.aggregations.0.type: LongTermsAggregator }
+  - match: { profile.shards.0.aggregations.0.description: n_terms }
+  - match: { profile.shards.0.aggregations.0.breakdown.collect_count: 4 }
+  - match: { profile.shards.0.aggregations.0.debug.total_buckets: 2 }
@@ -32,6 +32,7 @@ import org.elasticsearch.test.ESIntegTestCase;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.stream.Collectors;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@@ -48,7 +49,17 @@ import static org.hamcrest.Matchers.notNullValue;
 
 @ESIntegTestCase.SuiteScopeTestCase
 public class AggregationProfilerIT extends ESIntegTestCase {
+    private static final String COLLECT = AggregationTimingType.COLLECT.toString();
+    private static final String INITIALIZE = AggregationTimingType.INITIALIZE.toString();
+    private static final String BUILD_AGGREGATION = AggregationTimingType.BUILD_AGGREGATION.toString();
+    private static final String REDUCE = AggregationTimingType.REDUCE.toString();
+    private static final Set<String> BREAKDOWN_KEYS = org.elasticsearch.common.collect.Set.of(
+        COLLECT, INITIALIZE, BUILD_AGGREGATION, REDUCE,
+        COLLECT + "_count", INITIALIZE + "_count", BUILD_AGGREGATION + "_count", REDUCE + "_count");
+
+    private static final String TOTAL_BUCKETS = "total_buckets";
+    private static final String WRAPPED = "wrapped_in_multi_bucket_aggregator";
+    private static final String DEFERRED = "deferred_aggregators";
+
     private static final String NUMBER_FIELD = "number";
     private static final String TAG_FIELD = "tag";
@@ -81,7 +92,6 @@ public class AggregationProfilerIT extends ESIntegTestCase {
 
         indexRandom(true, builders);
         createIndex("idx_unmapped");
-        ensureSearchable();
     }
 
     public void testSimpleProfile() {
@@ -107,15 +117,15 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(histoAggResult.getTime(), greaterThan(0L));
             Map<String, Long> breakdown = histoAggResult.getTimeBreakdown();
             assertThat(breakdown, notNullValue());
-            assertThat(breakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(breakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(breakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(breakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(breakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(breakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(breakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(breakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(breakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(breakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(breakdown.get(COLLECT), greaterThan(0L));
+            assertThat(breakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(breakdown.get(REDUCE), equalTo(0L));
+            Map<String, Object> debug = histoAggResult.getDebugInfo();
+            assertThat(debug, notNullValue());
+            assertThat(debug.keySet(), equalTo(org.elasticsearch.common.collect.Set.of(TOTAL_BUCKETS)));
+            assertThat(((Number) debug.get(TOTAL_BUCKETS)).longValue(), greaterThan(0L));
         }
     }
@@ -151,14 +161,15 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(histoAggResult.getTime(), greaterThan(0L));
             Map<String, Long> histoBreakdown = histoAggResult.getTimeBreakdown();
             assertThat(histoBreakdown, notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(histoBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(histoBreakdown.get(REDUCE), equalTo(0L));
+            Map<String, Object> histoDebugInfo = histoAggResult.getDebugInfo();
+            assertThat(histoDebugInfo, notNullValue());
+            assertThat(histoDebugInfo.keySet(), equalTo(org.elasticsearch.common.collect.Set.of(TOTAL_BUCKETS)));
+            assertThat(((Number) histoDebugInfo.get(TOTAL_BUCKETS)).longValue(), greaterThan(0L));
             assertThat(histoAggResult.getProfiledChildren().size(), equalTo(1));
 
             ProfileResult termsAggResult = histoAggResult.getProfiledChildren().get(0);
@@ -168,14 +179,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(termsAggResult.getTime(), greaterThan(0L));
             Map<String, Long> termsBreakdown = termsAggResult.getTimeBreakdown();
             assertThat(termsBreakdown, notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(termsBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(termsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(termsBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(termsBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(termsBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(termsAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of(WRAPPED, true)));
             assertThat(termsAggResult.getProfiledChildren().size(), equalTo(1));
 
             ProfileResult avgAggResult = termsAggResult.getProfiledChildren().get(0);
@@ -185,14 +194,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(avgAggResult.getTime(), greaterThan(0L));
             Map<String, Long> avgBreakdown = termsAggResult.getTimeBreakdown();
             assertThat(avgBreakdown, notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
             assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0));
         }
     }
@@ -221,14 +228,15 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(histoAggResult.getTime(), greaterThan(0L));
             Map<String, Long> histoBreakdown = histoAggResult.getTimeBreakdown();
             assertThat(histoBreakdown, notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(histoBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(histoBreakdown.get(REDUCE), equalTo(0L));
+            Map<String, Object> histoDebugInfo = histoAggResult.getDebugInfo();
+            assertThat(histoDebugInfo, notNullValue());
+            assertThat(histoDebugInfo.keySet(), equalTo(org.elasticsearch.common.collect.Set.of(TOTAL_BUCKETS)));
+            assertThat(((Number) histoDebugInfo.get(TOTAL_BUCKETS)).longValue(), greaterThan(0L));
             assertThat(histoAggResult.getProfiledChildren().size(), equalTo(1));
 
             ProfileResult termsAggResult = histoAggResult.getProfiledChildren().get(0);
@@ -238,14 +246,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(termsAggResult.getTime(), greaterThan(0L));
             Map<String, Long> termsBreakdown = termsAggResult.getTimeBreakdown();
             assertThat(termsBreakdown, notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(termsBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(termsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(termsBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(termsBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(termsBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(termsAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of(WRAPPED, true)));
             assertThat(termsAggResult.getProfiledChildren().size(), equalTo(1));
 
             ProfileResult avgAggResult = termsAggResult.getProfiledChildren().get(0);
@@ -253,16 +259,14 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator"));
             assertThat(avgAggResult.getLuceneDescription(), equalTo("avg"));
             assertThat(avgAggResult.getTime(), greaterThan(0L));
-            Map<String, Long> avgBreakdown = termsAggResult.getTimeBreakdown();
+            Map<String, Long> avgBreakdown = avgAggResult.getTimeBreakdown();
             assertThat(avgBreakdown, notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
             assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0));
         }
     }
@@ -289,16 +293,15 @@ public class AggregationProfilerIT extends ESIntegTestCase {
                 equalTo(DiversifiedOrdinalsSamplerAggregator.class.getSimpleName()));
             assertThat(diversifyAggResult.getLuceneDescription(), equalTo("diversify"));
            assertThat(diversifyAggResult.getTime(), greaterThan(0L));
-            Map<String, Long> histoBreakdown = diversifyAggResult.getTimeBreakdown();
-            assertThat(histoBreakdown, notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            Map<String, Long> diversifyBreakdown = diversifyAggResult.getTimeBreakdown();
+            assertThat(diversifyBreakdown, notNullValue());
+            assertThat(diversifyBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(diversifyBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(diversifyBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(diversifyAggResult.getDebugInfo(), equalTo(
+                org.elasticsearch.common.collect.Map.of(DEFERRED, org.elasticsearch.common.collect.List.of("max"))));
             assertThat(diversifyAggResult.getProfiledChildren().size(), equalTo(1));
 
             ProfileResult maxAggResult = diversifyAggResult.getProfiledChildren().get(0);
@@ -306,16 +309,14 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator"));
             assertThat(maxAggResult.getLuceneDescription(), equalTo("max"));
             assertThat(maxAggResult.getTime(), greaterThan(0L));
-            Map<String, Long> termsBreakdown = maxAggResult.getTimeBreakdown();
-            assertThat(termsBreakdown, notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(termsBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(termsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(termsBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(termsBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            Map<String, Long> maxBreakdown = maxAggResult.getTimeBreakdown();
+            assertThat(maxBreakdown, notNullValue());
+            assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
            assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0));
         }
     }
@@ -352,14 +353,15 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(histoAggResult.getTime(), greaterThan(0L));
             Map<String, Long> histoBreakdown = histoAggResult.getTimeBreakdown();
             assertThat(histoBreakdown, notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(histoBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(histoBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(histoBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(histoBreakdown.get(REDUCE), equalTo(0L));
+            Map<String, Object> histoDebugInfo = histoAggResult.getDebugInfo();
+            assertThat(histoDebugInfo, notNullValue());
+            assertThat(histoDebugInfo.keySet(), equalTo(org.elasticsearch.common.collect.Set.of(TOTAL_BUCKETS)));
+            assertThat(((Number) histoDebugInfo.get(TOTAL_BUCKETS)).longValue(), greaterThan(0L));
             assertThat(histoAggResult.getProfiledChildren().size(), equalTo(2));
 
             Map<String, ProfileResult> histoAggResultSubAggregations = histoAggResult.getProfiledChildren().stream()
@@ -371,14 +373,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(tagsAggResult.getTime(), greaterThan(0L));
             Map<String, Long> tagsBreakdown = tagsAggResult.getTimeBreakdown();
             assertThat(tagsBreakdown, notNullValue());
-            assertThat(tagsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(tagsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(tagsBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(tagsBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(tagsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(tagsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(tagsBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(tagsBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(tagsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(tagsBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(tagsAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of(WRAPPED, true)));
             assertThat(tagsAggResult.getProfiledChildren().size(), equalTo(2));
 
             Map<String, ProfileResult> tagsAggResultSubAggregations = tagsAggResult.getProfiledChildren().stream()
@@ -388,32 +388,28 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(avgAggResult, notNullValue());
             assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator"));
             assertThat(avgAggResult.getTime(), greaterThan(0L));
-            Map<String, Long> avgBreakdown = tagsAggResult.getTimeBreakdown();
+            Map<String, Long> avgBreakdown = avgAggResult.getTimeBreakdown();
             assertThat(avgBreakdown, notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
             assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0));
 
             ProfileResult maxAggResult = tagsAggResultSubAggregations.get("max");
             assertThat(maxAggResult, notNullValue());
             assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator"));
             assertThat(maxAggResult.getTime(), greaterThan(0L));
-            Map<String, Long> maxBreakdown = tagsAggResult.getTimeBreakdown();
+            Map<String, Long> maxBreakdown = maxAggResult.getTimeBreakdown();
             assertThat(maxBreakdown, notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(maxBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(maxBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(maxBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
             assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0));
 
             ProfileResult stringsAggResult = histoAggResultSubAggregations.get("strings");
@@ -422,14 +418,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(stringsAggResult.getTime(), greaterThan(0L));
             Map<String, Long> stringsBreakdown = stringsAggResult.getTimeBreakdown();
             assertThat(stringsBreakdown, notNullValue());
-            assertThat(stringsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(stringsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(stringsBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(stringsBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(stringsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(stringsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(stringsBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(stringsBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(stringsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(stringsBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(stringsBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(stringsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(stringsBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(stringsAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of(WRAPPED, true)));
             assertThat(stringsAggResult.getProfiledChildren().size(), equalTo(3));
 
             Map<String, ProfileResult> stringsAggResultSubAggregations = stringsAggResult.getProfiledChildren().stream()
@@ -439,32 +433,28 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(avgAggResult, notNullValue());
             assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator"));
             assertThat(avgAggResult.getTime(), greaterThan(0L));
-            avgBreakdown = stringsAggResult.getTimeBreakdown();
+            avgBreakdown = avgAggResult.getTimeBreakdown();
             assertThat(avgBreakdown, notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
             assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0));
 
             maxAggResult = stringsAggResultSubAggregations.get("max");
             assertThat(maxAggResult, notNullValue());
             assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator"));
             assertThat(maxAggResult.getTime(), greaterThan(0L));
-            maxBreakdown = stringsAggResult.getTimeBreakdown();
+            maxBreakdown = maxAggResult.getTimeBreakdown();
             assertThat(maxBreakdown, notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(maxBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(maxBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(maxBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
             assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0));
 
             tagsAggResult = stringsAggResultSubAggregations.get("tags");
@@ -474,14 +464,12 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(tagsAggResult.getTime(), greaterThan(0L));
             tagsBreakdown = tagsAggResult.getTimeBreakdown();
             assertThat(tagsBreakdown, notNullValue());
-            assertThat(tagsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(tagsBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(tagsBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(tagsBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(tagsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(tagsBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(tagsBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(tagsBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(tagsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(tagsBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(tagsAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of(WRAPPED, true)));
             assertThat(tagsAggResult.getProfiledChildren().size(), equalTo(2));
 
             tagsAggResultSubAggregations = tagsAggResult.getProfiledChildren().stream()
@@ -491,32 +479,28 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             assertThat(avgAggResult, notNullValue());
             assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator"));
             assertThat(avgAggResult.getTime(), greaterThan(0L));
-            avgBreakdown = tagsAggResult.getTimeBreakdown();
+            avgBreakdown = avgAggResult.getTimeBreakdown();
            assertThat(avgBreakdown, notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
             assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0));
 
             maxAggResult = tagsAggResultSubAggregations.get("max");
             assertThat(maxAggResult, notNullValue());
             assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator"));
             assertThat(maxAggResult.getTime(), greaterThan(0L));
-            maxBreakdown = tagsAggResult.getTimeBreakdown();
+            maxBreakdown = maxAggResult.getTimeBreakdown();
             assertThat(maxBreakdown, notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.INITIALIZE.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.INITIALIZE.toString()), greaterThan(0L));
-            assertThat(maxBreakdown.get(AggregationTimingType.COLLECT.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.COLLECT.toString()), greaterThan(0L));
-            assertThat(maxBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.BUILD_AGGREGATION.toString()), greaterThan(0L));
-            assertThat(maxBreakdown.get(AggregationTimingType.REDUCE.toString()), notNullValue());
-            assertThat(maxBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L));
+            assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+            assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
+            assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
+            assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
+            assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
+            assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
             assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0));
         }
     }
@@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.xcontent.DeprecationHandler;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
 import org.elasticsearch.search.aggregations.support.AggregationPath;
@@ -34,12 +35,14 @@ import org.elasticsearch.search.sort.SortOrder;
 
 import java.io.IOException;
 import java.util.Iterator;
+import java.util.function.BiConsumer;
 
 /**
  * An Aggregator.
+ * <p>
+ * Be <strong>careful</strong> when adding methods to this class. If possible
+ * make sure they have sensible default implementations.
  */
-// IMPORTANT: DO NOT add methods to this class unless strictly required.
-// On the other hand, if you can remove methods from it, you are highly welcome!
 public abstract class Aggregator extends BucketCollector implements Releasable {
 
     /**
@@ -176,6 +179,19 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
      */
     public abstract InternalAggregation buildEmptyAggregation();
 
+    /**
+     * Collect debug information to add to the profiling results. This will
+     * only be called if the aggregation is being profiled.
+     * <p>
+     * Well behaved implementations will always call the superclass
+     * implementation just in case it has something interesting. They will
+     * also only add objects which can be serialized with
+     * {@link StreamOutput#writeGenericValue(Object)} and
+     * {@link XContentBuilder#value(Object)}. And they'll have an integration
+     * test.
+     */
+    public void collectDebugInfo(BiConsumer<String, Object> add) {}
+
     /** Aggregation mode for sub aggregations. */
     public enum SubAggCollectionMode implements Writeable {
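For aggregator authors, a minimal sketch of an override following the contract above might look like this; the `segmentsCollected` field and the key name are invented for illustration, not part of this change:

    @Override
    public void collectDebugInfo(BiConsumer<String, Object> add) {
        // Let superclasses contribute their entries too.
        super.collectDebugInfo(add);
        // Only add values that StreamOutput#writeGenericValue(Object) and
        // XContentBuilder#value(Object) can handle, e.g. numbers, strings, and lists.
        add.accept("segments_collected", segmentsCollected); // hypothetical counter
    }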
@@ -31,6 +31,7 @@ import org.elasticsearch.search.internal.SearchContext.Lifetime;
 
 import java.io.IOException;
 import java.util.Map;
+import java.util.function.BiConsumer;
 
 import static org.elasticsearch.search.aggregations.support.AggregationUsageService.OTHER_SUBTYPE;
 
@@ -178,6 +179,18 @@ public abstract class AggregatorFactory {
         public void close() {
             Releasables.close(aggregators, collectors);
         }
 
+        @Override
+        public void collectDebugInfo(BiConsumer<String, Object> add) {
+            /*
+             * There isn't really a sane way to give our delegates a way to
+             * add entries because we'd have to merge them. So we just *don't*
+             * and leave a marker of our own. This ain't great, but we plan
+             * to cut down on usage of this wrapper in the future.
+             */
+            add.accept("wrapped_in_multi_bucket_aggregator", true);
+            super.collectDebugInfo(add);
+        }
     }
 
     protected final String name;
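In a profile response this marker surfaces roughly as:

    "debug" : {
      "wrapped_in_multi_bucket_aggregator" : true
    }

which makes wrapped aggregations easy to pick out even though their delegates can't contribute entries of their own.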
@@ -30,6 +30,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.function.BiConsumer;
 
 public abstract class DeferableBucketAggregator extends BucketsAggregator {
     /**
@@ -37,6 +38,7 @@ public abstract class DeferableBucketAggregator extends BucketsAggregator {
      * been deferred.
      */
     private DeferringBucketCollector recordingWrapper;
+    private List<String> deferredAggregationNames;
 
     protected DeferableBucketAggregator(String name, AggregatorFactories factories, SearchContext context, Aggregator parent,
             Map<String, Object> metadata) throws IOException {
@@ -45,21 +47,24 @@ public abstract class DeferableBucketAggregator extends BucketsAggregator {
 
     @Override
     protected void doPreCollection() throws IOException {
-        List<BucketCollector> collectors = new ArrayList<>();
-        List<BucketCollector> deferredCollectors = new ArrayList<>();
+        List<BucketCollector> collectors = new ArrayList<>(subAggregators.length);
+        List<BucketCollector> deferredAggregations = null;
         for (int i = 0; i < subAggregators.length; ++i) {
             if (shouldDefer(subAggregators[i])) {
                 if (recordingWrapper == null) {
                     recordingWrapper = getDeferringCollector();
+                    deferredAggregations = new ArrayList<>(subAggregators.length);
+                    deferredAggregationNames = new ArrayList<>(subAggregators.length);
                 }
-                deferredCollectors.add(subAggregators[i]);
+                deferredAggregations.add(subAggregators[i]);
+                deferredAggregationNames.add(subAggregators[i].name());
                 subAggregators[i] = recordingWrapper.wrap(subAggregators[i]);
             } else {
                 collectors.add(subAggregators[i]);
             }
         }
         if (recordingWrapper != null) {
-            recordingWrapper.setDeferredCollector(deferredCollectors);
+            recordingWrapper.setDeferredCollector(deferredAggregations);
             collectors.add(recordingWrapper);
         }
         collectableSubAggregators = MultiBucketCollector.wrap(collectors);
@@ -100,4 +105,12 @@ public abstract class DeferableBucketAggregator extends BucketsAggregator {
             recordingWrapper.prepareSelectedBuckets(ordsToCollect);
         }
     }
+
+    @Override
+    public void collectDebugInfo(BiConsumer<String, Object> add) {
+        if (deferredAggregationNames != null) {
+            add.accept("deferred_aggregators", deferredAggregationNames);
+        }
+        super.collectDebugInfo(add);
+    }
 }
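The names recorded here are what the integration test above asserts on; in a response they appear roughly as:

    "debug" : {
      "deferred_aggregators" : [ "max_number" ]
    }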
@@ -41,6 +41,7 @@ import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 import java.util.Collections;
 import java.util.Map;
+import java.util.function.BiConsumer;
 
 /**
  * An aggregator for numeric values. For a given {@code interval},
@@ -160,4 +161,10 @@ public class NumericHistogramAggregator extends BucketsAggregator {
     public void doClose() {
         Releasables.close(bucketOrds);
     }
+
+    @Override
+    public void collectDebugInfo(BiConsumer<String, Object> add) {
+        add.accept("total_buckets", bucketOrds.size());
+        super.collectDebugInfo(add);
+    }
 }
LongKeyedBucketOrds.java
@@ -51,6 +51,11 @@ public abstract class LongKeyedBucketOrds implements Releasable {
      */
     public abstract long bucketsInOrd(long owningBucketOrd);

+    /**
+     * The number of collected buckets.
+     */
+    public abstract long size();
+
     /**
      * Build an iterator for buckets inside {@code owningBucketOrd}.
      * <p>
@@ -113,6 +118,11 @@ public abstract class LongKeyedBucketOrds implements Releasable {
             return ords.size();
         }

+        @Override
+        public long size() {
+            return ords.size();
+        }
+
         @Override
         public BucketOrdsEnum ordsEnum(long owningBucketOrd) {
             assert owningBucketOrd == 0;
@@ -218,6 +228,11 @@ public abstract class LongKeyedBucketOrds implements Releasable {
             return buckets.valueToThisBucketOrd.size();
         }

+        @Override
+        public long size() {
+            return lastGlobalOrd + 1;
+        }
+
         @Override
         public BucketOrdsEnum ordsEnum(long owningBucketOrd) {
             if (owningBucketOrd >= owningOrdToBuckets.size()) {
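`size()` gets two implementations above: the single-owning-bucket variant asks its underlying hash directly, while the many-owning-buckets variant relies on ords being handed out densely, so the count of collected buckets is simply `lastGlobalOrd + 1`. A minimal sketch of that dense-ord invariant (a hypothetical stand-in, not the `BigArrays`-backed real class):

```java
import java.util.HashMap;
import java.util.Map;

public class OrdAssignerSketch {
    private final Map<Long, Long> ords = new HashMap<>();
    private long nextOrd = 0; // ords are dense, starting at 0

    /** Returns the new ord, or {@code -1 - existingOrd} if the key was seen before. */
    public long add(long key) {
        Long existing = ords.get(key);
        if (existing != null) {
            return -1 - existing;
        }
        ords.put(key, nextOrd);
        return nextOrd++;
    }

    public long size() {
        return nextOrd; // == lastGlobalOrd + 1, no extra bookkeeping needed
    }

    public static void main(String[] args) {
        OrdAssignerSketch o = new OrdAssignerSketch();
        System.out.println(o.add(0));  // 0
        System.out.println(o.add(42)); // 1
        System.out.println(o.add(0));  // -1, already present as ord 0
        System.out.println(o.size());  // 2
    }
}
```

The `-1 - existingOrd` encoding is the same one the `LongKeyedBucketOrdsTests` assertions below check with `equalTo(-1L)` and `equalTo(-2L)`.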
LongTermsAggregator.java
@@ -40,6 +40,7 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.function.BiConsumer;

 import static java.util.Collections.emptyList;

@@ -186,4 +187,10 @@ public class LongTermsAggregator extends TermsAggregator {
         super.doClose();
         Releasables.close(bucketOrds);
     }

+    @Override
+    public void collectDebugInfo(BiConsumer<String, Object> add) {
+        super.collectDebugInfo(add);
+        add.accept("total_buckets", bucketOrds.size());
+    }
 }
AbstractInternalProfileTree.java
@@ -26,11 +26,10 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Deque;
 import java.util.List;
-import java.util.Map;

 public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBreakdown<?>, E> {

-    protected ArrayList<PB> timings;
+    protected ArrayList<PB> breakdowns;
     /** Maps the Query to it's list of children. This is basically the dependency tree */
     protected ArrayList<ArrayList<Integer>> tree;
     /** A list of the original queries, keyed by index position */
@@ -42,7 +41,7 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
     private int currentToken = 0;

     public AbstractInternalProfileTree() {
-        timings = new ArrayList<>(10);
+        breakdowns = new ArrayList<>(10);
         stack = new ArrayDeque<>(10);
         tree = new ArrayList<>(10);
         elements = new ArrayList<>(10);
@@ -94,14 +93,14 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
      * Helper method to add a new node to the dependency tree.
      *
      * Initializes a new list in the dependency tree, saves the query and
-     * generates a new {@link QueryProfileBreakdown} to track the timings of
-     * this query
+     * generates a new {@link AbstractProfileBreakdown} to track the timings
+     * of this element.
      *
      * @param element
      *            The element to profile
      * @param token
      *            The assigned token for this element
-     * @return A ProfileBreakdown to profile this element
+     * @return A {@link AbstractProfileBreakdown} to profile this element
      */
     private PB addDependencyNode(E element, int token) {

@@ -111,9 +110,9 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
         // Save our query for lookup later
         elements.add(element);

-        PB queryTimings = createProfileBreakdown();
-        timings.add(token, queryTimings);
-        return queryTimings;
+        PB breakdown = createProfileBreakdown();
+        breakdowns.add(token, breakdown);
+        return breakdown;
     }

     protected abstract PB createProfileBreakdown();
@@ -126,14 +125,14 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
     }

     /**
-     * After the query has been run and profiled, we need to merge the flat timing map
+     * After the element has been run and profiled, we need to merge the flat timing map
      * with the dependency graph to build a data structure that mirrors the original
      * query tree
      *
      * @return a hierarchical representation of the profiled query tree
      */
     public List<ProfileResult> getTree() {
-        ArrayList<ProfileResult> results = new ArrayList<>(5);
+        ArrayList<ProfileResult> results = new ArrayList<>(roots.size());
         for (Integer root : roots) {
             results.add(doGetTree(root));
         }
@@ -147,8 +146,7 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
      */
     private ProfileResult doGetTree(int token) {
         E element = elements.get(token);
-        PB breakdown = timings.get(token);
-        Map<String, Long> timings = breakdown.toTimingMap();
+        PB breakdown = breakdowns.get(token);
         List<Integer> children = tree.get(token);
         List<ProfileResult> childrenProfileResults = Collections.emptyList();

@@ -164,7 +162,8 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
         // calculating the same times over and over...but worth the effort?
         String type = getTypeFromElement(element);
         String description = getDescriptionFromElement(element);
-        return new ProfileResult(type, description, timings, childrenProfileResults);
+        return new ProfileResult(type, description, breakdown.toBreakdownMap(), breakdown.toDebugMap(),
+                breakdown.toNodeTime(), childrenProfileResults);
     }

     protected abstract String getTypeFromElement(E element);
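`AbstractInternalProfileTree` keeps the profiled elements and their breakdowns in flat, token-indexed lists, with a parallel `tree` list mapping each token to its children's tokens; `doGetTree` recursively zips them back into nested results. A simplified sketch of that reassembly, with stand-in types instead of the real `ProfileResult`:

```java
import java.util.ArrayList;
import java.util.List;

public class ProfileTreeSketch {
    static class Node {
        final String description;
        final long timeNanos;
        final List<Node> children;

        Node(String description, long timeNanos, List<Node> children) {
            this.description = description;
            this.timeNanos = timeNanos;
            this.children = children;
        }
    }

    // elements and times are flat, token-indexed; tree.get(token) lists child tokens.
    static Node doGetTree(int token, List<String> elements, List<Long> times, List<List<Integer>> tree) {
        List<Node> children = new ArrayList<>();
        for (int child : tree.get(token)) {
            children.add(doGetTree(child, elements, times, tree));
        }
        return new Node(elements.get(token), times.get(token), children);
    }
}
```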
AbstractProfileBreakdown.java
@@ -23,6 +23,8 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;

+import static java.util.Collections.emptyMap;
+
 /**
  * A record of timings for the various operations that may happen during query execution.
  * A node's time may be composed of several internal attributes (rewriting, weighting,
@@ -53,13 +55,30 @@ public abstract class AbstractProfileBreakdown<T extends Enum<T>> {
         timings[timing.ordinal()] = timer;
     }

-    /** Convert this record to a map from timingType to times. */
-    public Map<String, Long> toTimingMap() {
-        Map<String, Long> map = new HashMap<>();
+    /**
+     * Build a timing count breakdown.
+     */
+    public final Map<String, Long> toBreakdownMap() {
+        Map<String, Long> map = new HashMap<>(timings.length * 2);
         for (T timingType : timingTypes) {
             map.put(timingType.toString(), timings[timingType.ordinal()].getApproximateTiming());
             map.put(timingType.toString() + "_count", timings[timingType.ordinal()].getCount());
         }
         return Collections.unmodifiableMap(map);
     }

+    /**
+     * Fetch extra debugging information.
+     */
+    protected Map<String, Object> toDebugMap() {
+        return emptyMap();
+    }
+
+    public final long toNodeTime() {
+        long total = 0;
+        for (T timingType : timingTypes) {
+            total += timings[timingType.ordinal()].getApproximateTiming();
+        }
+        return total;
+    }
 }
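This split into `toBreakdownMap()`, `toDebugMap()`, and `toNodeTime()` is what fixes the `_count` bug called out in the commit message: node time is now summed from the timings alone, while the `_count` entries live only in the breakdown map. A compact sketch of the contract, assuming a two-phase timing enum (all names are simplified stand-ins):

```java
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class BreakdownSketch {
    enum Timing { INITIALIZE, COLLECT }

    private final long[] nanos = new long[Timing.values().length];
    private final long[] counts = new long[Timing.values().length];

    void record(Timing t, long tookNanos) {
        nanos[t.ordinal()] += tookNanos;
        counts[t.ordinal()]++;
    }

    // "<name>" and "<name>_count" entries, like toBreakdownMap() above.
    final Map<String, Long> toBreakdownMap() {
        Map<String, Long> map = new HashMap<>(nanos.length * 2);
        for (Timing t : Timing.values()) {
            map.put(t.toString().toLowerCase(), nanos[t.ordinal()]);
            map.put(t.toString().toLowerCase() + "_count", counts[t.ordinal()]);
        }
        return Collections.unmodifiableMap(map);
    }

    // Subclasses override this; the base implementation stays empty.
    protected Map<String, Object> toDebugMap() {
        return Collections.emptyMap();
    }

    // Node time sums only the timings, never the counts.
    final long toNodeTime() {
        long total = 0;
        for (long n : nanos) {
            total += n;
        }
        return total;
    }
}
```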
ProfileResult.java
@@ -19,56 +19,60 @@

 package org.elasticsearch.search.profile;

+import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.InstantiatingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.concurrent.TimeUnit;

-import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

 /**
  * This class is the internal representation of a profiled Query, corresponding
  * to a single node in the query tree. It is built after the query has finished executing
  * and is merely a structured representation, rather than the entity that collects the timing
  * profile (see InternalProfiler for that)
- *
+ * <p>
  * Each InternalProfileResult has a List of InternalProfileResults, which will contain
  * "children" queries if applicable
  */
 public final class ProfileResult implements Writeable, ToXContentObject {

     static final ParseField TYPE = new ParseField("type");
     static final ParseField DESCRIPTION = new ParseField("description");
+    static final ParseField BREAKDOWN = new ParseField("breakdown");
+    static final ParseField DEBUG = new ParseField("debug");
     static final ParseField NODE_TIME = new ParseField("time");
     static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos");
     static final ParseField CHILDREN = new ParseField("children");
-    static final ParseField BREAKDOWN = new ParseField("breakdown");

     private final String type;
     private final String description;
-    private final Map<String, Long> timings;
+    private final Map<String, Long> breakdown;
+    private final Map<String, Object> debug;
     private final long nodeTime;
     private final List<ProfileResult> children;

-    public ProfileResult(String type, String description, Map<String, Long> timings, List<ProfileResult> children) {
+    public ProfileResult(String type, String description, Map<String, Long> breakdown, Map<String, Object> debug,
+            long nodeTime, List<ProfileResult> children) {
         this.type = type;
         this.description = description;
-        this.timings = Objects.requireNonNull(timings, "required timings argument missing");
-        this.children = children;
-        this.nodeTime = getTotalTime(timings);
+        this.breakdown = Objects.requireNonNull(breakdown, "required breakdown argument missing");
+        this.debug = debug == null ? org.elasticsearch.common.collect.Map.of() : debug;
+        this.children = children == null ? org.elasticsearch.common.collect.List.of() : children;
+        this.nodeTime = nodeTime;
     }

     /**
@@ -78,19 +82,13 @@ public final class ProfileResult implements Writeable, ToXContentObject {
         this.type = in.readString();
         this.description = in.readString();
         this.nodeTime = in.readLong();
-        int timingsSize = in.readVInt();
-        this.timings = new HashMap<>(timingsSize);
-        for (int i = 0; i < timingsSize; ++i) {
-            timings.put(in.readString(), in.readLong());
-        }
-        int size = in.readVInt();
-        this.children = new ArrayList<>(size);
-        for (int i = 0; i < size; i++) {
-            children.add(new ProfileResult(in));
+        breakdown = in.readMap(StreamInput::readString, StreamInput::readLong);
+        if (in.getVersion().onOrAfter(Version.V_7_9_0)) {
+            debug = in.readMap(StreamInput::readString, StreamInput::readGenericValue);
+        } else {
+            debug = org.elasticsearch.common.collect.Map.of();
         }
+        children = in.readList(ProfileResult::new);
     }

     @Override
@@ -98,15 +96,11 @@ public final class ProfileResult implements Writeable, ToXContentObject {
         out.writeString(type);
         out.writeString(description);
         out.writeLong(nodeTime); // not Vlong because can be negative
-        out.writeVInt(timings.size());
-        for (Map.Entry<String, Long> entry : timings.entrySet()) {
-            out.writeString(entry.getKey());
-            out.writeLong(entry.getValue());
-        }
-        out.writeVInt(children.size());
-        for (ProfileResult child : children) {
-            child.writeTo(out);
+        out.writeMap(breakdown, StreamOutput::writeString, StreamOutput::writeLong);
+        if (out.getVersion().onOrAfter(Version.V_7_9_0)) {
+            out.writeMap(debug, StreamOutput::writeString, StreamOutput::writeGenericValue);
         }
+        out.writeList(children);
     }

     /**
@@ -117,17 +111,24 @@ public final class ProfileResult implements Writeable, ToXContentObject {
     }

     /**
-     * Retrieve the name of the query (e.g. "TermQuery")
+     * Retrieve the name of the entry (e.g. "TermQuery" or "LongTermsAggregator")
      */
     public String getQueryName() {
         return type;
     }

     /**
-     * Returns the timing breakdown for this particular query node
+     * The timing breakdown for this node.
      */
     public Map<String, Long> getTimeBreakdown() {
-        return Collections.unmodifiableMap(timings);
+        return Collections.unmodifiableMap(breakdown);
+    }
+
+    /**
+     * The debug information about the profiled execution.
+     */
+    public Map<String, Object> getDebugInfo() {
+        return Collections.unmodifiableMap(debug);
     }

     /**
@@ -155,79 +156,35 @@ public final class ProfileResult implements Writeable, ToXContentObject {
             builder.field(NODE_TIME.getPreferredName(), new TimeValue(getTime(), TimeUnit.NANOSECONDS).toString());
         }
         builder.field(NODE_TIME_RAW.getPreferredName(), getTime());
-        builder.field(BREAKDOWN.getPreferredName(), timings);
+        builder.field(BREAKDOWN.getPreferredName(), breakdown);
+        if (false == debug.isEmpty()) {
+            builder.field(DEBUG.getPreferredName(), debug);
+        }

-        if (!children.isEmpty()) {
-            builder = builder.startArray(CHILDREN.getPreferredName());
+        if (false == children.isEmpty()) {
+            builder.startArray(CHILDREN.getPreferredName());
             for (ProfileResult child : children) {
                 builder = child.toXContent(builder, params);
             }
-            builder = builder.endArray();
+            builder.endArray();
         }

-        builder = builder.endObject();
-        return builder;
+        return builder.endObject();
     }

-    public static ProfileResult fromXContent(XContentParser parser) throws IOException {
-        XContentParser.Token token = parser.currentToken();
-        ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
-        String currentFieldName = null;
-        String type = null, description = null;
-        Map<String, Long> timings = new HashMap<>();
-        List<ProfileResult> children = new ArrayList<>();
-        while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token == XContentParser.Token.FIELD_NAME) {
-                currentFieldName = parser.currentName();
-            } else if (token.isValue()) {
-                if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
-                    type = parser.text();
-                } else if (DESCRIPTION.match(currentFieldName, parser.getDeprecationHandler())) {
-                    description = parser.text();
-                } else if (NODE_TIME.match(currentFieldName, parser.getDeprecationHandler())) {
-                    // skip, total time is calculate by adding up 'timings' values in ProfileResult ctor
-                    parser.text();
-                } else if (NODE_TIME_RAW.match(currentFieldName, parser.getDeprecationHandler())) {
-                    // skip, total time is calculate by adding up 'timings' values in ProfileResult ctor
-                    parser.longValue();
-                } else {
-                    parser.skipChildren();
-                }
-            } else if (token == XContentParser.Token.START_OBJECT) {
-                if (BREAKDOWN.match(currentFieldName, parser.getDeprecationHandler())) {
-                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                        ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
-                        String name = parser.currentName();
-                        ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, parser.nextToken(), parser::getTokenLocation);
-                        long value = parser.longValue();
-                        timings.put(name, value);
-                    }
-                } else {
-                    parser.skipChildren();
-                }
-            } else if (token == XContentParser.Token.START_ARRAY) {
-                if (CHILDREN.match(currentFieldName, parser.getDeprecationHandler())) {
-                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                        children.add(ProfileResult.fromXContent(parser));
-                    }
-                } else {
-                    parser.skipChildren();
-                }
-            }
-        }
-        return new ProfileResult(type, description, timings, children);
+    private static final InstantiatingObjectParser<ProfileResult, Void> PARSER;
+    static {
+        InstantiatingObjectParser.Builder<ProfileResult, Void> parser =
+            InstantiatingObjectParser.builder("profile_result", true, ProfileResult.class);
+        parser.declareString(constructorArg(), TYPE);
+        parser.declareString(constructorArg(), DESCRIPTION);
+        parser.declareObject(constructorArg(), (p, c) -> p.map(), BREAKDOWN);
+        parser.declareObject(optionalConstructorArg(), (p, c) -> p.map(), DEBUG);
+        parser.declareLong(constructorArg(), NODE_TIME_RAW);
+        parser.declareObjectArray(optionalConstructorArg(), (p, c) -> fromXContent(p), CHILDREN);
+        PARSER = parser.build();
     }

-    /**
-     * @param timings a map of breakdown timing for the node
-     * @return The total time at this node
-     */
-    private static long getTotalTime(Map<String, Long> timings) {
-        long nodeTime = 0;
-        for (long time : timings.values()) {
-            nodeTime += time;
-        }
-        return nodeTime;
-    }
+    public static ProfileResult fromXContent(XContentParser p) throws IOException {
+        return PARSER.parse(p, null);
+    }

 }
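Because `debug` is a new field on the wire, both `writeTo` and the `StreamInput` constructor gate it on `Version.V_7_9_0`: an older peer never sees the field, and a newer reader talking to an older writer falls back to an empty map. A self-contained sketch of that backwards-compatibility pattern over plain `DataOutputStream`/`DataInputStream` (the version constants and wire format here are invented, not Elasticsearch's):

```java
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

public class VersionedDebugCodec {
    static final int V_OLD = 1;        // peers that predate the field
    static final int V_WITH_DEBUG = 2; // peers that understand "debug"

    static void write(DataOutputStream out, int remoteVersion, Map<String, String> debug) throws IOException {
        // Only emit the new section when the remote side can read it.
        if (remoteVersion >= V_WITH_DEBUG) {
            out.writeInt(debug.size());
            for (Map.Entry<String, String> e : debug.entrySet()) {
                out.writeUTF(e.getKey());
                out.writeUTF(e.getValue());
            }
        }
    }

    static Map<String, String> read(DataInputStream in, int remoteVersion) throws IOException {
        Map<String, String> debug = new LinkedHashMap<>();
        if (remoteVersion >= V_WITH_DEBUG) {
            int size = in.readInt();
            for (int i = 0; i < size; i++) {
                debug.put(in.readUTF(), in.readUTF());
            }
        }
        return debug; // stays empty when the peer predates the field
    }
}
```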
AggregationProfileBreakdown.java
@@ -21,10 +21,30 @@ package org.elasticsearch.search.profile.aggregation;

 import org.elasticsearch.search.profile.AbstractProfileBreakdown;

+import java.util.HashMap;
+import java.util.Map;
+
+import static java.util.Collections.unmodifiableMap;
+
+/**
+ * {@linkplain AbstractProfileBreakdown} customized to work with aggregations.
+ */
 public class AggregationProfileBreakdown extends AbstractProfileBreakdown<AggregationTimingType> {
+    private final Map<String, Object> extra = new HashMap<>();
+
     public AggregationProfileBreakdown() {
         super(AggregationTimingType.class);
     }

+    /**
+     * Add extra debugging information about the aggregation.
+     */
+    public void addDebugInfo(String key, Object value) {
+        extra.put(key, value);
+    }
+
+    @Override
+    protected Map<String, Object> toDebugMap() {
+        return unmodifiableMap(extra);
+    }
 }
ProfilingAggregator.java
@@ -91,6 +91,7 @@ public class ProfilingAggregator extends Aggregator {
             return delegate.buildAggregations(owningBucketOrds);
         } finally {
             timer.stop();
+            delegate.collectDebugInfo(profileBreakdown::addDebugInfo);
         }
     }
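`ProfilingAggregator` drains the delegate's debug entries in the same `finally` block that stops the timer, so the entries are captured once per `buildAggregations` call even if building throws. A sketch of that decorator shape (all names here are illustrative, not the real classes):

```java
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.BiConsumer;

public class ProfilingDecoratorSketch {
    interface Agg {
        String build();
        default void collectDebugInfo(BiConsumer<String, Object> add) {}
    }

    static String profiledBuild(Agg delegate, Map<String, Object> debugSink) {
        long start = System.nanoTime();
        try {
            return delegate.build();
        } finally {
            // Time the call, then drain debug info, both in the same finally.
            debugSink.put("build_aggregation_nanos", System.nanoTime() - start); // hypothetical key
            delegate.collectDebugInfo(debugSink::put);
        }
    }

    public static void main(String[] args) {
        Map<String, Object> debug = new LinkedHashMap<>();
        Agg agg = new Agg() {
            public String build() { return "result"; }
            public void collectDebugInfo(BiConsumer<String, Object> add) {
                add.accept("total_buckets", 4);
            }
        };
        System.out.println(profiledBuild(agg, debug) + " " + debug);
    }
}
```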
LongKeyedBucketOrdsTests.java
@@ -54,6 +54,7 @@ public class LongKeyedBucketOrdsTests extends ESTestCase {
         Set<Long> seen = new HashSet<>();
         seen.add(0L);
         seen.add(1000L);
+        assertThat(ords.size(), equalTo(2L));
         long[] values = new long[scaledRandomIntBetween(1, 10000)];
         for (int i = 0; i < values.length; i++) {
             values[i] = randomValueOtherThanMany(seen::contains, ESTestCase::randomLong);
@@ -61,6 +62,7 @@ public class LongKeyedBucketOrdsTests extends ESTestCase {
         }
         for (int i = 0; i < values.length; i++) {
             assertThat(ords.add(0, values[i]), equalTo(i + 2L));
+            assertThat(ords.size(), equalTo(i + 3L));
             if (randomBoolean()) {
                 assertThat(ords.add(0, 0), equalTo(-1L));
             }
@@ -102,6 +104,7 @@ public class LongKeyedBucketOrdsTests extends ESTestCase {
         assertThat(ords.add(1, 0), equalTo(1L));
         assertThat(ords.add(0, 0), equalTo(-1L));
         assertThat(ords.add(1, 0), equalTo(-2L));
+        assertThat(ords.size(), equalTo(2L));

         // And some random values
         Set<OwningBucketOrdAndValue> seen = new HashSet<>();
@@ -116,6 +119,7 @@ public class LongKeyedBucketOrdsTests extends ESTestCase {
         }
         for (int i = 0; i < values.length; i++) {
             assertThat(ords.add(values[i].owningBucketOrd, values[i].value), equalTo(i + 2L));
+            assertThat(ords.size(), equalTo(i + 3L));
             if (randomBoolean()) {
                 assertThat(ords.add(0, 0), equalTo(-1L));
             }
ProfileResultTests.java
@@ -30,8 +30,8 @@ import org.elasticsearch.test.ESTestCase;

 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Predicate;
@@ -46,22 +46,27 @@ public class ProfileResultTests extends ESTestCase {
     public static ProfileResult createTestItem(int depth) {
         String type = randomAlphaOfLengthBetween(5, 10);
         String description = randomAlphaOfLengthBetween(5, 10);
-        int timingsSize = randomIntBetween(0, 5);
-        Map<String, Long> timings = new HashMap<>(timingsSize);
-        for (int i = 0; i < timingsSize; i++) {
-            long time = randomNonNegativeLong() / timingsSize;
+        int breakdownsSize = randomIntBetween(0, 5);
+        Map<String, Long> breakdown = new HashMap<>(breakdownsSize);
+        while (breakdown.size() < breakdownsSize) {
+            long value = randomNonNegativeLong();
             if (randomBoolean()) {
                 // also often use "small" values in tests
-                time = randomNonNegativeLong() % 10000;
+                value = value % 10000;
             }
-            timings.put(randomAlphaOfLengthBetween(5, 10), time); // don't overflow Long.MAX_VALUE;
+            breakdown.put(randomAlphaOfLengthBetween(5, 10), value);
+        }
+        int debugSize = randomIntBetween(0, 5);
+        Map<String, Object> debug = new HashMap<>(debugSize);
+        while (debug.size() < debugSize) {
+            debug.put(randomAlphaOfLength(5), randomAlphaOfLength(4));
         }
         int childrenSize = depth > 0 ? randomIntBetween(0, 1) : 0;
         List<ProfileResult> children = new ArrayList<>(childrenSize);
         for (int i = 0; i < childrenSize; i++) {
             children.add(createTestItem(depth - 1));
         }
-        return new ProfileResult(type, description, timings, children);
+        return new ProfileResult(type, description, breakdown, debug, randomNonNegativeLong(), children);
     }

     public void testFromXContent() throws IOException {
@@ -83,8 +88,9 @@ public class ProfileResultTests extends ESTestCase {
         BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
         BytesReference mutated;
         if (addRandomFields) {
-            // "breakdown" just consists of key/value pairs, we shouldn't add anything random there
-            Predicate<String> excludeFilter = (s) -> s.endsWith(ProfileResult.BREAKDOWN.getPreferredName());
+            // "breakdown" and "debug" just consists of key/value pairs, we shouldn't add anything random there
+            Predicate<String> excludeFilter = (s) ->
+                s.endsWith(ProfileResult.BREAKDOWN.getPreferredName()) || s.endsWith(ProfileResult.DEBUG.getPreferredName());
             mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
         } else {
             mutated = originalBytes;
@@ -102,12 +108,28 @@ public class ProfileResultTests extends ESTestCase {

     public void testToXContent() throws IOException {
         List<ProfileResult> children = new ArrayList<>();
-        children.add(new ProfileResult("child1", "desc1", Collections.singletonMap("key1", 100L), Collections.emptyList()));
-        children.add(new ProfileResult("child2", "desc2", Collections.singletonMap("key1", 123356L), Collections.emptyList()));
-        Map<String, Long> timings3 = new HashMap<>();
-        timings3.put("key1", 123456L);
-        timings3.put("key2", 100000L);
-        ProfileResult result = new ProfileResult("someType", "some description", timings3, children);
+        children.add(new ProfileResult(
+            "child1",
+            "desc1",
+            org.elasticsearch.common.collect.Map.of("key1", 100L),
+            org.elasticsearch.common.collect.Map.of(),
+            100L,
+            org.elasticsearch.common.collect.List.of())
+        );
+        children.add(new ProfileResult(
+            "child2",
+            "desc2",
+            org.elasticsearch.common.collect.Map.of("key1", 123356L),
+            org.elasticsearch.common.collect.Map.of(),
+            123356L,
+            org.elasticsearch.common.collect.List.of()));
+        Map<String, Long> breakdown = new LinkedHashMap<>();
+        breakdown.put("key1", 123456L);
+        breakdown.put("stuff", 10000L);
+        Map<String, Object> debug = new LinkedHashMap<>();
+        debug.put("a", "foo");
+        debug.put("b", "bar");
+        ProfileResult result = new ProfileResult("someType", "some description", breakdown, debug, 223456L, children);
         XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
         result.toXContent(builder, ToXContent.EMPTY_PARAMS);
         assertEquals("{\n" +
@@ -116,7 +138,11 @@ public class ProfileResultTests extends ESTestCase {
         "  \"time_in_nanos\" : 223456,\n" +
         "  \"breakdown\" : {\n" +
         "    \"key1\" : 123456,\n" +
-        "    \"key2\" : 100000\n" +
+        "    \"stuff\" : 10000\n" +
+        "  },\n" +
+        "  \"debug\" : {\n" +
+        "    \"a\" : \"foo\",\n" +
+        "    \"b\" : \"bar\"\n" +
         "  },\n" +
         "  \"children\" : [\n" +
         "    {\n" +
@@ -147,7 +173,11 @@ public class ProfileResultTests extends ESTestCase {
         "  \"time_in_nanos\" : 223456,\n" +
         "  \"breakdown\" : {\n" +
         "    \"key1\" : 123456,\n" +
-        "    \"key2\" : 100000\n" +
+        "    \"stuff\" : 10000\n" +
+        "  },\n" +
+        "  \"debug\" : {\n" +
+        "    \"a\" : \"foo\",\n" +
+        "    \"b\" : \"bar\"\n" +
         "  },\n" +
         "  \"children\" : [\n" +
         "    {\n" +
@@ -171,7 +201,14 @@ public class ProfileResultTests extends ESTestCase {
         "  ]\n" +
         "}", Strings.toString(builder));

-        result = new ProfileResult("profileName", "some description", Collections.singletonMap("key1", 12345678L), Collections.emptyList());
+        result = new ProfileResult(
+            "profileName",
+            "some description",
+            org.elasticsearch.common.collect.Map.of("key1", 12345678L),
+            org.elasticsearch.common.collect.Map.of(),
+            12345678L,
+            org.elasticsearch.common.collect.List.of()
+        );
         builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
         result.toXContent(builder, ToXContent.EMPTY_PARAMS);
         assertEquals("{\n" +
@@ -184,8 +221,14 @@ public class ProfileResultTests extends ESTestCase {
         "  }\n" +
         "}", Strings.toString(builder));

-        result = new ProfileResult("profileName", "some description", Collections.singletonMap("key1", 1234567890L),
-            Collections.emptyList());
+        result = new ProfileResult(
+            "profileName",
+            "some description",
+            org.elasticsearch.common.collect.Map.of("key1", 1234567890L),
+            org.elasticsearch.common.collect.Map.of(),
+            1234567890L,
+            org.elasticsearch.common.collect.List.of()
+        );
         builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
         result.toXContent(builder, ToXContent.EMPTY_PARAMS);
         assertEquals("{\n" +
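A small but deliberate detail in the tests above: the expected JSON is asserted as an exact string, so the maps feeding `toXContent` switch from `HashMap` to `LinkedHashMap` to pin the key order. A quick illustration of why that matters:

```java
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class MapOrderSketch {
    public static void main(String[] args) {
        // LinkedHashMap iterates in insertion order, so serialized output is stable.
        Map<String, Long> linked = new LinkedHashMap<>();
        linked.put("key1", 123456L);
        linked.put("stuff", 10000L);
        System.out.println(linked); // always {key1=123456, stuff=10000}

        // HashMap's iteration order depends on hashing and may differ.
        Map<String, Long> hashed = new HashMap<>();
        hashed.put("key1", 123456L);
        hashed.put("stuff", 10000L);
        System.out.println(hashed); // order not guaranteed
    }
}
```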
SearchProfileShardResultsTests.java
@@ -80,7 +80,9 @@ public class SearchProfileShardResultsTests extends ESTestCase {
         if (addRandomFields) {
             // The ProfileResults "breakdown" section just consists of key/value pairs, we shouldn't add anything random there
             // also we don't want to insert into the root object here, its just the PROFILE_FIELD itself
-            Predicate<String> excludeFilter = (s) -> (s.isEmpty() || s.endsWith(ProfileResult.BREAKDOWN.getPreferredName()));
+            Predicate<String> excludeFilter = (s) -> s.isEmpty()
+                || s.endsWith(ProfileResult.BREAKDOWN.getPreferredName())
+                || s.endsWith(ProfileResult.DEBUG.getPreferredName());
             mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
         } else {
             mutated = originalBytes;
AggregationProfileShardResultTests.java
@@ -31,7 +31,7 @@ import org.elasticsearch.test.ESTestCase;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;

@@ -69,10 +69,13 @@ public class AggregationProfileShardResultTests extends ESTestCase {

     public void testToXContent() throws IOException {
         List<ProfileResult> profileResults = new ArrayList<>();
-        Map<String, Long> timings = new HashMap<>();
-        timings.put("timing1", 2000L);
-        timings.put("timing2", 4000L);
-        ProfileResult profileResult = new ProfileResult("someType", "someDescription", timings, Collections.emptyList());
+        Map<String, Long> breakdown = new LinkedHashMap<>();
+        breakdown.put("timing1", 2000L);
+        breakdown.put("timing2", 4000L);
+        Map<String, Object> debug = new LinkedHashMap<>();
+        debug.put("stuff", "stuff");
+        debug.put("other_stuff", org.elasticsearch.common.collect.List.of("foo", "bar"));
+        ProfileResult profileResult = new ProfileResult("someType", "someDescription", breakdown, debug, 6000L, Collections.emptyList());
         profileResults.add(profileResult);
         AggregationProfileShardResult aggProfileResults = new AggregationProfileShardResult(profileResults);
         BytesReference xContent = toXContent(aggProfileResults, XContentType.JSON, false);
@@ -80,7 +83,8 @@ public class AggregationProfileShardResultTests extends ESTestCase {
             + "{\"type\":\"someType\","
             + "\"description\":\"someDescription\","
             + "\"time_in_nanos\":6000,"
-            + "\"breakdown\":{\"timing1\":2000,\"timing2\":4000}"
+            + "\"breakdown\":{\"timing1\":2000,\"timing2\":4000},"
+            + "\"debug\":{\"stuff\":\"stuff\",\"other_stuff\":[\"foo\",\"bar\"]}"
             + "}"
             + "]}", xContent.utf8ToString());

@@ -90,7 +94,8 @@ public class AggregationProfileShardResultTests extends ESTestCase {
             + "\"description\":\"someDescription\","
             + "\"time\":\"6micros\","
             + "\"time_in_nanos\":6000,"
-            + "\"breakdown\":{\"timing1\":2000,\"timing2\":4000}"
+            + "\"breakdown\":{\"timing1\":2000,\"timing2\":4000},"
+            + "\"debug\":{\"stuff\":\"stuff\",\"other_stuff\":[\"foo\",\"bar\"]}"
             + "}"
             + "]}", xContent.utf8ToString());
     }
QueryProfilerTests.java
@@ -111,19 +111,19 @@ public class QueryProfilerTests extends ESTestCase {
         List<ProfileResult> results = profiler.getTree();
         assertEquals(1, results.size());
         Map<String, Long> breakdown = results.get(0).getTimeBreakdown();
-        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
-        assertThat(breakdown.get(QueryTimingType.SCORE.toString()).longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.SCORE.toString()), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.MATCH.toString()), equalTo(0L));

-        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
-        assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count").longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count"), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count"), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count"), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count"), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count"), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count"), equalTo(0L));

         long rewriteTime = profiler.getRewriteTime();
         assertThat(rewriteTime, greaterThan(0L));
@@ -137,19 +137,19 @@ public class QueryProfilerTests extends ESTestCase {
         List<ProfileResult> results = profiler.getTree();
         assertEquals(1, results.size());
         Map<String, Long> breakdown = results.get(0).getTimeBreakdown();
-        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
-        assertThat(breakdown.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
-        assertThat(breakdown.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.SCORE.toString()), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.MATCH.toString()), equalTo(0L));

-        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
-        assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
-        assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count"), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count"), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count"), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count"), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count"), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count"), equalTo(0L));

         long rewriteTime = profiler.getRewriteTime();
         assertThat(rewriteTime, greaterThan(0L));
@@ -175,19 +175,19 @@ public class QueryProfilerTests extends ESTestCase {
         List<ProfileResult> results = profiler.getTree();
         assertEquals(1, results.size());
         Map<String, Long> breakdown = results.get(0).getTimeBreakdown();
-        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
-        assertThat(breakdown.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
-        assertThat(breakdown.get(QueryTimingType.MATCH.toString()).longValue(), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.SCORE.toString()), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.MATCH.toString()), greaterThan(0L));

-        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
-        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
-        assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
-        assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count").longValue(), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count"), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count"), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count"), greaterThan(0L));
+        assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count"), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count"), equalTo(0L));
+        assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count"), greaterThan(0L));

         long rewriteTime = profiler.getRewriteTime();
         assertThat(rewriteTime, greaterThan(0L));