This adds two extra bits of information to the profiler:

1. Counts of how many collectors of each type the `cardinality` aggregation uses. This lets us see whether we're using the segment-ordinals optimization, plus a few similar counters for good measure.
2. Profiling of the `getLeafCollector` and `postCollection` methods. These are non-trivial for some aggregations, like `cardinality`.
parent 2a7de79a1b
commit 771a8893a6
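For orientation before the diff: the two new phases are timed with the same start/delegate/stop-in-finally pattern the profiler already uses for the other phases, and the per-phase totals and call counts are what surface as `build_leaf_collector`, `build_leaf_collector_count`, `post_collection`, and `post_collection_count` in the breakdown. The sketch below is a minimal, self-contained illustration of that pattern; the `Phase` enum, `Timer` class, and `timed` helper here are simplified stand-ins, not the actual Elasticsearch classes changed in this commit.

```java
import java.util.EnumMap;
import java.util.Map;
import java.util.concurrent.Callable;

public class ProfileTimingSketch {
    // Simplified stand-in for the timing-type enum; the real one gains
    // BUILD_LEAF_COLLECTOR and POST_COLLECTION in this commit.
    enum Phase { INITIALIZE, BUILD_LEAF_COLLECTOR, COLLECT, POST_COLLECTION, BUILD_AGGREGATION, REDUCE }

    // Simplified stand-in for the profiler's per-phase timer: it accumulates
    // total elapsed nanoseconds and a call count, which back the
    // "<phase>" and "<phase>_count" entries in the breakdown.
    static final class Timer {
        long totalNanos;
        long count;
        private long startedAt;

        void start() { startedAt = System.nanoTime(); }

        void stop() {
            totalNanos += System.nanoTime() - startedAt;
            count++;
        }
    }

    private final Map<Phase, Timer> breakdown = new EnumMap<>(Phase.class);

    // Wrap a delegate call the way the profiling wrapper does for
    // getLeafCollector() and postCollection(): start the phase timer,
    // delegate, and stop the timer in finally so failures are still timed.
    <T> T timed(Phase phase, Callable<T> delegate) throws Exception {
        Timer timer = breakdown.computeIfAbsent(phase, p -> new Timer());
        timer.start();
        try {
            return delegate.call();
        } finally {
            timer.stop();
        }
    }
}
```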
@@ -794,7 +794,11 @@ This yields the following aggregation profile output:
           "initialize_count": 1,
           "reduce_count": 0,
           "collect": 45786,
-          "collect_count": 4
+          "collect_count": 4,
+          "build_leaf_collector": 18211,
+          "build_leaf_collector_count": 1,
+          "post_collection": 929,
+          "post_collection_count": 1
         },
         "debug": {
           "total_buckets": 1,
@@ -813,7 +817,11 @@ This yields the following aggregation profile output:
           "initialize_count": 1,
           "reduce_count": 0,
           "collect": 69401,
-          "collect_count": 4
+          "collect_count": 4,
+          "build_leaf_collector": 8150,
+          "build_leaf_collector_count": 1,
+          "post_collection": 1584,
+          "post_collection_count": 1
         },
         "children": [
           {
@@ -828,7 +836,11 @@ This yields the following aggregation profile output:
           "initialize_count": 1,
           "reduce_count": 0,
           "collect": 61611,
-          "collect_count": 4
+          "collect_count": 4,
+          "build_leaf_collector": 5564,
+          "build_leaf_collector_count": 1,
+          "post_collection": 471,
+          "post_collection_count": 1
         },
         "debug": {
           "total_buckets": 1,

@@ -212,3 +212,81 @@ setup:
               cardinality:
                 field: int_field
                 precision_threshold: -1
+
+---
+"profiler int":
+  - skip:
+      version: " - 7.99.99"
+      reason: new info added in 8.0.0 to be backported to 7.10.0
+  - do:
+      search:
+        body:
+          profile: true
+          size: 0
+          aggs:
+            distinct_int:
+              cardinality:
+                field: int_field
+  - match: { aggregations.distinct_int.value: 4 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.initialize: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.build_leaf_collector: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.collect: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.build_aggregation: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.post_collection: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.empty_collectors_used: 0 }
+  - gt: { profile.shards.0.aggregations.0.debug.numeric_collectors_used: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.ordinals_collectors_used: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.ordinals_collectors_overhead_too_high: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.string_hashing_collectors_used: 0 }
+
+---
+"profiler double":
+  - skip:
+      version: " - 7.99.99"
+      reason: new info added in 8.0.0 to be backported to 7.10.0
+  - do:
+      search:
+        body:
+          profile: true
+          size: 0
+          aggs:
+            distinct_double:
+              cardinality:
+                field: double_field
+  - match: { aggregations.distinct_double.value: 4 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.initialize: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.build_leaf_collector: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.collect: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.build_aggregation: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.post_collection: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.empty_collectors_used: 0 }
+  - gt: { profile.shards.0.aggregations.0.debug.numeric_collectors_used: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.ordinals_collectors_used: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.ordinals_collectors_overhead_too_high: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.string_hashing_collectors_used: 0 }
+
+---
+"profiler string":
+  - skip:
+      version: " - 7.99.99"
+      reason: new info added in 8.0.0 to be backported to 7.10.0
+  - do:
+      search:
+        body:
+          profile: true
+          size: 0
+          aggs:
+            distinct_string:
+              cardinality:
+                field: string_field
+  - match: { aggregations.distinct_string.value: 1 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.initialize: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.build_leaf_collector: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.collect: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.build_aggregation: 0 }
+  - gt: { profile.shards.0.aggregations.0.breakdown.post_collection: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.empty_collectors_used: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.numeric_collectors_used: 0 }
+  - gt: { profile.shards.0.aggregations.0.debug.ordinals_collectors_used: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.ordinals_collectors_overhead_too_high: 0 }
+  - match: { profile.shards.0.aggregations.0.debug.string_hashing_collectors_used: 0 }

@@ -50,16 +50,28 @@ import static org.hamcrest.Matchers.notNullValue;
 
 @ESIntegTestCase.SuiteScopeTestCase
 public class AggregationProfilerIT extends ESIntegTestCase {
+    private static final String BUILD_LEAF_COLLECTOR = AggregationTimingType.BUILD_LEAF_COLLECTOR.toString();
     private static final String COLLECT = AggregationTimingType.COLLECT.toString();
+    private static final String POST_COLLECTION = AggregationTimingType.POST_COLLECTION.toString();
     private static final String INITIALIZE = AggregationTimingType.INITIALIZE.toString();
     private static final String BUILD_AGGREGATION = AggregationTimingType.BUILD_AGGREGATION.toString();
     private static final String REDUCE = AggregationTimingType.REDUCE.toString();
     private static final Set<String> BREAKDOWN_KEYS = org.elasticsearch.common.collect.Set.of(
-        COLLECT, INITIALIZE, BUILD_AGGREGATION, REDUCE,
-        COLLECT + "_count", INITIALIZE + "_count", BUILD_AGGREGATION + "_count", REDUCE + "_count");
+        INITIALIZE,
+        BUILD_LEAF_COLLECTOR,
+        COLLECT,
+        POST_COLLECTION,
+        BUILD_AGGREGATION,
+        REDUCE,
+        INITIALIZE + "_count",
+        BUILD_LEAF_COLLECTOR + "_count",
+        COLLECT + "_count",
+        POST_COLLECTION + "_count",
+        BUILD_AGGREGATION + "_count",
+        REDUCE + "_count"
+    );
 
     private static final String TOTAL_BUCKETS = "total_buckets";
-    private static final String WRAPPED = "wrapped_in_multi_bucket_aggregator";
     private static final String DEFERRED = "deferred_aggregators";
     private static final String COLLECTION_STRAT = "collection_strategy";
     private static final String RESULT_STRAT = "result_strategy";
@@ -316,7 +328,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(diversifyBreakdown, notNullValue());
         assertThat(diversifyBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(diversifyBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(diversifyBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(diversifyBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(diversifyBreakdown.get(REDUCE), equalTo(0L));
         assertThat(diversifyAggResult.getDebugInfo(), equalTo(
@@ -331,8 +345,10 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         Map<String, Long> maxBreakdown = maxAggResult.getTimeBreakdown();
         assertThat(maxBreakdown, notNullValue());
         assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
-        assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
-        assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(diversifyBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
+        assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(diversifyBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
         assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
@@ -374,7 +390,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(histoBreakdown, notNullValue());
         assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(histoBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(histoBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(histoBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(histoBreakdown.get(REDUCE), equalTo(0L));
         Map<String, Object> histoDebugInfo = histoAggResult.getDebugInfo();
@@ -394,7 +412,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(tagsBreakdown, notNullValue());
         assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(tagsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(tagsBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(tagsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(tagsBreakdown.get(REDUCE), equalTo(0L));
         assertRemapTermsDebugInfo(tagsAggResult);
@@ -411,7 +431,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(avgBreakdown, notNullValue());
         assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(avgBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
         assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
@@ -425,7 +447,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(maxBreakdown, notNullValue());
         assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(maxBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
         assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
@@ -439,7 +463,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(stringsBreakdown, notNullValue());
         assertThat(stringsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(stringsBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(stringsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(stringsBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(stringsBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(stringsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(stringsBreakdown.get(REDUCE), equalTo(0L));
         assertRemapTermsDebugInfo(stringsAggResult);
@@ -456,7 +482,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(avgBreakdown, notNullValue());
         assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(avgBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
         assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
@@ -470,7 +498,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(maxBreakdown, notNullValue());
         assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(maxBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
         assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
@@ -485,7 +515,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(tagsBreakdown, notNullValue());
         assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(tagsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(tagsBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(tagsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(tagsBreakdown.get(REDUCE), equalTo(0L));
         assertRemapTermsDebugInfo(tagsAggResult);
@@ -502,7 +534,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(avgBreakdown, notNullValue());
         assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(avgBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(avgBreakdown.get(REDUCE), equalTo(0L));
         assertThat(avgAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));
@@ -516,7 +550,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         assertThat(maxBreakdown, notNullValue());
         assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
         assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
+        assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
         assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
+        assertThat(maxBreakdown.get(POST_COLLECTION), greaterThan(0L));
         assertThat(maxBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
         assertThat(maxBreakdown.get(REDUCE), equalTo(0L));
         assertThat(maxAggResult.getDebugInfo(), equalTo(org.elasticsearch.common.collect.Map.of()));

@@ -30,6 +30,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.hash.MurmurHash3;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.util.BigArrays;
@@ -46,6 +47,7 @@ import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 import java.util.Map;
+import java.util.function.BiConsumer;
 
 /**
  * An aggregator that computes approximate counts of unique values.
@@ -61,6 +63,12 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
 
     private Collector collector;
 
+    private int emptyCollectorsUsed;
+    private int numericCollectorsUsed;
+    private int ordinalsCollectorsUsed;
+    private int ordinalsCollectorsOverheadTooHigh;
+    private int stringHashingCollectorsUsed;
+
     public CardinalityAggregator(
         String name,
         ValuesSourceConfig valuesSourceConfig,
@@ -82,6 +90,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
 
     private Collector pickCollector(LeafReaderContext ctx) throws IOException {
         if (valuesSource == null) {
+            emptyCollectorsUsed++;
            return new EmptyCollector();
         }
 
@@ -89,6 +98,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
             ValuesSource.Numeric source = (ValuesSource.Numeric) valuesSource;
             MurmurHash3Values hashValues = source.isFloatingPoint() ?
                 MurmurHash3Values.hash(source.doubleValues(ctx)) : MurmurHash3Values.hash(source.longValues(ctx));
+            numericCollectorsUsed++;
             return new DirectCollector(counts, hashValues);
         }
 
@@ -97,6 +107,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
             final SortedSetDocValues ordinalValues = source.ordinalsValues(ctx);
             final long maxOrd = ordinalValues.getValueCount();
             if (maxOrd == 0) {
+                emptyCollectorsUsed++;
                 return new EmptyCollector();
             }
 
@@ -104,10 +115,13 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
             final long countsMemoryUsage = HyperLogLogPlusPlus.memoryUsage(precision);
             // only use ordinals if they don't increase memory usage by more than 25%
             if (ordinalsMemoryUsage < countsMemoryUsage / 4) {
+                ordinalsCollectorsUsed++;
                 return new OrdinalsCollector(counts, ordinalValues, context.bigArrays());
             }
+            ordinalsCollectorsOverheadTooHigh++;
         }
 
+        stringHashingCollectorsUsed++;
         return new DirectCollector(counts, MurmurHash3Values.hash(valuesSource.bytesValues(ctx)));
     }
 
@@ -163,6 +177,16 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
         Releasables.close(counts, collector);
     }
 
+    @Override
+    public void collectDebugInfo(BiConsumer<String, Object> add) {
+        super.collectDebugInfo(add);
+        add.accept("empty_collectors_used", emptyCollectorsUsed);
+        add.accept("numeric_collectors_used", numericCollectorsUsed);
+        add.accept("ordinals_collectors_used", ordinalsCollectorsUsed);
+        add.accept("ordinals_collectors_overhead_too_high", ordinalsCollectorsOverheadTooHigh);
+        add.accept("string_hashing_collectors_used", stringHashingCollectorsUsed);
+    }
+
     private abstract static class Collector extends LeafBucketCollector implements Releasable {
 
         public abstract void postCollect() throws IOException;
@@ -234,7 +258,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
         private final SortedSetDocValues values;
         private final int maxOrd;
         private final HyperLogLogPlusPlus counts;
-        private ObjectArray<FixedBitSet> visitedOrds;
+        private ObjectArray<FixedBitSet> visitedOrds; // Danger! This is not tracked by BigArrays!
 
         OrdinalsCollector(HyperLogLogPlusPlus counts, SortedSetDocValues values,
                           BigArrays bigArrays) {
@@ -273,12 +297,12 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
                 }
             }
 
-            final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash = new org.elasticsearch.common.hash.MurmurHash3.Hash128();
+            final MurmurHash3.Hash128 hash = new MurmurHash3.Hash128();
             try (LongArray hashes = bigArrays.newLongArray(maxOrd, false)) {
                 for (int ord = allVisitedOrds.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS;
                      ord = ord + 1 < maxOrd ? allVisitedOrds.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) {
                     final BytesRef value = values.lookupOrd(ord);
-                    org.elasticsearch.common.hash.MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash);
+                    MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash);
                     hashes.set(ord, hash.h1);
                 }
 
@@ -298,7 +322,6 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
         public void close() {
             Releasables.close(visitedOrds);
         }
-
     }
 
     /**
@@ -383,8 +406,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
 
     private static class Bytes extends MurmurHash3Values {
 
-        private final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash =
-            new org.elasticsearch.common.hash.MurmurHash3.Hash128();
+        private final MurmurHash3.Hash128 hash = new MurmurHash3.Hash128();
 
         private final SortedBinaryDocValues values;
 
@@ -405,7 +427,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
         @Override
         public long nextValue() throws IOException {
             final BytesRef bytes = values.nextValue();
-            org.elasticsearch.common.hash.MurmurHash3.hash128(bytes.bytes, bytes.offset, bytes.length, 0, hash);
+            MurmurHash3.hash128(bytes.bytes, bytes.offset, bytes.length, 0, hash);
             return hash.h1;
         }
     }

@@ -23,7 +23,9 @@ import java.util.Locale;
 
 public enum AggregationTimingType {
     INITIALIZE,
+    BUILD_LEAF_COLLECTOR,
     COLLECT,
+    POST_COLLECTION,
     BUILD_AGGREGATION,
     REDUCE;
 

@@ -102,7 +102,13 @@ public class ProfilingAggregator extends Aggregator {
 
     @Override
     public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException {
-        return new ProfilingLeafBucketCollector(delegate.getLeafCollector(ctx), profileBreakdown);
+        Timer timer = profileBreakdown.getTimer(AggregationTimingType.BUILD_LEAF_COLLECTOR);
+        timer.start();
+        try {
+            return new ProfilingLeafBucketCollector(delegate.getLeafCollector(ctx), profileBreakdown);
+        } finally {
+            timer.stop();
+        }
     }
 
     @Override
@@ -120,7 +126,13 @@ public class ProfilingAggregator extends Aggregator {
 
     @Override
     public void postCollection() throws IOException {
-        delegate.postCollection();
+        Timer timer = profileBreakdown.getTimer(AggregationTimingType.POST_COLLECTION);
+        timer.start();
+        try {
+            delegate.postCollection();
+        } finally {
+            timer.stop();
+        }
     }
 
     @Override

@@ -46,6 +46,7 @@ yamlRestTest {
     // Runtime fields don't have global ords
     'search.aggregation/20_terms/string profiler via global ordinals',
     'search.aggregation/20_terms/Global ordinals are loaded with the global_ordinals execution hint',
+    'search.aggregation/170_cardinality_metric/profiler string',
     //dynamic template causes a type _doc to be created, these tests use another type but only one type is allowed
     'search.aggregation/51_filter_with_types/*',
     'search/171_terms_query_with_types/*',