ProfileResult and CollectorResult should print machine readable timing information (#22561)
Currently both ProfileResult and CollectorResult print the time field in a human readable string format (e.g. "time": "55.20315000ms"). When trying to parse this back to a long value, for example to use it in the planned high-level Java REST client, we can lose precision because of conversion and rounding issues. This change adds a new field (`time_in_nanos`) to the profile response so that the original time value in nanoseconds can be retrieved. The old `time` field is only printed when the `?human=true` flag is set in the url, which follows the behaviour of all other stats-related APIs. The format of the `time` field also changes slightly: instead of always formatting the output as a 10-digit ms value, the `XContentBuilder#timeValueField()` method is used, so the value is printed with the largest suitable time unit (e.g. "s", "ms", "micros").
This commit is contained in:
parent ee0c4c1776
commit 59a48ffc41
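Illustrative only, not part of the diff: a minimal sketch of the serialization pattern the change relies on, assuming the Elasticsearch 5.x XContent API is on the classpath (the class name `TimeValueFieldSketch` and the sample value are made up for the example). `XContentBuilder#timeValueField()` always writes the raw nanosecond field and only adds the human readable `time` field when the builder is marked human readable, which is what ties the new `time_in_nanos` field to the `?human=true` flag.

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

import java.io.IOException;
import java.util.concurrent.TimeUnit;

// Hypothetical demo class, not part of this commit.
public class TimeValueFieldSketch {
    public static void main(String[] args) throws IOException {
        long tookNanos = 123456L; // example timing as recorded by the profiler

        // Default (machine readable) output: only "time_in_nanos" is emitted.
        XContentBuilder plain = XContentFactory.jsonBuilder().prettyPrint();
        plain.startObject()
            .timeValueField("time_in_nanos", "time", tookNanos, TimeUnit.NANOSECONDS)
            .endObject();
        System.out.println(plain.string()); // only "time_in_nanos" : 123456 is present

        // With ?human=true the builder is marked human readable and the extra
        // "time" field appears, rendered with the largest suitable unit.
        XContentBuilder human = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
        human.startObject()
            .timeValueField("time_in_nanos", "time", tookNanos, TimeUnit.NANOSECONDS)
            .endObject();
        System.out.println(human.string()); // now also contains "time" : "123.4micros"
    }
}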
@ -31,8 +31,8 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
* This class is the internal representation of a profiled Query, corresponding
@ -48,6 +48,7 @@ public final class ProfileResult implements Writeable, ToXContent {
private static final ParseField TYPE = new ParseField("type");
private static final ParseField DESCRIPTION = new ParseField("description");
private static final ParseField NODE_TIME = new ParseField("time");
private static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos");
private static final ParseField CHILDREN = new ParseField("children");
private static final ParseField BREAKDOWN = new ParseField("breakdown");

@ -146,7 +147,7 @@ public final class ProfileResult implements Writeable, ToXContent {
builder = builder.startObject()
.field(TYPE.getPreferredName(), type)
.field(DESCRIPTION.getPreferredName(), description)
.field(NODE_TIME.getPreferredName(), String.format(Locale.US, "%.10gms", getTime() / 1000000.0))
.timeValueField(NODE_TIME_RAW.getPreferredName(), NODE_TIME.getPreferredName(), getTime(), TimeUnit.NANOSECONDS)
.field(BREAKDOWN.getPreferredName(), timings);

if (!children.isEmpty()) {

@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;

/**
* Public interface and serialization container for profiled timings of the
@ -52,6 +52,7 @@ public class CollectorResult implements ToXContent, Writeable {
private static final ParseField NAME = new ParseField("name");
private static final ParseField REASON = new ParseField("reason");
private static final ParseField TIME = new ParseField("time");
private static final ParseField TIME_NANOS = new ParseField("time_in_nanos");
private static final ParseField CHILDREN = new ParseField("children");

/**
@ -140,7 +141,7 @@ public class CollectorResult implements ToXContent, Writeable {
builder = builder.startObject()
.field(NAME.getPreferredName(), getName())
.field(REASON.getPreferredName(), getReason())
.field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", (double) (getTime() / 1000000.0)));
.timeValueField(TIME_NANOS.getPreferredName(), TIME.getPreferredName(), getTime(), TimeUnit.NANOSECONDS);

if (!children.isEmpty()) {
builder = builder.startArray(CHILDREN.getPreferredName());

@ -0,0 +1,121 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.search.profile;

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ProfileResultTests extends ESTestCase {

public void testToXContent() throws IOException {
List<ProfileResult> children = new ArrayList<>();
children.add(new ProfileResult("child1", "desc1", Collections.emptyMap(), Collections.emptyList(), 100L));
children.add(new ProfileResult("child2", "desc2", Collections.emptyMap(), Collections.emptyList(), 123356L));
Map<String, Long> timings = new HashMap<>();
timings.put("key1", 12345L);
timings.put("key2", 6789L);
ProfileResult result = new ProfileResult("someType", "some description", timings, children, 123456L);
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
" \"type\" : \"someType\",\n" +
" \"description\" : \"some description\",\n" +
" \"time_in_nanos\" : 123456,\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 12345,\n" +
" \"key2\" : 6789\n" +
" },\n" +
" \"children\" : [\n" +
" {\n" +
" \"type\" : \"child1\",\n" +
" \"description\" : \"desc1\",\n" +
" \"time_in_nanos\" : 100,\n" +
" \"breakdown\" : { }\n" +
" },\n" +
" {\n" +
" \"type\" : \"child2\",\n" +
" \"description\" : \"desc2\",\n" +
" \"time_in_nanos\" : 123356,\n" +
" \"breakdown\" : { }\n" +
" }\n" +
" ]\n" +
"}", builder.string());

builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
" \"type\" : \"someType\",\n" +
" \"description\" : \"some description\",\n" +
" \"time\" : \"123.4micros\",\n" +
" \"time_in_nanos\" : 123456,\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 12345,\n" +
" \"key2\" : 6789\n" +
" },\n" +
" \"children\" : [\n" +
" {\n" +
" \"type\" : \"child1\",\n" +
" \"description\" : \"desc1\",\n" +
" \"time\" : \"100nanos\",\n" +
" \"time_in_nanos\" : 100,\n" +
" \"breakdown\" : { }\n" +
" },\n" +
" {\n" +
" \"type\" : \"child2\",\n" +
" \"description\" : \"desc2\",\n" +
" \"time\" : \"123.3micros\",\n" +
" \"time_in_nanos\" : 123356,\n" +
" \"breakdown\" : { }\n" +
" }\n" +
" ]\n" +
"}", builder.string());

result = new ProfileResult("profileName", "some description", Collections.emptyMap(), Collections.emptyList(), 12345678L);
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
" \"type\" : \"profileName\",\n" +
" \"description\" : \"some description\",\n" +
" \"time\" : \"12.3ms\",\n" +
" \"time_in_nanos\" : 12345678,\n" +
" \"breakdown\" : { }\n" +
"}", builder.string());

result = new ProfileResult("profileName", "some description", Collections.emptyMap(), Collections.emptyList(), 1234567890L);
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
" \"type\" : \"profileName\",\n" +
" \"description\" : \"some description\",\n" +
" \"time\" : \"1.2s\",\n" +
" \"time_in_nanos\" : 1234567890,\n" +
" \"breakdown\" : { }\n" +
"}", builder.string());
}
}

@ -0,0 +1,102 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.search.profile.query;

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class CollectorResultTests extends ESTestCase {

public void testToXContent() throws IOException {
List<CollectorResult> children = new ArrayList<>();
children.add(new CollectorResult("child1", "reason1", 100L, Collections.emptyList()));
children.add(new CollectorResult("child2", "reason1", 123356L, Collections.emptyList()));
CollectorResult result = new CollectorResult("collectorName", "some reason", 123456L, children);
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
" \"name\" : \"collectorName\",\n" +
" \"reason\" : \"some reason\",\n" +
" \"time_in_nanos\" : 123456,\n" +
" \"children\" : [\n" +
" {\n" +
" \"name\" : \"child1\",\n" +
" \"reason\" : \"reason1\",\n" +
" \"time_in_nanos\" : 100\n" +
" },\n" +
" {\n" +
" \"name\" : \"child2\",\n" +
" \"reason\" : \"reason1\",\n" +
" \"time_in_nanos\" : 123356\n" +
" }\n" +
" ]\n" +
"}", builder.string());

builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
" \"name\" : \"collectorName\",\n" +
" \"reason\" : \"some reason\",\n" +
" \"time\" : \"123.4micros\",\n" +
" \"time_in_nanos\" : 123456,\n" +
" \"children\" : [\n" +
" {\n" +
" \"name\" : \"child1\",\n" +
" \"reason\" : \"reason1\",\n" +
" \"time\" : \"100nanos\",\n" +
" \"time_in_nanos\" : 100\n" +
" },\n" +
" {\n" +
" \"name\" : \"child2\",\n" +
" \"reason\" : \"reason1\",\n" +
" \"time\" : \"123.3micros\",\n" +
" \"time_in_nanos\" : 123356\n" +
" }\n" +
" ]\n" +
"}", builder.string());

result = new CollectorResult("collectorName", "some reason", 12345678L, Collections.emptyList());
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
" \"name\" : \"collectorName\",\n" +
" \"reason\" : \"some reason\",\n" +
" \"time\" : \"12.3ms\",\n" +
" \"time_in_nanos\" : 12345678\n" +
"}", builder.string());

result = new CollectorResult("collectorName", "some reason", 1234567890L, Collections.emptyList());
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
" \"name\" : \"collectorName\",\n" +
" \"reason\" : \"some reason\",\n" +
" \"time\" : \"1.2s\",\n" +
" \"time_in_nanos\" : 1234567890\n" +
"}", builder.string());
}
}

@ -33,3 +33,9 @@

The search shards API no longer accepts the `type` url parameter, which didn't
have any effect in previous versions.

==== Changes to the Profile API

* The `"time"` field showing human readable timing output has been replaced by the `"time_in_nanos"`
field which displays the elapsed time in nanoseconds. The `"time"` field can be turned on by adding
`"?human=true"` to the request url. It will display a rounded, human readable time value.

@ -58,7 +58,7 @@ This will yield the following result:
{
"type": "BooleanQuery",
"description": "message:message message:number",
"time": "1.873811000ms",
"time_in_nanos": "1873811",
"breakdown": {
"score": 51306,
"score_count": 4,
@ -77,7 +77,7 @@ This will yield the following result:
{
"type": "TermQuery",
"description": "message:message",
"time": "0.3919430000ms",
"time_in_nanos": "391943",
"breakdown": {
"score": 28776,
"score_count": 4,
@ -96,7 +96,7 @@ This will yield the following result:
{
"type": "TermQuery",
"description": "message:number",
"time": "0.2106820000ms",
"time_in_nanos": "210682",
"breakdown": {
"score": 4552,
"score_count": 4,
@ -120,12 +120,12 @@ This will yield the following result:
{
"name": "CancellableCollector",
"reason": "search_cancelled",
"time": "0.3043110000ms",
"time_in_nanos": "304311",
"children": [
{
"name": "SimpleTopScoreDocCollector",
"reason": "search_top_hits",
"time": "0.03227300000ms"
"time_in_nanos": "32273"
}
]
}
@ -143,22 +143,22 @@ This will yield the following result:
// TESTRESPONSE[s/"id": "\[2aE02wS1R8q_QFnYu6vDVQ\]\[twitter\]\[1\]"/"id": $body.profile.shards.0.id/]
// TESTRESPONSE[s/"rewrite_time": 51443/"rewrite_time": $body.profile.shards.0.searches.0.rewrite_time/]
// TESTRESPONSE[s/"score": 51306/"score": $body.profile.shards.0.searches.0.query.0.breakdown.score/]
// TESTRESPONSE[s/"time": "1.873811000ms"/"time": $body.profile.shards.0.searches.0.query.0.time/]
// TESTRESPONSE[s/"time_in_nanos": "1873811"/"time_in_nanos": $body.profile.shards.0.searches.0.query.0.time_in_nanos/]
// TESTRESPONSE[s/"build_scorer": 2935582/"build_scorer": $body.profile.shards.0.searches.0.query.0.breakdown.build_scorer/]
// TESTRESPONSE[s/"create_weight": 919297/"create_weight": $body.profile.shards.0.searches.0.query.0.breakdown.create_weight/]
// TESTRESPONSE[s/"next_doc": 53876/"next_doc": $body.profile.shards.0.searches.0.query.0.breakdown.next_doc/]
// TESTRESPONSE[s/"time": "0.3919430000ms"/"time": $body.profile.shards.0.searches.0.query.0.children.0.time/]
// TESTRESPONSE[s/"time_in_nanos": "391943"/"time_in_nanos": $body.profile.shards.0.searches.0.query.0.children.0.time_in_nanos/]
// TESTRESPONSE[s/"score": 28776/"score": $body.profile.shards.0.searches.0.query.0.children.0.breakdown.score/]
// TESTRESPONSE[s/"build_scorer": 784451/"build_scorer": $body.profile.shards.0.searches.0.query.0.children.0.breakdown.build_scorer/]
// TESTRESPONSE[s/"create_weight": 1669564/"create_weight": $body.profile.shards.0.searches.0.query.0.children.0.breakdown.create_weight/]
// TESTRESPONSE[s/"next_doc": 10111/"next_doc": $body.profile.shards.0.searches.0.query.0.children.0.breakdown.next_doc/]
// TESTRESPONSE[s/"time": "0.2106820000ms"/"time": $body.profile.shards.0.searches.0.query.0.children.1.time/]
// TESTRESPONSE[s/"time_in_nanos": "210682"/"time_in_nanos": $body.profile.shards.0.searches.0.query.0.children.1.time_in_nanos/]
// TESTRESPONSE[s/"score": 4552/"score": $body.profile.shards.0.searches.0.query.0.children.1.breakdown.score/]
// TESTRESPONSE[s/"build_scorer": 42602/"build_scorer": $body.profile.shards.0.searches.0.query.0.children.1.breakdown.build_scorer/]
// TESTRESPONSE[s/"create_weight": 89323/"create_weight": $body.profile.shards.0.searches.0.query.0.children.1.breakdown.create_weight/]
// TESTRESPONSE[s/"next_doc": 2852/"next_doc": $body.profile.shards.0.searches.0.query.0.children.1.breakdown.next_doc/]
// TESTRESPONSE[s/"time": "0.3043110000ms"/"time": $body.profile.shards.0.searches.0.collector.0.time/]
// TESTRESPONSE[s/"time": "0.03227300000ms"/"time": $body.profile.shards.0.searches.0.collector.0.children.0.time/]
// TESTRESPONSE[s/"time_in_nanos": "304311"/"time_in_nanos": $body.profile.shards.0.searches.0.collector.0.time_in_nanos/]
// TESTRESPONSE[s/"time_in_nanos": "32273"/"time_in_nanos": $body.profile.shards.0.searches.0.collector.0.children.0.time_in_nanos/]
// Sorry for this mess....

<1> Search results are returned, but were omitted here for brevity
@ -216,6 +216,12 @@ a `query` array and a `collector` array. Alongside the `search` object is an `a

There will also be a `rewrite` metric showing the total time spent rewriting the query (in nanoseconds).

==== Human readable output

As with other statistics APIs, the Profile API supports human readable outputs for the time values. This can be turned on by adding
`?human=true` to the query string. In this case, in addition to the `"time_in_nanos"` field, the output contains the additional `"time"`
field containing a rounded, human readable time value (e.g. `"time": "391.9ms"`, `"time": "123.3micros"`).

=== Profiling Queries

[NOTE]
@ -244,19 +250,19 @@ The overall structure of this query tree will resemble your original Elasticsear
{
"type": "BooleanQuery",
"description": "message:message message:number",
"time": "1.873811000ms",
"time_in_nanos": "1873811",
"breakdown": {...}, <1>
"children": [
{
"type": "TermQuery",
"description": "message:message",
"time": "0.3919430000ms",
"time_in_nanos": "391943",
"breakdown": {...}
},
{
"type": "TermQuery",
"description": "message:number",
"time": "0.2106820000ms",
"time_in_nanos": "210682",
"breakdown": {...}
}
]
@ -265,9 +271,9 @@ The overall structure of this query tree will resemble your original Elasticsear
--------------------------------------------------
// TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.profile.shards.0.id",\n"searches": [{\n/]
// TESTRESPONSE[s/]$/],"rewrite_time": $body.profile.shards.0.searches.0.rewrite_time, "collector": $body.profile.shards.0.searches.0.collector}], "aggregations": []}]}}/]
// TESTRESPONSE[s/"time": "1.873811000ms",\n.+"breakdown": \{...\}/"time": $body.profile.shards.0.searches.0.query.0.time, "breakdown": $body.profile.shards.0.searches.0.query.0.breakdown/]
// TESTRESPONSE[s/"time": "0.3919430000ms",\n.+"breakdown": \{...\}/"time": $body.profile.shards.0.searches.0.query.0.children.0.time, "breakdown": $body.profile.shards.0.searches.0.query.0.children.0.breakdown/]
// TESTRESPONSE[s/"time": "0.2106820000ms",\n.+"breakdown": \{...\}/"time": $body.profile.shards.0.searches.0.query.0.children.1.time, "breakdown": $body.profile.shards.0.searches.0.query.0.children.1.breakdown/]
// TESTRESPONSE[s/"time_in_nanos": "1873811",\n.+"breakdown": \{...\}/"time_in_nanos": $body.profile.shards.0.searches.0.query.0.time_in_nanos, "breakdown": $body.profile.shards.0.searches.0.query.0.breakdown/]
// TESTRESPONSE[s/"time_in_nanos": "391943",\n.+"breakdown": \{...\}/"time_in_nanos": $body.profile.shards.0.searches.0.query.0.children.0.time_in_nanos, "breakdown": $body.profile.shards.0.searches.0.query.0.children.0.breakdown/]
// TESTRESPONSE[s/"time_in_nanos": "210682",\n.+"breakdown": \{...\}/"time_in_nanos": $body.profile.shards.0.searches.0.query.0.children.1.time_in_nanos, "breakdown": $body.profile.shards.0.searches.0.query.0.children.1.breakdown/]
<1> The breakdown timings are omitted for simplicity

Based on the profile structure, we can see that our `match` query was rewritten by Lucene into a BooleanQuery with two
@ -276,7 +282,7 @@ the equivalent name in Elasticsearch. The `"description"` field displays the Lu
is made available to help differentiate between parts of your query (e.g. both `"message:search"` and `"message:test"`
are TermQuery's and would appear identical otherwise).

The `"time"` field shows that this query took ~15ms for the entire BooleanQuery to execute. The recorded time is inclusive
The `"time_in_nanos"` field shows that this query took ~1.8ms for the entire BooleanQuery to execute. The recorded time is inclusive
of all children.

The `"breakdown"` field will give detailed stats about how the time was spent, we'll look at
@ -305,16 +311,16 @@ The `"breakdown"` component lists detailed timing statistics about low-level Luc
"advance_count": 0
}
--------------------------------------------------
// TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.profile.shards.0.id",\n"searches": [{\n"query": [{\n"type": "BooleanQuery",\n"description": "message:message message:number",\n"time": $body.profile.shards.0.searches.0.query.0.time,/]
// TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.profile.shards.0.id",\n"searches": [{\n"query": [{\n"type": "BooleanQuery",\n"description": "message:message message:number",\n"time_in_nanos": $body.profile.shards.0.searches.0.query.0.time_in_nanos,/]
// TESTRESPONSE[s/}$/},\n"children": $body.profile.shards.0.searches.0.query.0.children}],\n"rewrite_time": $body.profile.shards.0.searches.0.rewrite_time, "collector": $body.profile.shards.0.searches.0.collector}], "aggregations": []}]}}/]
// TESTRESPONSE[s/"score": 51306/"score": $body.profile.shards.0.searches.0.query.0.breakdown.score/]
// TESTRESPONSE[s/"time": "1.873811000ms"/"time": $body.profile.shards.0.searches.0.query.0.time/]
// TESTRESPONSE[s/"time_in_nanos": "1873811"/"time_in_nanos": $body.profile.shards.0.searches.0.query.0.time_in_nanos/]
// TESTRESPONSE[s/"build_scorer": 2935582/"build_scorer": $body.profile.shards.0.searches.0.query.0.breakdown.build_scorer/]
// TESTRESPONSE[s/"create_weight": 919297/"create_weight": $body.profile.shards.0.searches.0.query.0.breakdown.create_weight/]
// TESTRESPONSE[s/"next_doc": 53876/"next_doc": $body.profile.shards.0.searches.0.query.0.breakdown.next_doc/]

Timings are listed in wall-clock nanoseconds and are not normalized at all. All caveats about the overall
`"time"` apply here. The intention of the breakdown is to give you a feel for A) what machinery in Lucene is
`"time_in_nanos"` apply here. The intention of the breakdown is to give you a feel for A) what machinery in Lucene is
actually eating time, and B) the magnitude of differences in times between the various components. Like the overall time,
the breakdown is inclusive of all children times.

@ -401,12 +407,12 @@ Looking at the previous example:
{
"name": "CancellableCollector",
"reason": "search_cancelled",
"time": "0.3043110000ms",
"time_in_nanos": "304311",
"children": [
{
"name": "SimpleTopScoreDocCollector",
"reason": "search_top_hits",
"time": "0.03227300000ms"
"time_in_nanos": "32273"
}
]
}
@ -414,12 +420,12 @@ Looking at the previous example:
--------------------------------------------------
// TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.profile.shards.0.id",\n"searches": [{\n"query": $body.profile.shards.0.searches.0.query,\n"rewrite_time": $body.profile.shards.0.searches.0.rewrite_time,/]
// TESTRESPONSE[s/]$/]}], "aggregations": []}]}}/]
// TESTRESPONSE[s/"time": "0.3043110000ms"/"time": $body.profile.shards.0.searches.0.collector.0.time/]
// TESTRESPONSE[s/"time": "0.03227300000ms"/"time": $body.profile.shards.0.searches.0.collector.0.children.0.time/]
// TESTRESPONSE[s/"time_in_nanos": "304311"/"time_in_nanos": $body.profile.shards.0.searches.0.collector.0.time_in_nanos/]
// TESTRESPONSE[s/"time_in_nanos": "32273"/"time_in_nanos": $body.profile.shards.0.searches.0.collector.0.children.0.time_in_nanos/]

We see a single collector named `SimpleTopScoreDocCollector` wrapped into `CancellableCollector`. `SimpleTopScoreDocCollector` is the default "scoring and sorting"
`Collector` used by Elasticsearch. The `"reason"` field attempts to give a plain English description of the class name. The
`"time"` is similar to the time in the Query tree: a wall-clock time inclusive of all children. Similarly, `children` lists
`"time_in_nanos"` is similar to the time in the Query tree: a wall-clock time inclusive of all children. Similarly, `children` lists
all sub-collectors. The `CancellableCollector` that wraps `SimpleTopScoreDocCollector` is used by Elasticsearch to detect if the current
search was cancelled and stop collecting documents as soon as it occurs.

@ -564,7 +570,7 @@ And the response:
{
"type": "TermQuery",
"description": "my_field:foo",
"time": "0.4094560000ms",
"time_in_nanos": "409456",
"breakdown": {
"score": 0,
"score_count": 1,
@ -583,7 +589,7 @@ And the response:
{
"type": "TermQuery",
"description": "message:search",
"time": "0.3037020000ms",
"time_in_nanos": "303702",
"breakdown": {
"score": 0,
"score_count": 1,
@ -605,24 +611,24 @@ And the response:
{
"name": "MultiCollector",
"reason": "search_multi",
"time": "1.378943000ms",
"time_in_nanos": "1378943",
"children": [
{
"name": "FilteredCollector",
"reason": "search_post_filter",
"time": "0.4036590000ms",
"time_in_nanos": "403659",
"children": [
{
"name": "SimpleTopScoreDocCollector",
"reason": "search_top_hits",
"time": "0.006391000000ms"
"time_in_nanos": "6391"
}
]
},
{
"name": "BucketCollector: [[non_global_term, another_agg]]",
"reason": "aggregation",
"time": "0.9546020000ms"
"time_in_nanos": "954602"
}
]
}
@ -633,7 +639,7 @@ And the response:
{
"type": "MatchAllDocsQuery",
"description": "*:*",
"time": "0.04829300000ms",
"time_in_nanos": "48293",
"breakdown": {
"score": 0,
"score_count": 1,
@ -655,7 +661,7 @@ And the response:
{
"name": "GlobalAggregator: [global_agg]",
"reason": "aggregation_global",
"time": "0.1226310000ms"
"time_in_nanos": "122631"
}
]
}
@ -738,7 +744,7 @@ Which yields the following aggregation profile output
{
"type": "org.elasticsearch.search.aggregations.bucket.terms.GlobalOrdinalsStringTermsAggregator",
"description": "property_type",
"time": "4280.456978ms",
"time_in_nanos": "4280456978",
"breakdown": {
"reduce": 0,
"reduce_count": 0,
@ -753,7 +759,7 @@ Which yields the following aggregation profile output
{
"type": "org.elasticsearch.search.aggregations.metrics.avg.AvgAggregator",
"description": "avg_price",
"time": "1124.864392ms",
"time_in_nanos": "1124864392",
"breakdown": {
"reduce": 0,
"reduce_count": 0,
@ -773,7 +779,7 @@ Which yields the following aggregation profile output
From the profile structure we can see our `property_type` terms aggregation which is internally represented by the
`GlobalOrdinalsStringTermsAggregator` class and the sub aggregator `avg_price` which is internally represented by the `AvgAggregator` class. The `type` field displays the class used internally to represent the aggregation. The `description` field displays the name of the aggregation.

The `"time"` field shows that it took ~4 seconds for the entire aggregation to execute. The recorded time is inclusive
The `"time_in_nanos"` field shows that it took ~4 seconds for the entire aggregation to execute. The recorded time is inclusive
of all children.

The `"breakdown"` field will give detailed stats about how the time was spent, we'll look at