Add parsing from xContent to SearchProfileShardResults and nested classes (#22649)

In preparation for being able to parse SearchResponse from its rest representation
for the java rest client, this adds fromXContent to SearchProfileShardResults and its
nested classes.
This commit is contained in:
Christoph Büscher 2017-01-19 16:29:10 +01:00 committed by GitHub
parent b781a4a176
commit e03554070c
12 changed files with 605 additions and 62 deletions

View File

@ -376,12 +376,17 @@ public class XContentHelper {
}
}
public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType) throws IOException {
return toXContent(toXContent, xContentType, false);
}
/**
* Returns the bytes that represent the XContent output of the provided {@link ToXContent} object, using the provided
* {@link XContentType}. Wraps the output into a new anonymous object.
*/
public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType) throws IOException {
public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType, boolean humanReadable) throws IOException {
try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
builder.humanReadable(humanReadable);
if (toXContent.isFragment()) {
builder.startObject();
}

View File

@ -162,10 +162,9 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
// TODO this would be better done bottom-up instead of top-down to avoid
// calculating the same times over and over...but worth the effort?
long nodeTime = getNodeTime(timings);
String type = getTypeFromElement(element);
String description = getDescriptionFromElement(element);
return new ProfileResult(type, description, timings, childrenProfileResults, nodeTime);
return new ProfileResult(type, description, timings, childrenProfileResults);
}
protected abstract String getTypeFromElement(E element);
@ -184,19 +183,4 @@ public abstract class AbstractInternalProfileTree<PB extends AbstractProfileBrea
tree.set(parent, parentNode);
}
/**
* Internal helper to calculate the time of a node, inclusive of children
*
* @param timings
* A map of breakdown timing for the node
* @return The total time at this node, inclusive of children
*/
private static long getNodeTime(Map<String, Long> timings) {
long nodeTime = 0;
for (long time : timings.values()) {
nodeTime += time;
}
return nodeTime;
}
}

View File

@ -23,8 +23,9 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
@ -32,8 +33,12 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
/**
* This class is the internal representation of a profiled Query, corresponding
* to a single node in the query tree. It is built after the query has finished executing
@ -43,7 +48,7 @@ import java.util.concurrent.TimeUnit;
* Each InternalProfileResult has a List of InternalProfileResults, which will contain
* "children" queries if applicable
*/
public final class ProfileResult implements Writeable, ToXContent {
public final class ProfileResult implements Writeable, ToXContentObject {
private static final ParseField TYPE = new ParseField("type");
private static final ParseField DESCRIPTION = new ParseField("description");
@ -58,13 +63,12 @@ public final class ProfileResult implements Writeable, ToXContent {
private final long nodeTime;
private final List<ProfileResult> children;
public ProfileResult(String type, String description, Map<String, Long> timings, List<ProfileResult> children,
long nodeTime) {
public ProfileResult(String type, String description, Map<String, Long> timings, List<ProfileResult> children) {
this.type = type;
this.description = description;
this.timings = timings;
this.timings = Objects.requireNonNull(timings, "required timings argument missing");
this.children = children;
this.nodeTime = nodeTime;
this.nodeTime = getTotalTime(timings);
}
/**
@ -162,4 +166,65 @@ public final class ProfileResult implements Writeable, ToXContent {
return builder;
}
/**
 * Parses a {@link ProfileResult} back from its rest xContent representation.
 * The parser must be positioned on the {@code START_OBJECT} token of the result.
 * The "time" / "time_in_nanos" fields are consumed but discarded, because the
 * node time is recomputed from the "breakdown" map in the ProfileResult ctor.
 *
 * @param parser the parser positioned at the start of a profile result object
 * @return the parsed ProfileResult
 * @throws IOException if the underlying parser fails or an unknown field is encountered
 */
public static ProfileResult fromXContent(XContentParser parser) throws IOException {
    XContentParser.Token token = parser.currentToken();
    ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
    String currentFieldName = null;
    String type = null, description = null;
    Map<String, Long> timings = new HashMap<>();
    List<ProfileResult> children = new ArrayList<>();
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token.isValue()) {
            if (TYPE.match(currentFieldName)) {
                type = parser.text();
            } else if (DESCRIPTION.match(currentFieldName)) {
                description = parser.text();
            } else if (NODE_TIME.match(currentFieldName)) {
                // skip, total time is calculated by adding up 'timings' values in ProfileResult ctor
                parser.text();
            } else if (NODE_TIME_RAW.match(currentFieldName)) {
                // skip, total time is calculated by adding up 'timings' values in ProfileResult ctor
                parser.longValue();
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.START_OBJECT) {
            if (BREAKDOWN.match(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    // ensureExpectedToken takes the expected token first, then the actual one
                    ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
                    String name = parser.currentName();
                    ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, parser.nextToken(), parser::getTokenLocation);
                    long value = parser.longValue();
                    timings.put(name, value);
                }
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            if (CHILDREN.match(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    children.add(ProfileResult.fromXContent(parser));
                }
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        }
    }
    return new ProfileResult(type, description, timings, children);
}
/**
 * Internal helper that sums all breakdown timing values of a node.
 *
 * @param timings a map of breakdown timing for the node
 * @return The total time at this node
 */
private static long getTotalTime(Map<String, Long> timings) {
    // sum of all breakdown entries; callers guarantee the map is non-null
    return timings.values().stream().mapToLong(Long::longValue).sum();
}
}

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
import org.elasticsearch.search.profile.aggregation.AggregationProfiler;
import org.elasticsearch.search.profile.query.QueryProfileShardResult;
@ -35,6 +36,12 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
/**
* A container class to hold all the profile results across all shards. Internally
@ -42,6 +49,11 @@ import java.util.Map;
*/
public final class SearchProfileShardResults implements Writeable, ToXContent{
private static final String SEARCHES_FIELD = "searches";
private static final String ID_FIELD = "id";
private static final String SHARDS_FIELD = "shards";
public static final String PROFILE_FIELD = "profile";
private Map<String, ProfileShardResult> shardResults;
public SearchProfileShardResults(Map<String, ProfileShardResult> shardResults) {
@ -75,26 +87,73 @@ public final class SearchProfileShardResults implements Writeable, ToXContent{
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("profile").startArray("shards");
for (Map.Entry<String, ProfileShardResult> entry : shardResults.entrySet()) {
builder.startObject(PROFILE_FIELD).startArray(SHARDS_FIELD);
// shardResults is a map, but we print entries in a json array, which is ordered.
// we sort the keys of the map, so that toXContent always prints out the same array order
TreeSet<String> sortedKeys = new TreeSet<>(shardResults.keySet());
for (String key : sortedKeys) {
builder.startObject();
builder.field("id", entry.getKey());
builder.startArray("searches");
for (QueryProfileShardResult result : entry.getValue().getQueryProfileResults()) {
builder.startObject();
builder.field(ID_FIELD, key);
builder.startArray(SEARCHES_FIELD);
ProfileShardResult profileShardResult = shardResults.get(key);
for (QueryProfileShardResult result : profileShardResult.getQueryProfileResults()) {
result.toXContent(builder, params);
builder.endObject();
}
builder.endArray();
entry.getValue().getAggregationProfileResults().toXContent(builder, params);
profileShardResult.getAggregationProfileResults().toXContent(builder, params);
builder.endObject();
}
builder.endArray().endObject();
return builder;
}
/**
 * Parses the "profile" section of a search response. The parser must be
 * positioned on the {@code START_OBJECT} that wraps the "shards" array; each
 * array entry is delegated to {@link #parseSearchProfileResultsEntry}.
 *
 * @param parser the parser positioned on the profile object
 * @return the parsed SearchProfileShardResults
 * @throws IOException if the underlying parser fails or the structure is unexpected
 */
public static SearchProfileShardResults fromXContent(XContentParser parser) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
    Map<String, ProfileShardResult> profileResults = new HashMap<>();
    ensureFieldName(parser, parser.nextToken(), SHARDS_FIELD);
    ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
    while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
        parseSearchProfileResultsEntry(parser, profileResults);
    }
    // consume the END_OBJECT that closes the wrapping "profile" object
    ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
    return new SearchProfileShardResults(profileResults);
}
/**
 * Parses a single entry of the "shards" array (one shard's profile) and adds
 * it to the given results map, keyed by the shard's "id" field.
 *
 * @param parser the parser positioned on the shard entry's START_OBJECT
 * @param searchProfileResults the map the parsed entry is added to
 * @throws IOException if the underlying parser fails or an unknown field/token is encountered
 */
private static void parseSearchProfileResultsEntry(XContentParser parser,
        Map<String, ProfileShardResult> searchProfileResults) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
    String shardId = null;
    List<QueryProfileShardResult> queryResults = new ArrayList<>();
    AggregationProfileShardResult aggResult = null;
    String fieldName = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            fieldName = parser.currentName();
        } else if (token.isValue()) {
            if (ID_FIELD.equals(fieldName)) {
                shardId = parser.text();
            } else {
                throwUnknownField(fieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            if (SEARCHES_FIELD.equals(fieldName)) {
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    queryResults.add(QueryProfileShardResult.fromXContent(parser));
                }
            } else if (AggregationProfileShardResult.AGGREGATIONS.equals(fieldName)) {
                aggResult = AggregationProfileShardResult.fromXContent(parser);
            } else {
                throwUnknownField(fieldName, parser.getTokenLocation());
            }
        } else {
            throwUnknownToken(token, parser.getTokenLocation());
        }
    }
    searchProfileResults.put(shardId, new ProfileShardResult(queryResults, aggResult));
}
/**
* Helper method to convert Profiler into InternalProfileShardResults, which
* can be serialized to other nodes, emitted as JSON, etc.

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.profile.ProfileResult;
import java.io.IOException;
@ -31,12 +32,15 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
* A container class to hold the profile results for a single shard in the request.
* Contains a list of query profiles, a collector tree and a total rewrite tree.
*/
public final class AggregationProfileShardResult implements Writeable, ToXContent {
public static final String AGGREGATIONS = "aggregations";
private final List<ProfileResult> aggProfileResults;
public AggregationProfileShardResult(List<ProfileResult> aggProfileResults) {
@ -69,11 +73,21 @@ public final class AggregationProfileShardResult implements Writeable, ToXConten
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startArray("aggregations");
builder.startArray(AGGREGATIONS);
for (ProfileResult p : aggProfileResults) {
p.toXContent(builder, params);
}
builder.endArray();
return builder;
}
/**
 * Parses the "aggregations" profile array back into an
 * {@link AggregationProfileShardResult}. The parser must be positioned on the
 * array's {@code START_ARRAY} token.
 *
 * @param parser the parser positioned on the aggregations array
 * @return the parsed AggregationProfileShardResult
 * @throws IOException if the underlying parser fails
 */
public static AggregationProfileShardResult fromXContent(XContentParser parser) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation);
    List<ProfileResult> results = new ArrayList<>();
    while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
        results.add(ProfileResult.fromXContent(parser));
    }
    return new AggregationProfileShardResult(results);
}
}

View File

@ -24,19 +24,25 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
/**
* Public interface and serialization container for profiled timings of the
* Collectors used in the search. Children CollectorResult's may be
* embedded inside of a parent CollectorResult
*/
public class CollectorResult implements ToXContent, Writeable {
public class CollectorResult implements ToXContentObject, Writeable {
public static final String REASON_SEARCH_COUNT = "search_count";
public static final String REASON_SEARCH_TOP_HITS = "search_top_hits";
@ -153,4 +159,42 @@ public class CollectorResult implements ToXContent, Writeable {
builder = builder.endObject();
return builder;
}
/**
 * Parses a {@link CollectorResult} from its rest representation. The parser
 * must be positioned on the result's {@code START_OBJECT} token. The
 * human-readable "time" value is consumed but ignored; the raw
 * "time_in_nanos" value is the one that is kept.
 *
 * @param parser the parser positioned on a collector result object
 * @return the parsed CollectorResult
 * @throws IOException if the underlying parser fails or an unknown field/token is encountered
 */
public static CollectorResult fromXContent(XContentParser parser) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
    String fieldName = null;
    String collectorName = null;
    String collectorReason = null;
    long timeNanos = -1;   // -1 until "time_in_nanos" is seen
    List<CollectorResult> childResults = new ArrayList<>();
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            fieldName = parser.currentName();
        } else if (token.isValue()) {
            if (NAME.match(fieldName)) {
                collectorName = parser.text();
            } else if (REASON.match(fieldName)) {
                collectorReason = parser.text();
            } else if (TIME.match(fieldName)) {
                // we need to consume this value, but we use the raw nanosecond value
                parser.text();
            } else if (TIME_NANOS.match(fieldName)) {
                timeNanos = parser.longValue();
            } else {
                throwUnknownField(fieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            if (CHILDREN.match(fieldName)) {
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    childResults.add(CollectorResult.fromXContent(parser));
                }
            } else {
                throwUnknownField(fieldName, parser.getTokenLocation());
            }
        } else {
            throwUnknownToken(token, parser.getTokenLocation());
        }
    }
    return new CollectorResult(collectorName, collectorReason, timeNanos, childResults);
}
}

View File

@ -22,8 +22,9 @@ package org.elasticsearch.search.profile.query;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.profile.ProfileResult;
import java.io.IOException;
@ -31,11 +32,19 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
/**
* A container class to hold the profile results for a single shard in the request.
* Contains a list of query profiles, a collector tree and a total rewrite tree.
*/
public final class QueryProfileShardResult implements Writeable, ToXContent {
public final class QueryProfileShardResult implements Writeable, ToXContentObject {
public static final String COLLECTOR = "collector";
public static final String REWRITE_TIME = "rewrite_time";
public static final String QUERY_ARRAY = "query";
private final List<ProfileResult> queryProfileResults;
@ -90,15 +99,52 @@ public final class QueryProfileShardResult implements Writeable, ToXContent {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startArray("query");
builder.startObject();
builder.startArray(QUERY_ARRAY);
for (ProfileResult p : queryProfileResults) {
p.toXContent(builder, params);
}
builder.endArray();
builder.field("rewrite_time", rewriteTime);
builder.startArray("collector");
builder.field(REWRITE_TIME, rewriteTime);
builder.startArray(COLLECTOR);
profileCollector.toXContent(builder, params);
builder.endArray();
builder.endObject();
return builder;
}
/**
 * Parses a {@link QueryProfileShardResult} from its rest representation. The
 * parser must be positioned on the result's {@code START_OBJECT} token.
 *
 * @param parser the parser positioned on a query profile shard result object
 * @return the parsed QueryProfileShardResult
 * @throws IOException if the underlying parser fails or an unknown field/token is encountered
 */
public static QueryProfileShardResult fromXContent(XContentParser parser) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
    String fieldName = null;
    List<ProfileResult> queryResults = new ArrayList<>();
    long rewriteTime = 0;
    CollectorResult collectorResult = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            fieldName = parser.currentName();
        } else if (token.isValue()) {
            if (REWRITE_TIME.equals(fieldName)) {
                rewriteTime = parser.longValue();
            } else {
                throwUnknownField(fieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            if (QUERY_ARRAY.equals(fieldName)) {
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    queryResults.add(ProfileResult.fromXContent(parser));
                }
            } else if (COLLECTOR.equals(fieldName)) {
                // the collector array holds a single element; the last one wins
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    collectorResult = CollectorResult.fromXContent(parser);
                }
            } else {
                throwUnknownField(fieldName, parser.getTokenLocation());
            }
        } else {
            throwUnknownToken(token, parser.getTokenLocation());
        }
    }
    return new QueryProfileShardResult(queryResults, rewriteTime, collectorResult);
}
}

View File

@ -19,9 +19,12 @@
package org.elasticsearch.search.profile;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -31,38 +34,83 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
public class ProfileResultTests extends ESTestCase {
/**
 * Creates a random {@link ProfileResult} for testing, with a randomly sized
 * breakdown map and (while {@code depth > 0}) up to one randomly nested child.
 *
 * @param depth remaining nesting levels for children
 * @return a randomized ProfileResult
 */
public static ProfileResult createTestItem(int depth) {
    String type = randomAsciiOfLengthBetween(5, 10);
    String description = randomAsciiOfLengthBetween(5, 10);
    int breakdownSize = randomIntBetween(0, 5);
    Map<String, Long> breakdown = new HashMap<>(breakdownSize);
    for (int i = 0; i < breakdownSize; i++) {
        // dividing by the map size keeps the summed node time below Long.MAX_VALUE
        long time = randomNonNegativeLong() / breakdownSize;
        if (randomBoolean()) {
            // also often use "small" values in tests
            time = randomNonNegativeLong() % 10000;
        }
        breakdown.put(randomAsciiOfLengthBetween(5, 10), time); // don't overflow Long.MAX_VALUE;
    }
    int numChildren = depth > 0 ? randomIntBetween(0, 1) : 0;
    List<ProfileResult> children = new ArrayList<>(numChildren);
    for (int i = 0; i < numChildren; i++) {
        children.add(createTestItem(depth - 1));
    }
    return new ProfileResult(type, description, breakdown, children);
}
/**
 * Round-trip test: renders a random ProfileResult to xContent, parses it back
 * and checks that node time and xContent output are equivalent.
 */
public void testFromXContent() throws IOException {
    ProfileResult original = createTestItem(2);
    XContentType xContentType = randomFrom(XContentType.values());
    boolean humanReadable = randomBoolean();
    BytesReference originalBytes = toXContent(original, xContentType, humanReadable);
    ProfileResult parsed;
    try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
        parsed = ProfileResult.fromXContent(parser);
        // fromXContent must consume the whole object and nothing more
        assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
        assertNull(parser.nextToken());
    }
    assertEquals(original.getTime(), parsed.getTime());
    assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
}
public void testToXContent() throws IOException {
List<ProfileResult> children = new ArrayList<>();
children.add(new ProfileResult("child1", "desc1", Collections.emptyMap(), Collections.emptyList(), 100L));
children.add(new ProfileResult("child2", "desc2", Collections.emptyMap(), Collections.emptyList(), 123356L));
Map<String, Long> timings = new HashMap<>();
timings.put("key1", 12345L);
timings.put("key2", 6789L);
ProfileResult result = new ProfileResult("someType", "some description", timings, children, 123456L);
children.add(new ProfileResult("child1", "desc1", Collections.singletonMap("key1", 100L), Collections.emptyList()));
children.add(new ProfileResult("child2", "desc2", Collections.singletonMap("key1", 123356L), Collections.emptyList()));
Map<String, Long> timings3 = new HashMap<>();
timings3.put("key1", 123456L);
timings3.put("key2", 100000L);
ProfileResult result = new ProfileResult("someType", "some description", timings3, children);
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
" \"type\" : \"someType\",\n" +
" \"description\" : \"some description\",\n" +
" \"time_in_nanos\" : 123456,\n" +
" \"time_in_nanos\" : 223456,\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 12345,\n" +
" \"key2\" : 6789\n" +
" \"key1\" : 123456,\n" +
" \"key2\" : 100000\n" +
" },\n" +
" \"children\" : [\n" +
" {\n" +
" \"type\" : \"child1\",\n" +
" \"description\" : \"desc1\",\n" +
" \"time_in_nanos\" : 100,\n" +
" \"breakdown\" : { }\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 100\n" +
" }\n" +
" },\n" +
" {\n" +
" \"type\" : \"child2\",\n" +
" \"description\" : \"desc2\",\n" +
" \"time_in_nanos\" : 123356,\n" +
" \"breakdown\" : { }\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 123356\n" +
" }\n" +
" }\n" +
" ]\n" +
"}", builder.string());
@ -72,11 +120,11 @@ public class ProfileResultTests extends ESTestCase {
assertEquals("{\n" +
" \"type\" : \"someType\",\n" +
" \"description\" : \"some description\",\n" +
" \"time\" : \"123.4micros\",\n" +
" \"time_in_nanos\" : 123456,\n" +
" \"time\" : \"223.4micros\",\n" +
" \"time_in_nanos\" : 223456,\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 12345,\n" +
" \"key2\" : 6789\n" +
" \"key1\" : 123456,\n" +
" \"key2\" : 100000\n" +
" },\n" +
" \"children\" : [\n" +
" {\n" +
@ -84,19 +132,23 @@ public class ProfileResultTests extends ESTestCase {
" \"description\" : \"desc1\",\n" +
" \"time\" : \"100nanos\",\n" +
" \"time_in_nanos\" : 100,\n" +
" \"breakdown\" : { }\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 100\n" +
" }\n" +
" },\n" +
" {\n" +
" \"type\" : \"child2\",\n" +
" \"description\" : \"desc2\",\n" +
" \"time\" : \"123.3micros\",\n" +
" \"time_in_nanos\" : 123356,\n" +
" \"breakdown\" : { }\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 123356\n" +
" }\n" +
" }\n" +
" ]\n" +
"}", builder.string());
result = new ProfileResult("profileName", "some description", Collections.emptyMap(), Collections.emptyList(), 12345678L);
result = new ProfileResult("profileName", "some description", Collections.singletonMap("key1", 12345678L), Collections.emptyList());
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
@ -104,10 +156,13 @@ public class ProfileResultTests extends ESTestCase {
" \"description\" : \"some description\",\n" +
" \"time\" : \"12.3ms\",\n" +
" \"time_in_nanos\" : 12345678,\n" +
" \"breakdown\" : { }\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 12345678\n" +
" }\n" +
"}", builder.string());
result = new ProfileResult("profileName", "some description", Collections.emptyMap(), Collections.emptyList(), 1234567890L);
result = new ProfileResult("profileName", "some description", Collections.singletonMap("key1", 1234567890L),
Collections.emptyList());
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\n" +
@ -115,7 +170,9 @@ public class ProfileResultTests extends ESTestCase {
" \"description\" : \"some description\",\n" +
" \"time\" : \"1.2s\",\n" +
" \"time_in_nanos\" : 1234567890,\n" +
" \"breakdown\" : { }\n" +
" \"breakdown\" : {\n" +
" \"key1\" : 1234567890\n" +
" }\n" +
"}", builder.string());
}
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.profile;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResultTests;
import org.elasticsearch.search.profile.query.QueryProfileShardResult;
import org.elasticsearch.search.profile.query.QueryProfileShardResultTests;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;;
/**
 * Tests xContent round-trip serialization of {@link SearchProfileShardResults}.
 */
public class SearchProfileShardResultsTests extends ESTestCase {

    /**
     * Creates a random SearchProfileShardResults with 0-2 shard entries, each
     * holding 0-2 random query profiles and one random aggregation profile.
     */
    public static SearchProfileShardResults createTestItem() {
        int size = rarely() ? 0 : randomIntBetween(1, 2);
        Map<String, ProfileShardResult> searchProfileResults = new HashMap<>(size);
        for (int i = 0; i < size; i++) {
            List<QueryProfileShardResult> queryProfileResults = new ArrayList<>();
            int queryItems = rarely() ? 0 : randomIntBetween(1, 2);
            for (int q = 0; q < queryItems; q++) {
                queryProfileResults.add(QueryProfileShardResultTests.createTestItem());
            }
            AggregationProfileShardResult aggProfileShardResult = AggregationProfileShardResultTests.createTestItem(1);
            searchProfileResults.put(randomAsciiOfLengthBetween(5, 10),
                    new ProfileShardResult(queryProfileResults, aggProfileShardResult));
        }
        return new SearchProfileShardResults(searchProfileResults);
    }

    /**
     * Round-trip test: renders random shard results to xContent, skips the
     * wrapping "profile" field and parses the contents back.
     */
    public void testFromXContent() throws IOException {
        SearchProfileShardResults shardResult = createTestItem();
        XContentType xContentType = randomFrom(XContentType.values());
        BytesReference originalBytes = toXContent(shardResult, xContentType);
        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
            // ensureExpectedToken takes the expected token first, then the actual one
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            ensureFieldName(parser, parser.nextToken(), SearchProfileShardResults.PROFILE_FIELD);
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            SearchProfileShardResults parsed = SearchProfileShardResults.fromXContent(parser);
            assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType), xContentType);
            assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
            assertNull(parser.nextToken());
        }
    }
}

View File

@ -0,0 +1,87 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.profile.aggregation;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.profile.ProfileResult;
import org.elasticsearch.search.profile.ProfileResultTests;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
public class AggregationProfileShardResultTests extends ESTestCase {

    /**
     * Creates a randomized {@link AggregationProfileShardResult} with 0 to 5 profile entries
     * for xContent round-trip testing.
     *
     * @param depth intended maximum nesting depth of the generated profile tree.
     *              NOTE(review): currently unused — the nested
     *              {@link ProfileResultTests#createTestItem(int)} call is hard-coded to 1;
     *              kept for signature consistency with sibling test helpers.
     *              TODO confirm whether it should be forwarded.
     */
    public static AggregationProfileShardResult createTestItem(int depth) {
        int size = randomIntBetween(0, 5);
        List<ProfileResult> aggProfileResults = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            aggProfileResults.add(ProfileResultTests.createTestItem(1));
        }
        return new AggregationProfileShardResult(aggProfileResults);
    }

    /**
     * Serializes a random instance to a random {@link XContentType}, parses it back via
     * {@link AggregationProfileShardResult#fromXContent} and asserts both serialized forms
     * are equivalent.
     */
    public void testFromXContent() throws IOException {
        AggregationProfileShardResult profileResult = createTestItem(2);
        XContentType xContentType = randomFrom(XContentType.values());
        boolean humanReadable = randomBoolean();
        BytesReference originalBytes = toXContent(profileResult, xContentType, humanReadable);

        AggregationProfileShardResult parsed;
        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
            // ensureExpectedToken takes (expected, actual) — argument order fixed to match
            // the helper's signature and the other profile xContent tests
            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            XContentParserUtils.ensureFieldName(parser, parser.nextToken(), AggregationProfileShardResult.AGGREGATIONS);
            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
            parsed = AggregationProfileShardResult.fromXContent(parser);
            assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
            assertNull(parser.nextToken());
        }
        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
    }

    /**
     * Checks the exact JSON rendering of a fixed, non-random instance.
     */
    public void testToXContent() throws IOException {
        List<ProfileResult> profileResults = new ArrayList<>();
        Map<String, Long> timings = new HashMap<>();
        timings.put("timing1", 2000L);
        timings.put("timing2", 4000L);
        ProfileResult profileResult = new ProfileResult("someType", "someDescription", timings, Collections.emptyList());
        profileResults.add(profileResult);
        AggregationProfileShardResult aggProfileResults = new AggregationProfileShardResult(profileResults);
        BytesReference xContent = toXContent(aggProfileResults, XContentType.JSON);
        assertEquals("{\"aggregations\":["
                + "{\"type\":\"someType\","
                + "\"description\":\"someDescription\","
                + "\"time_in_nanos\":6000,"
                + "\"breakdown\":{\"timing1\":2000,\"timing2\":4000}"
                + "}"
                + "]}", xContent.utf8ToString());
    }
}

View File

@ -19,9 +19,12 @@
package org.elasticsearch.search.profile.query;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -29,8 +32,45 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
public class CollectorResultTests extends ESTestCase {
/**
 * Builds a random {@link CollectorResult} tree: random name/reason strings, a random
 * (often small) collection time, and up to 5 recursively generated children while
 * {@code depth} remains positive.
 */
public static CollectorResult createTestItem(int depth) {
    String collectorName = randomAsciiOfLengthBetween(5, 10);
    String collectorReason = randomAsciiOfLengthBetween(5, 10);
    long collectorTime = randomNonNegativeLong();
    if (randomBoolean()) {
        // also often use relatively "small" values, otherwise we will mostly test huge longs
        collectorTime = collectorTime % 100000;
    }
    // the child count is always drawn, even at depth 0, to keep the random sequence stable
    int childCount = randomIntBetween(0, 5);
    List<CollectorResult> childResults = new ArrayList<>(childCount);
    if (depth > 0) {
        for (int child = 0; child < childCount; child++) {
            childResults.add(createTestItem(depth - 1));
        }
    }
    return new CollectorResult(collectorName, collectorReason, collectorTime, childResults);
}
/**
 * Round-trips a random {@link CollectorResult} through a random {@link XContentType}
 * and asserts the re-serialized form is equivalent to the original bytes.
 */
public void testFromXContent() throws IOException {
    CollectorResult testItem = createTestItem(1);
    XContentType contentType = randomFrom(XContentType.values());
    boolean humanReadable = randomBoolean();
    BytesReference serialized = toXContent(testItem, contentType, humanReadable);

    CollectorResult roundTripped;
    try (XContentParser parser = createParser(contentType.xContent(), serialized)) {
        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
        roundTripped = CollectorResult.fromXContent(parser);
        // the parser must be fully consumed after parsing
        assertNull(parser.nextToken());
    }
    assertToXContentEquivalent(serialized, toXContent(roundTripped, contentType, humanReadable), contentType);
}
public void testToXContent() throws IOException {
List<CollectorResult> children = new ArrayList<>();
children.add(new CollectorResult("child1", "reason1", 100L, Collections.emptyList()));

View File

@ -0,0 +1,68 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.profile.query;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.profile.ProfileResult;
import org.elasticsearch.search.profile.ProfileResultTests;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
public class QueryProfileShardResultTests extends ESTestCase {

    /**
     * Creates a randomized {@link QueryProfileShardResult}: 0 to 5 query profile entries,
     * a random (often small) rewrite time and a random collector tree.
     */
    public static QueryProfileShardResult createTestItem() {
        int size = randomIntBetween(0, 5);
        List<ProfileResult> queryProfileResults = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            queryProfileResults.add(ProfileResultTests.createTestItem(1));
        }
        CollectorResult profileCollector = CollectorResultTests.createTestItem(2);
        long rewriteTime = randomNonNegativeLong();
        if (randomBoolean()) {
            rewriteTime = rewriteTime % 1000; // make sure to often test this with small values too
        }
        return new QueryProfileShardResult(queryProfileResults, rewriteTime, profileCollector);
    }

    /**
     * Serializes a random instance, parses it back via
     * {@link QueryProfileShardResult#fromXContent} and asserts both serialized forms
     * are equivalent.
     */
    public void testFromXContent() throws IOException {
        QueryProfileShardResult profileResult = createTestItem();
        XContentType xContentType = randomFrom(XContentType.values());
        // randomize human-readable rendering too, consistent with the sibling profile tests
        boolean humanReadable = randomBoolean();
        BytesReference originalBytes = toXContent(profileResult, xContentType, humanReadable);

        QueryProfileShardResult parsed;
        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
            // ensureExpectedToken takes (expected, actual) — argument order fixed to match
            // the helper's signature and the other profile xContent tests
            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            parsed = QueryProfileShardResult.fromXContent(parser);
            // fromXContent consumes up to and including the closing END_OBJECT
            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
            assertNull(parser.nextToken());
        }
        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
    }
}