[Tests] Check QueryProfileShardResult parser robustness for new fields (#25130)

When parsing responses on the client side we should ignore any new, unknown
fields or inner objects in most cases, so that the client stays forward
compatible with changes in core. This change adds tests for this for
QueryProfileShardResult and its nested substructures, and changes the parsing
code where necessary so it can ignore new fields and objects in the xContent.
Christoph Büscher 2017-06-08 16:40:00 +02:00 committed by GitHub
parent 4a8c09c5f1
commit a0afa917ac
7 changed files with 99 additions and 40 deletions
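All seven files below converge on the same pattern: where the old parsing code called throwUnknownField or throwUnknownToken on anything it did not recognize, the new code calls parser.skipChildren(), which skips over an entire unknown object or array (and is a no-op on a simple value). The following is a minimal sketch of that loop shape, not taken from any single changed file; the KNOWN_VALUE and KNOWN_OBJECT ParseFields are placeholders for whatever fields a given class actually declares.

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

class LenientParsingSketch {
    // placeholders standing in for the ParseFields each changed class really declares
    private static final ParseField KNOWN_VALUE = new ParseField("known_value");
    private static final ParseField KNOWN_OBJECT = new ParseField("known_object");

    static void parse(XContentParser parser) throws IOException {
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                if (KNOWN_VALUE.match(currentFieldName)) {
                    parser.longValue(); // consume the value we understand
                } else {
                    parser.skipChildren(); // no-op for simple values: the unknown field is ignored
                }
            } else if (token == XContentParser.Token.START_OBJECT
                    || token == XContentParser.Token.START_ARRAY) {
                if (KNOWN_OBJECT.match(currentFieldName)) {
                    parser.skipChildren(); // a real parser descends into the known sub-structure here
                } else {
                    parser.skipChildren(); // unknown object or array: skip up to its end token
                }
            }
        }
    }
}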

ProfileResult.java

@ -37,7 +37,6 @@ import java.util.Objects;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
/**
* This class is the internal representation of a profiled Query, corresponding
@ -50,12 +49,12 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknown
*/
public final class ProfileResult implements Writeable, ToXContentObject {
private static final ParseField TYPE = new ParseField("type");
private static final ParseField DESCRIPTION = new ParseField("description");
private static final ParseField NODE_TIME = new ParseField("time");
private static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos");
private static final ParseField CHILDREN = new ParseField("children");
private static final ParseField BREAKDOWN = new ParseField("breakdown");
static final ParseField TYPE = new ParseField("type");
static final ParseField DESCRIPTION = new ParseField("description");
static final ParseField NODE_TIME = new ParseField("time");
static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos");
static final ParseField CHILDREN = new ParseField("children");
static final ParseField BREAKDOWN = new ParseField("breakdown");
private final String type;
private final String description;
@ -188,7 +187,7 @@ public final class ProfileResult implements Writeable, ToXContentObject {
// skip, total time is calculated by adding up 'timings' values in ProfileResult ctor
parser.longValue();
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (BREAKDOWN.match(currentFieldName)) {
@ -200,7 +199,7 @@ public final class ProfileResult implements Writeable, ToXContentObject {
timings.put(name, value);
}
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (CHILDREN.match(currentFieldName)) {
@ -208,7 +207,7 @@ public final class ProfileResult implements Writeable, ToXContentObject {
children.add(ProfileResult.fromXContent(parser));
}
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
parser.skipChildren();
}
}
}
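To illustrate what this buys in practice, below is a hypothetical profile entry that a newer server might emit, containing a "future_field" the client does not know about; with the change above, ProfileResult.fromXContent() is expected to skip it instead of throwing. The extra field, the sample values, and the 2017-era createParser(NamedXContentRegistry, String) overload are assumptions made for this sketch, not part of the commit.

import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.profile.ProfileResult;

import java.io.IOException;

class ProfileResultForwardCompatSketch {
    public static void main(String[] args) throws IOException {
        // "future_field" stands in for anything a later core version might add to the response
        String json = "{"
                + "\"type\": \"TermQuery\","
                + "\"description\": \"foo:bar\","
                + "\"time_in_nanos\": 11972972,"
                + "\"future_field\": {\"anything\": [1, 2, 3]},"
                + "\"breakdown\": {\"score\": 0, \"next_doc\": 2}"
                + "}";
        // assumed overload; later versions take additional arguments
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, json)) {
            parser.nextToken(); // position the parser on START_OBJECT, as fromXContent expects
            ProfileResult result = ProfileResult.fromXContent(parser);
            // the unknown "future_field" object is silently skipped; result holds only the known fields
        }
    }
}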

SearchProfileShardResults.java

@ -39,9 +39,6 @@ import java.util.Map;
import java.util.TreeSet;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
/**
* A container class to hold all the profile results across all shards. Internally
@ -111,12 +108,19 @@ public final class SearchProfileShardResults implements Writeable, ToXContent{
XContentParser.Token token = parser.currentToken();
ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
Map<String, ProfileShardResult> searchProfileResults = new HashMap<>();
ensureFieldName(parser, parser.nextToken(), SHARDS_FIELD);
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
parseSearchProfileResultsEntry(parser, searchProfileResults);
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.START_ARRAY) {
if (SHARDS_FIELD.equals(parser.currentName())) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
parseSearchProfileResultsEntry(parser, searchProfileResults);
}
} else {
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_OBJECT) {
parser.skipChildren();
}
}
ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
return new SearchProfileShardResults(searchProfileResults);
}
@ -135,7 +139,7 @@ public final class SearchProfileShardResults implements Writeable, ToXContent{
if (ID_FIELD.equals(currentFieldName)) {
id = parser.text();
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (SEARCHES_FIELD.equals(currentFieldName)) {
@ -145,10 +149,10 @@ public final class SearchProfileShardResults implements Writeable, ToXContent{
} else if (AggregationProfileShardResult.AGGREGATIONS.equals(currentFieldName)) {
aggProfileShardResult = AggregationProfileShardResult.fromXContent(parser);
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
parser.skipChildren();
}
} else {
throwUnknownToken(token, parser.getTokenLocation());
parser.skipChildren();
}
}
searchProfileResults.put(id, new ProfileShardResult(queryProfileResults, aggProfileShardResult));

CollectorResult.java

@ -34,8 +34,6 @@ import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
/**
* Public interface and serialization container for profiled timings of the
@ -181,7 +179,7 @@ public class CollectorResult implements ToXContentObject, Writeable {
} else if (TIME_NANOS.match(currentFieldName)) {
time = parser.longValue();
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (CHILDREN.match(currentFieldName)) {
@ -189,10 +187,10 @@ public class CollectorResult implements ToXContentObject, Writeable {
children.add(CollectorResult.fromXContent(parser));
}
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
parser.skipChildren();
}
} else {
throwUnknownToken(token, parser.getTokenLocation());
parser.skipChildren();
}
}
return new CollectorResult(name, reason, time, children);

QueryProfileShardResult.java

@ -33,8 +33,6 @@ import java.util.Collections;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
/**
* A container class to hold the profile results for a single shard in the request.
@ -127,7 +125,7 @@ public final class QueryProfileShardResult implements Writeable, ToXContentObjec
if (REWRITE_TIME.equals(currentFieldName)) {
rewriteTime = parser.longValue();
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (QUERY_ARRAY.equals(currentFieldName)) {
@ -139,10 +137,10 @@ public final class QueryProfileShardResult implements Writeable, ToXContentObjec
collector = CollectorResult.fromXContent(parser);
}
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
parser.skipChildren();
}
} else {
throwUnknownToken(token, parser.getTokenLocation());
parser.skipChildren();
}
}
return new QueryProfileShardResult(queryProfileResults, rewriteTime, collector);

ProfileResultTests.java

@ -33,9 +33,11 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
public class ProfileResultTests extends ESTestCase {
@ -62,12 +64,32 @@ public class ProfileResultTests extends ESTestCase {
}
public void testFromXContent() throws IOException {
doFromXContentTestWithRandomFields(false);
}
/**
* This test adds random fields and objects to the xContent rendered out to ensure we can parse it
* back to be forward compatible with additions to the xContent
*/
public void testFromXContentWithRandomFields() throws IOException {
doFromXContentTestWithRandomFields(true);
}
private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
ProfileResult profileResult = createTestItem(2);
XContentType xContentType = randomFrom(XContentType.values());
boolean humanReadable = randomBoolean();
BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
BytesReference mutated;
if (addRandomFields) {
// "breakdown" just consists of key/value pairs, we shouldn't add anything random there
Predicate<String> excludeFilter = (s) -> s.endsWith(ProfileResult.BREAKDOWN.getPreferredName());
mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
} else {
mutated = originalBytes;
}
ProfileResult parsed;
try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
parsed = ProfileResult.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());

SearchProfileShardResultsTests.java

@ -34,10 +34,12 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;;
public class SearchProfileShardResultsTests extends ESTestCase {
@ -58,20 +60,43 @@ public class SearchProfileShardResultsTests extends ESTestCase {
}
public void testFromXContent() throws IOException {
doFromXContentTestWithRandomFields(false);
}
/**
* This test adds random fields and objects to the xContent rendered out to ensure we can parse it
* back to be forward compatible with additions to the xContent
*/
public void testFromXContentWithRandomFields() throws IOException {
doFromXContentTestWithRandomFields(true);
}
private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
SearchProfileShardResults shardResult = createTestItem();
XContentType xContentType = randomFrom(XContentType.values());
boolean humanReadable = randomBoolean();
BytesReference originalBytes = toShuffledXContent(shardResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
BytesReference mutated;
if (addRandomFields) {
// The ProfileResults "breakdown" section just consists of key/value pairs, we shouldn't add anything random there
// also we don't want to insert into the root object here, it's just the PROFILE_FIELD itself
Predicate<String> excludeFilter = (s) -> (s.isEmpty() || s.endsWith(ProfileResult.BREAKDOWN.getPreferredName()));
mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
} else {
mutated = originalBytes;
}
SearchProfileShardResults parsed;
try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
ensureExpectedToken(parser.nextToken(), XContentParser.Token.START_OBJECT, parser::getTokenLocation);
ensureFieldName(parser, parser.nextToken(), SearchProfileShardResults.PROFILE_FIELD);
ensureExpectedToken(parser.nextToken(), XContentParser.Token.START_OBJECT, parser::getTokenLocation);
parsed = SearchProfileShardResults.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
assertNull(parser.nextToken());
}
assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
}
}
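A note on insertRandomFields, since the tests above lean on it: the Predicate<String> argument is consulted with the path of each object into which a random field could be inserted, and returning true excludes that object; as the comment above implies, the empty string addresses the root object and paths end with the field name of the object they point to. A short sketch of an exclude filter built on that inferred contract (the exact path format is an assumption, not spelled out in this diff):

import java.util.function.Predicate;

// Inferred from the tests above rather than from insertRandomFields' documentation:
// skip the root object (nothing should be inserted next to PROFILE_FIELD) and any
// object whose path ends in "breakdown", because "breakdown" is a free-form
// key/value map where an extra random key would change the parsed result.
Predicate<String> excludeFilter = path ->
        path.isEmpty() || path.endsWith("breakdown");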

CollectorResultTests.java

@ -34,6 +34,7 @@ import java.util.List;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
public class CollectorResultTests extends ESTestCase {
@ -57,18 +58,30 @@ public class CollectorResultTests extends ESTestCase {
}
public void testFromXContent() throws IOException {
doFromXContentTestWithRandomFields(false);
}
public void testFromXContentWithRandomFields() throws IOException {
doFromXContentTestWithRandomFields(true);
}
private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
CollectorResult collectorResult = createTestItem(1);
XContentType xContentType = randomFrom(XContentType.values());
boolean humanReadable = randomBoolean();
BytesReference originalBytes = toShuffledXContent(collectorResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
CollectorResult parsed;
try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
parsed = CollectorResult.fromXContent(parser);
assertNull(parser.nextToken());
BytesReference mutated;
if (addRandomFields) {
mutated = insertRandomFields(xContentType, originalBytes, null, random());
} else {
mutated = originalBytes;
}
try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
CollectorResult parsed = CollectorResult.fromXContent(parser);
assertNull(parser.nextToken());
assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
}
assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
}
public void testToXContent() throws IOException {