Tests: Add ability to generate random new fields for xContent parsing test (#23437)

For response parsing we want to be lenient when it comes to parsing
new xContent fields. In order to ensure this in our testing, this change
adds a utility method to XContentTestUtils that takes an xContent bytes
representation as input and recursively inserts a random field into each
object level.

Sometimes we also want to exclude a whole subtree from this treatment
(e.g. skipping "_source"); other times an element (e.g. "fields", "highlight"
in SearchHit) can contain arbitrarily named objects. Those cases can be
specified as exceptions.
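A minimal sketch of the intended usage (not part of this change: "FooResponse" and its
fromXContent method are hypothetical placeholders, and the snippet assumes it runs inside an
ESTestCase subclass so that random(), randomFrom() and createParser() are available):

    XContentType xContentType = randomFrom(XContentType.values());
    BytesReference originalBytes = toXContent(response, xContentType, ToXContent.EMPTY_PARAMS, false);
    // exclude the "_source" subtree and the arbitrarily keyed "fields" objects from random insertion
    Predicate<String> excludePaths = path -> path.contains("_source") || path.endsWith("fields");
    BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, excludePaths, random());
    try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
        // a lenient parser skips the unknown fields instead of throwing
        FooResponse parsed = FooResponse.fromXContent(parser);
    }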
Christoph Büscher 2017-06-07 21:01:20 +02:00 committed by GitHub
parent 68f1d4df5a
commit 9e741cd13d
10 changed files with 472 additions and 25 deletions


@ -41,6 +41,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.shard.ShardId;
@ -65,8 +66,8 @@ import static org.elasticsearch.common.lucene.Lucene.writeExplanation;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
import static org.elasticsearch.common.xcontent.XContentParserUtils.parseStoredFieldsValue;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
import static org.elasticsearch.search.fetch.subphase.highlight.HighlightField.readHighlightField;
/**
@ -482,7 +483,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
* of the included search hit. The output of the map is used to create the
* actual SearchHit instance via {@link #createFromMap(Map)}
*/
private static ObjectParser<Map<String, Object>, Void> MAP_PARSER = new ObjectParser<>("innerHitsParser", HashMap::new);
private static ObjectParser<Map<String, Object>, Void> MAP_PARSER = new ObjectParser<>("innerHitParser", true, HashMap::new);
static {
declareInnerHitsParseFields(MAP_PARSER);
@ -614,7 +615,10 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
Map<String, SearchHits> innerHits = new HashMap<>();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
innerHits.put(parser.currentName(), SearchHits.fromXContent(parser));
String name = parser.currentName();
ensureExpectedToken(Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
ensureFieldName(parser, parser.nextToken(), SearchHits.Fields.HITS);
innerHits.put(name, SearchHits.fromXContent(parser));
ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
}
return innerHits;
@ -649,7 +653,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
details.add(parseExplanation(parser));
}
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
parser.skipChildren();
}
}
if (value == null) {
@ -905,8 +909,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
return builder;
}
private static final ConstructingObjectParser<NestedIdentity, Void> PARSER = new ConstructingObjectParser<>(
"nested_identity",
private static final ConstructingObjectParser<NestedIdentity, Void> PARSER = new ConstructingObjectParser<>("nested_identity", true,
ctorArgs -> new NestedIdentity((String) ctorArgs[0], (int) ctorArgs[1], (NestedIdentity) ctorArgs[2]));
static {
PARSER.declareString(constructorArg(), new ParseField(FIELD));


@ -34,7 +34,6 @@ import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
public final class SearchHits implements Streamable, ToXContent, Iterable<SearchHit> {
@ -148,19 +147,21 @@ public final class SearchHits implements Streamable, ToXContent, Iterable<Search
totalHits = parser.longValue();
} else if (Fields.MAX_SCORE.equals(currentFieldName)) {
maxScore = parser.floatValue();
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
}
} else if (token == XContentParser.Token.VALUE_NULL) {
if (Fields.MAX_SCORE.equals(currentFieldName)) {
maxScore = Float.NaN; // NaN gets rendered as null-field
} else {
throwUnknownField(currentFieldName, parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
hits.add(SearchHit.fromXContent(parser));
if (Fields.HITS.equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
hits.add(SearchHit.fromXContent(parser));
}
} else {
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_OBJECT) {
parser.skipChildren();
}
}
SearchHits searchHits = new SearchHits(hits.toArray(new SearchHit[hits.size()]), totalHits,


@ -36,6 +36,7 @@ import java.util.Date;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
public class MainResponseTests extends ESTestCase {
@ -55,8 +56,10 @@ public class MainResponseTests extends ESTestCase {
XContentType xContentType = randomFrom(XContentType.values());
boolean humanReadable = randomBoolean();
BytesReference originalBytes = toShuffledXContent(mainResponse, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
// we add a few random fields to check that the parser is lenient on new fields
BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random());
MainResponse parsed;
try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
parsed = MainResponse.fromXContent(parser);
assertNull(parser.nextToken());
}


@ -49,8 +49,10 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
@ -140,7 +142,6 @@ public class SearchHitTests extends ESTestCase {
boolean humanReadable = randomBoolean();
XContentType xContentType = randomFrom(XContentType.values());
BytesReference originalBytes = toShuffledXContent(searchHit, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
SearchHit parsed;
try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
parser.nextToken(); // jump to first START_OBJECT
@ -151,6 +152,33 @@ public class SearchHitTests extends ESTestCase {
assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
}
/**
* This test adds randomized fields to all json objects and checks that we can still parse the
* result, to ensure the parsing is lenient for forward compatibility.
* We need to exclude json objects with the "highlight" and "fields" field names since these
* objects allow arbitrary keys (the names of the fields that were queried). Also we don't want
* to insert anything under "_source" since it is not parsed, and we avoid complexity by excluding
* everything under "inner_hits"; those are also keyed by arbitrary names and contain SearchHits,
* which are already tested elsewhere.
*/
public void testFromXContentLenientParsing() throws IOException {
SearchHit searchHit = createTestItem(true);
XContentType xContentType = randomFrom(XContentType.values());
BytesReference originalBytes = toXContent(searchHit, xContentType, true);
Predicate<String> pathsToExclude = path -> (path.endsWith("highlight") || path.endsWith("fields") || path.contains("_source")
|| path.contains("inner_hits"));
BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random());
SearchHit parsed;
try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
parser.nextToken(); // jump to first START_OBJECT
parsed = SearchHit.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertNull(parser.nextToken());
}
assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, true), xContentType);
}
/**
* When e.g. with "stored_fields": "_none_", only "_index" and "_score" are returned.
*/


@ -30,8 +30,10 @@ import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.function.Predicate;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
public class SearchHitsTests extends ESTestCase {
@ -54,6 +56,10 @@ public class SearchHitsTests extends ESTestCase {
BytesReference originalBytes = toShuffledXContent(searchHits, xcontentType, ToXContent.EMPTY_PARAMS, humanReadable);
SearchHits parsed;
try (XContentParser parser = createParser(xcontentType.xContent(), originalBytes)) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(SearchHits.Fields.HITS, parser.currentName());
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
parsed = SearchHits.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
@ -62,6 +68,35 @@ public class SearchHitsTests extends ESTestCase {
assertToXContentEquivalent(originalBytes, toXContent(parsed, xcontentType, humanReadable), xcontentType);
}
/**
* This test adds randomized fields to all json objects and checks that we
* can still parse the result, to ensure the parsing is lenient for forward
* compatibility. We need to exclude json objects with the "highlight" and
* "fields" field names since these objects allow arbitrary keys (the names
* of the fields that were queried). Also we don't want to insert anything
* under "_source" since it is not parsed.
*/
public void testFromXContentLenientParsing() throws IOException {
SearchHits searchHits = createTestItem();
XContentType xcontentType = randomFrom(XContentType.values());
BytesReference originalBytes = toXContent(searchHits, xcontentType, ToXContent.EMPTY_PARAMS, true);
Predicate<String> pathsToExclude = path -> (path.isEmpty() || path.endsWith("highlight") || path.endsWith("fields")
|| path.contains("_source"));
BytesReference withRandomFields = insertRandomFields(xcontentType, originalBytes, pathsToExclude, random());
SearchHits parsed = null;
try (XContentParser parser = createParser(xcontentType.xContent(), withRandomFields)) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(SearchHits.Fields.HITS, parser.currentName());
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
parsed = SearchHits.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
assertNull(parser.nextToken());
}
assertToXContentEquivalent(originalBytes, toXContent(parsed, xcontentType, true), xcontentType);
}
public void testToXContent() throws IOException {
SearchHit[] hits = new SearchHit[] {
new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()),


@ -97,7 +97,6 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.MockSearchService;
import org.elasticsearch.test.junit.listeners.LoggingListener;
@ -139,12 +138,9 @@ import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;


@ -19,17 +19,30 @@
package org.elasticsearch.test;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.rest.yaml.ObjectPath;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Stack;
import java.util.function.Predicate;
import java.util.function.Supplier;
import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiOfLength;
import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;
import static org.elasticsearch.common.xcontent.XContentHelper.createParser;
public final class XContentTestUtils {
private XContentTestUtils() {
@ -123,4 +136,162 @@ public final class XContentTestUtils {
}
}
/**
* This method takes the input xContent data and adds a random field value, inner object or array into each
* json object. This can e.g. be used to check that parsers of the original xContent also handle the
* augmented xContent correctly, for example when testing lenient parsing.
*
* If parts of the xContent should be excluded from this treatment (e.g. whole subtrees), an optional
* filtering {@link Predicate} can be supplied that is checked against every xContent path where an
* insertion is possible.
*
* The predicate receives the path we want to insert to and should return <tt>true</tt> if that path
* should be excluded. Paths are dot separated concatenations of field names and array indices, so e.g. in:
*
* <pre>
* {
* "foo1 : {
* "bar" : [
* { ... },
* { ... },
* {
* "baz" : {
* // insert here
* }
* }
* ]
* }
* }
* </pre>
*
* "foo1.bar.2.baz" would point to the desired insert location.
*
* To exclude inserting into the "foo1" object we would use a {@link Predicate} like
* <pre>
* {@code
* (path) -> path.endsWith("foo1")
* }
* </pre>
*
* or if we don't want any random insertions in the "foo1" tree we could use
* <pre>
* {@code
* (path) -> path.contains("foo1")
* }
* </pre>
*/
public static BytesReference insertRandomFields(XContentType contentType, BytesReference xContent, Predicate<String> excludeFilter,
Random random) throws IOException {
List<String> insertPaths;
// we can use NamedXContentRegistry.EMPTY here because we only traverse the xContent once and don't use it
try (XContentParser parser = createParser(NamedXContentRegistry.EMPTY, xContent, contentType)) {
parser.nextToken();
List<String> possiblePaths = XContentTestUtils.getInsertPaths(parser, new Stack<>());
if (excludeFilter == null) {
insertPaths = possiblePaths;
} else {
insertPaths = new ArrayList<>();
possiblePaths.stream().filter(excludeFilter.negate()).forEach(insertPaths::add);
}
}
try (XContentParser parser = createParser(NamedXContentRegistry.EMPTY, xContent, contentType)) {
Supplier<Object> value = () -> {
if (random.nextBoolean()) {
return RandomObjects.randomStoredFieldValues(random, contentType);
} else {
if (random.nextBoolean()) {
return Collections.singletonMap(randomAsciiOfLength(random, 10), randomAsciiOfLength(random, 10));
} else {
return Collections.singletonList(randomAsciiOfLength(random, 10));
}
}
};
return XContentTestUtils
.insertIntoXContent(contentType.xContent(), xContent, insertPaths, () -> randomAsciiOfLength(random, 10), value)
.bytes();
}
}
/**
* This utility method takes an XContentParser and walks the xContent structure to find all
* possible paths to where a new object or array starts. This can be used in tests that add random
* xContent values to test parsing code for errors or to check their robustness against new fields.
*
* The path uses dot separated fieldnames and numbers for array indices, similar to what we do in
* {@link ObjectPath}.
*
* The {@link Stack} passed in should initially be empty; it gets pushed to and popped again by the recursive calls.
*
* As an example, the following json xContent:
* <pre>
* {
* "foo" : "bar",
* "foo1" : [ 1, { "foo2" : "baz" }, 3, 4]
* "foo3" : {
* "foo4" : {
* "foo5": "buzz"
* }
* }
* }
* </pre>
*
* Would return the following list:
*
* <ul>
* <li>"" (the empty string is the path to the root object)</li>
* <li>"foo1.1"</li>
* <li>"foo3</li>
* <li>"foo3.foo4</li>
* </ul>
*/
static List<String> getInsertPaths(XContentParser parser, Stack<String> currentPath) throws IOException {
assert parser.currentToken() == XContentParser.Token.START_OBJECT || parser.currentToken() == XContentParser.Token.START_ARRAY :
"should only be called when new objects or arrays start";
List<String> validPaths = new ArrayList<>();
// parser.currentName() can be null for root object and unnamed objects in arrays
if (parser.currentName() != null) {
currentPath.push(parser.currentName());
}
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
validPaths.add(String.join(".", currentPath.toArray(new String[currentPath.size()])));
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
if (parser.currentToken() == XContentParser.Token.START_OBJECT
|| parser.currentToken() == XContentParser.Token.START_ARRAY) {
validPaths.addAll(getInsertPaths(parser, currentPath));
}
}
} else if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
int itemCount = 0;
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
if (parser.currentToken() == XContentParser.Token.START_OBJECT
|| parser.currentToken() == XContentParser.Token.START_ARRAY) {
currentPath.push(Integer.toString(itemCount));
validPaths.addAll(getInsertPaths(parser, currentPath));
currentPath.pop();
}
itemCount++;
}
}
if (parser.currentName() != null) {
currentPath.pop();
}
return validPaths;
}
/**
* Inserts key/value pairs into xContent passed in as {@link BytesReference} and returns a new {@link XContentBuilder}.
* The paths argument uses dot separated fieldnames and numbers for array indices, similar to what we do in
* {@link ObjectPath}.
* The key/value arguments can be suppliers that either return fixed or random values.
*/
static XContentBuilder insertIntoXContent(XContent xContent, BytesReference original, List<String> paths, Supplier<String> key,
Supplier<Object> value) throws IOException {
ObjectPath object = ObjectPath.createFromXContent(xContent, original);
for (String path : paths) {
Map<String, Object> insertMap = object.evaluate(path);
insertMap.put(key.get(), value.get());
}
return object.toXContentBuilder(xContent);
}
}


@ -24,6 +24,7 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
@ -148,4 +149,20 @@ public class ObjectPath {
return list.toArray(new String[list.size()]);
}
/**
* Create a new {@link XContentBuilder} from the xContent object underlying this {@link ObjectPath}.
* This only works for {@link ObjectPath} instances created from an xContent object, not from nested
* substructures. We throw an {@link UnsupportedOperationException} in those cases.
*/
@SuppressWarnings("unchecked")
public XContentBuilder toXContentBuilder(XContent xContent) throws IOException {
XContentBuilder builder = XContentBuilder.builder(xContent);
if (this.object instanceof Map) {
builder.map((Map<String, Object>) this.object);
} else {
throw new UnsupportedOperationException("Only ObjectPath created from a map supported.");
}
return builder;
}
}


@ -0,0 +1,188 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.function.Predicate;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.instanceOf;
public class XContentTestUtilsTests extends ESTestCase {
public void testGetInsertPaths() throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder();
builder.startObject();
{
builder.field("field1", "value");
builder.startArray("list1");
{
builder.value(0);
builder.value(1);
builder.startObject();
builder.endObject();
builder.value(3);
builder.startObject();
builder.endObject();
}
builder.endArray();
builder.startObject("inner1");
{
builder.field("inner1field1", "value");
builder.startObject("inner2");
{
builder.field("inner2field1", "value");
}
builder.endObject();
}
builder.endObject();
}
builder.endObject();
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, builder.bytes(), builder.contentType())) {
parser.nextToken();
List<String> insertPaths = XContentTestUtils.getInsertPaths(parser, new Stack<>());
assertEquals(5, insertPaths.size());
assertThat(insertPaths, hasItem(equalTo("")));
assertThat(insertPaths, hasItem(equalTo("list1.2")));
assertThat(insertPaths, hasItem(equalTo("list1.4")));
assertThat(insertPaths, hasItem(equalTo("inner1")));
assertThat(insertPaths, hasItem(equalTo("inner1.inner2")));
}
}
@SuppressWarnings("unchecked")
public void testInsertIntoXContent() throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder();
builder.startObject();
builder.endObject();
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList(""),
() -> "inner1", () -> new HashMap<>());
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList(""),
() -> "field1", () -> "value1");
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList("inner1"),
() -> "inner2", () -> new HashMap<>());
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList("inner1"),
() -> "field2", () -> "value2");
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, builder.bytes(), builder.contentType())) {
Map<String, Object> map = parser.map();
assertEquals(2, map.size());
assertEquals("value1", map.get("field1"));
assertThat(map.get("inner1"), instanceOf(Map.class));
Map<String, Object> innerMap = (Map<String, Object>) map.get("inner1");
assertEquals(2, innerMap.size());
assertEquals("value2", innerMap.get("field2"));
assertThat(innerMap.get("inner2"), instanceOf(Map.class));
assertEquals(0, ((Map<String, Object>) innerMap.get("inner2")).size());
}
}
@SuppressWarnings("unchecked")
public void testInsertRandomXContent() throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
{
builder.startObject("foo");
{
builder.field("bar", 1);
}
builder.endObject();
builder.startObject("foo1");
{
builder.startObject("foo2");
{
builder.field("buzz", 1);
}
builder.endObject();
}
builder.endObject();
builder.field("foo3", 2);
builder.startArray("foo4");
{
builder.startObject();
{
builder.field("foo5", 1);
}
builder.endObject();
}
builder.endArray();
}
builder.endObject();
Map<String, Object> resultMap;
try (XContentParser parser = createParser(XContentType.JSON.xContent(),
insertRandomFields(builder.contentType(), builder.bytes(), null, random()))) {
resultMap = parser.map();
}
assertEquals(5, resultMap.keySet().size());
assertEquals(2, ((Map<String, Object>) resultMap.get("foo")).keySet().size());
Map<String, Object> foo1 = (Map<String, Object>) resultMap.get("foo1");
assertEquals(2, foo1.keySet().size());
assertEquals(2, ((Map<String, Object>) foo1.get("foo2")).keySet().size());
List<Object> foo4List = (List<Object>) resultMap.get("foo4");
assertEquals(1, foo4List.size());
assertEquals(2, ((Map<String, Object>) foo4List.get(0)).keySet().size());
Predicate<String> pathsToExclude = path -> path.endsWith("foo1");
try (XContentParser parser = createParser(XContentType.JSON.xContent(),
insertRandomFields(builder.contentType(), builder.bytes(), pathsToExclude, random()))) {
resultMap = parser.map();
}
assertEquals(5, resultMap.keySet().size());
assertEquals(2, ((Map<String, Object>) resultMap.get("foo")).keySet().size());
foo1 = (Map<String, Object>) resultMap.get("foo1");
assertEquals(1, foo1.keySet().size());
assertEquals(2, ((Map<String, Object>) foo1.get("foo2")).keySet().size());
foo4List = (List<Object>) resultMap.get("foo4");
assertEquals(1, foo4List.size());
assertEquals(2, ((Map<String, Object>) foo4List.get(0)).keySet().size());
pathsToExclude = path -> path.contains("foo1");
try (XContentParser parser = createParser(XContentType.JSON.xContent(),
insertRandomFields(builder.contentType(), builder.bytes(), pathsToExclude, random()))) {
resultMap = parser.map();
}
assertEquals(5, resultMap.keySet().size());
assertEquals(2, ((Map<String, Object>) resultMap.get("foo")).keySet().size());
foo1 = (Map<String, Object>) resultMap.get("foo1");
assertEquals(1, foo1.keySet().size());
assertEquals(1, ((Map<String, Object>) foo1.get("foo2")).keySet().size());
foo4List = (List<Object>) resultMap.get("foo4");
assertEquals(1, foo4List.size());
assertEquals(2, ((Map<String, Object>) foo4List.get(0)).keySet().size());
}
}


@ -20,6 +20,7 @@
package org.elasticsearch.test.test;
import junit.framework.AssertionFailedError;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@ -97,16 +98,20 @@ public class ESTestCaseTests extends ESTestCase {
builder.field("field2", "value2");
{
builder.startObject("object1");
builder.field("inner1", "value1");
builder.field("inner2", "value2");
builder.field("inner3", "value3");
{
builder.field("inner1", "value1");
builder.field("inner2", "value2");
builder.field("inner3", "value3");
}
builder.endObject();
}
{
builder.startObject("object2");
builder.field("inner4", "value4");
builder.field("inner5", "value5");
builder.field("inner6", "value6");
{
builder.field("inner4", "value4");
builder.field("inner5", "value5");
builder.field("inner6", "value6");
}
builder.endObject();
}
}