[Tests] Check that parsing aggregations works in a forward compatible way (#25219)
This change adds tests for aggregation parsing that simulate forward compatible parsing: existing aggregations must still parse when the xContent response later gains new fields or substructures, which are simply ignored.
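The pattern the tests exercise can be summarized in a short sketch. This is an illustration only, not code from the commit; the class and field names below are made up, but the XContentParser calls (currentName(), longValue(), skipChildren()) are the real API the change relies on.

// Illustrative sketch of forward compatible parsing: consume the fields we know,
// skip anything a future version might add (unknown fields, objects or arrays).
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

class ForwardCompatibleParsingExample {

    static long parseDocCountOnly(XContentParser parser) throws IOException {
        // assumes the parser is positioned on the START_OBJECT of the aggregation
        long docCount = 0;
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue() && "doc_count".equals(currentFieldName)) {
                docCount = parser.longValue(); // a field this version knows about
            } else {
                // a newly added field, object or array from a future version: ignore it
                parser.skipChildren();
            }
        }
        return docCount;
    }
}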
parent fde6f72cb5
commit e99ced06cc
@@ -23,10 +23,10 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.rest.action.search.RestSearchAction;

import java.io.IOException;
import java.util.Locale;
import java.util.function.Consumer;
import java.util.function.Supplier;

/**

@@ -115,29 +115,39 @@ public final class XContentParserUtils {
 * (ex: terms#foo where "terms" refers to the type of a registered {@link NamedXContentRegistry.Entry},
 * "#" is the delimiter and "foo" the name of the object to parse).
 *
 * It also expected that following this field name is either an Object or an array xContent structure and
 * the cursor points to the start token of this structure.
 *
 * The method splits the field's name to extract the type and name and then parses the object
 * using the {@link XContentParser#namedObject(Class, String, Object)} method.
 *
 * @param parser the current {@link XContentParser}
 * @param delimiter the delimiter to use to splits the field's name
 * @param objectClass the object class of the object to parse
 * @param consumer something to consume the parsed object
 * @param <T> the type of the object to parse
 * @return the parsed object
 * @throws IOException if anything went wrong during parsing or if the type or name cannot be derived
 *         from the field's name
 * @throws ParsingException if the parser isn't positioned on either START_OBJECT or START_ARRAY at the beginning
 */
public static <T> T parseTypedKeysObject(XContentParser parser, String delimiter, Class<T> objectClass) throws IOException {
public static <T> void parseTypedKeysObject(XContentParser parser, String delimiter, Class<T> objectClass, Consumer<T> consumer)
        throws IOException {
    if (parser.currentToken() != XContentParser.Token.START_OBJECT && parser.currentToken() != XContentParser.Token.START_ARRAY) {
        throwUnknownToken(parser.currentToken(), parser.getTokenLocation());
    }
    String currentFieldName = parser.currentName();
    if (Strings.hasLength(currentFieldName)) {
        int position = currentFieldName.indexOf(delimiter);
        if (position > 0) {
            String type = currentFieldName.substring(0, position);
            String name = currentFieldName.substring(position + 1);
            return parser.namedObject(objectClass, type, name);
            consumer.accept(parser.namedObject(objectClass, type, name));
            return;
        }
        // if we didn't find a delimiter we ignore the object or array for forward compatibility instead of throwing an error
        parser.skipChildren();
    } else {
        throw new ParsingException(parser.getTokenLocation(), "Failed to parse object: empty key");
    }
    throw new ParsingException(parser.getTokenLocation(), "Cannot parse object of class [" + objectClass.getSimpleName()
        + "] without type information. Set [" + RestSearchAction.TYPED_KEYS_PARAM + "] parameter on the request to ensure the"
        + " type information is added to the response output");
}
}
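For reference, a minimal sketch of how a caller adopts the new consumer-based signature; it mirrors the Aggregations.fromXContent change further down (a SetOnce plus a method reference), with the error message shortened for brevity.

// Sketch of calling the new consumer-based parseTypedKeysObject (assumes the parser
// is positioned on the START_OBJECT of a "type#name" keyed aggregation).
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.Aggregation;

import java.io.IOException;

import static org.elasticsearch.common.xcontent.XContentParserUtils.parseTypedKeysObject;

class TypedKeysCallerExample {

    static Aggregation parseOneAggregation(XContentParser parser) throws IOException {
        SetOnce<Aggregation> typedAgg = new SetOnce<>();
        parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, typedAgg::set);
        if (typedAgg.get() == null) {
            // nothing was consumed: the key had no type prefix, so the structure was skipped
            throw new ParsingException(parser.getTokenLocation(), "Could not parse aggregation");
        }
        return typedAgg.get();
    }
}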
@@ -18,10 +18,11 @@
 */
package org.elasticsearch.search.aggregations;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;

import java.io.IOException;
import java.util.ArrayList;

@@ -29,10 +30,12 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;

import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.xcontent.XContentParserUtils.parseTypedKeysObject;

/**
 * Represents a set of {@link Aggregation}s

@@ -133,7 +136,15 @@ public class Aggregations implements Iterable<Aggregation>, ToXContent {
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.START_OBJECT) {
        aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
        SetOnce<Aggregation> typedAgg = new SetOnce<>();
        String currentField = parser.currentName();
        parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, typedAgg::set);
        if (typedAgg.get() != null) {
            aggregations.add(typedAgg.get());
        } else {
            throw new ParsingException(parser.getTokenLocation(),
                String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField));
        }
    }
}
return new Aggregations(aggregations);
@@ -171,7 +171,8 @@ public abstract class ParsedMultiBucketAggregation<B extends ParsedMultiBucketAg
    bucket.setDocCount(parser.longValue());
}
} else if (token == XContentParser.Token.START_OBJECT) {
    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
    XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class,
        aggregations::add);
}
}
bucket.setAggregations(new Aggregations(aggregations));
@@ -83,7 +83,8 @@ public abstract class ParsedSingleBucketAggregation extends ParsedAggregation im
if (CommonFields.META.getPreferredName().equals(currentFieldName)) {
    aggregation.metadata = parser.map();
} else {
    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
    XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class,
        aggregations::add);
}
}
}
@@ -131,7 +131,8 @@ public class ParsedFilters extends ParsedMultiBucketAggregation<ParsedFilters.Pa
    bucket.setDocCount(parser.longValue());
}
} else if (token == XContentParser.Token.START_OBJECT) {
    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
    XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class,
        aggregations::add);
}
}
bucket.setAggregations(new Aggregations(aggregations));
@@ -146,7 +146,8 @@ public class ParsedBinaryRange extends ParsedMultiBucketAggregation<ParsedBinary
    bucket.to = parser.text();
}
} else if (token == XContentParser.Token.START_OBJECT) {
    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
    XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class,
        aggregations::add);
}
}
bucket.setAggregations(new Aggregations(aggregations));
@@ -179,7 +179,8 @@ public class ParsedRange extends ParsedMultiBucketAggregation<ParsedRange.Parsed
    bucket.toAsString = parser.text();
}
} else if (token == XContentParser.Token.START_OBJECT) {
    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
    XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class,
        aggregations::add);
}
}
bucket.setAggregations(new Aggregations(aggregations));
@@ -153,6 +153,7 @@ public abstract class ParsedSignificantTerms extends ParsedMultiBucketAggregatio
    return builder;
}

@Override
protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException;

static <B extends ParsedBucket> B parseSignificantTermsBucketXContent(final XContentParser parser, final B bucket,

@@ -179,7 +180,8 @@ public abstract class ParsedSignificantTerms extends ParsedMultiBucketAggregatio
    bucket.supersetDf = parser.longValue();
}
} else if (token == XContentParser.Token.START_OBJECT) {
    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
    XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class,
        aggregations::add);
}
}
bucket.setAggregations(new Aggregations(aggregations));
@@ -136,7 +136,8 @@ public abstract class ParsedTerms extends ParsedMultiBucketAggregation<ParsedTer
    bucket.showDocCountError = true;
}
} else if (token == XContentParser.Token.START_OBJECT) {
    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
    XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class,
        aggregations::add);
}
}
bucket.setAggregations(new Aggregations(aggregations));
@@ -140,6 +140,8 @@ public abstract class ParsedPercentiles extends ParsedAggregation implements Ite
    }
    } else if (token == XContentParser.Token.VALUE_NULL) {
        aggregation.addPercentile(Double.valueOf(parser.currentName()), Double.NaN);
    } else {
        parser.skipChildren(); // skip potential inner objects and arrays for forward compatibility
    }
}
} else if (token == XContentParser.Token.START_ARRAY) {

@@ -164,6 +166,8 @@ public abstract class ParsedPercentiles extends ParsedAggregation implements Ite
    }
    } else if (token == XContentParser.Token.VALUE_NULL) {
        value = Double.NaN;
    } else {
        parser.skipChildren(); // skip potential inner objects and arrays for forward compatibility
    }
}
if (key != null) {
@@ -19,8 +19,10 @@
package org.elasticsearch.search.suggest;

import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;

@@ -48,6 +50,7 @@ import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Collectors;

@@ -177,7 +180,16 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
List<Suggestion<? extends Entry<? extends Option>>> suggestions = new ArrayList<>();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    suggestions.add(Suggestion.fromXContent(parser));
    ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
    String currentField = parser.currentName();
    ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
    Suggestion<? extends Entry<? extends Option>> suggestion = Suggestion.fromXContent(parser);
    if (suggestion != null) {
        suggestions.add(suggestion);
    } else {
        throw new ParsingException(parser.getTokenLocation(),
            String.format(Locale.ROOT, "Could not parse suggestion keyed as [%s]", currentField));
    }
}
return new Suggest(suggestions);
}

@@ -386,14 +398,16 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
@SuppressWarnings("unchecked")
public static Suggestion<? extends Entry<? extends Option>> fromXContent(XContentParser parser) throws IOException {
    ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
    return XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Suggestion.class);
    ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation);
    SetOnce<Suggestion> suggestion = new SetOnce<>();
    XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Suggestion.class, suggestion::set);
    return suggestion.get();
}

protected static <E extends Suggestion.Entry<?>> void parseEntries(XContentParser parser, Suggestion<E> suggestion,
        CheckedFunction<XContentParser, E, IOException> entryParser)
        throws IOException {
    ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
    ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation);
    while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) {
        suggestion.addTerm(entryParser.apply(parser));
    }
@@ -19,6 +19,7 @@
package org.elasticsearch.common.xcontent;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;

@@ -52,34 +53,39 @@ public class XContentParserUtilsTests extends ESTestCase {
final String delimiter = randomFrom("#", ":", "/", "-", "_", "|", "_delim_");
final XContentType xContentType = randomFrom(XContentType.values());

final ObjectParser<SetOnce<Boolean>, Void> BOOLPARSER = new ObjectParser<>("bool", () -> new SetOnce<>());
BOOLPARSER.declareBoolean(SetOnce::set, new ParseField("field"));
final ObjectParser<SetOnce<Long>, Void> LONGPARSER = new ObjectParser<>("long", () -> new SetOnce<>());
LONGPARSER.declareLong(SetOnce::set, new ParseField("field"));

List<NamedXContentRegistry.Entry> namedXContents = new ArrayList<>();
namedXContents.add(new NamedXContentRegistry.Entry(Boolean.class, new ParseField("bool"), parser -> {
    ensureExpectedToken(XContentParser.Token.VALUE_BOOLEAN, parser.nextToken(), parser::getTokenLocation);
    return parser.booleanValue();
}));
namedXContents.add(new NamedXContentRegistry.Entry(Long.class, new ParseField("long"), parser -> {
    ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, parser.nextToken(), parser::getTokenLocation);
    return parser.longValue();
}));
namedXContents.add(new NamedXContentRegistry.Entry(Boolean.class, new ParseField("bool"), p -> BOOLPARSER.parse(p, null).get()));
namedXContents.add(new NamedXContentRegistry.Entry(Long.class, new ParseField("long"), p -> LONGPARSER.parse(p, null).get()));
final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(namedXContents);

BytesReference bytes = toXContent((builder, params) -> builder.field("test", 0), xContentType, randomBoolean());
BytesReference bytes = toXContent((builder, params) -> builder.startObject("name").field("field", 0).endObject(), xContentType,
    randomBoolean());
try (XContentParser parser = xContentType.xContent().createParser(namedXContentRegistry, bytes)) {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
    ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);

    ParsingException e = expectThrows(ParsingException.class, () -> parseTypedKeysObject(parser, delimiter, Boolean.class));
    assertEquals("Cannot parse object of class [Boolean] without type information. Set [typed_keys] parameter " +
        "on the request to ensure the type information is added to the response output", e.getMessage());
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
    SetOnce<Boolean> booleanConsumer = new SetOnce<>();
    parseTypedKeysObject(parser, delimiter, Boolean.class, booleanConsumer::set);
    // because of the missing type to identify the parser, we expect no return value, but also no exception
    assertNull(booleanConsumer.get());
    ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser::getTokenLocation);
    ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
    assertNull(parser.nextToken());
}

bytes = toXContent((builder, params) -> builder.field("type" + delimiter + "name", 0), xContentType, randomBoolean());
bytes = toXContent((builder, params) -> builder.startObject("type" + delimiter + "name").field("bool", true).endObject(),
    xContentType, randomBoolean());
try (XContentParser parser = xContentType.xContent().createParser(namedXContentRegistry, bytes)) {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
    ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);

    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
    NamedXContentRegistry.UnknownNamedObjectException e = expectThrows(NamedXContentRegistry.UnknownNamedObjectException.class,
        () -> parseTypedKeysObject(parser, delimiter, Boolean.class));
        () -> parseTypedKeysObject(parser, delimiter, Boolean.class, a -> {}));
    assertEquals("Unknown Boolean [type]", e.getMessage());
    assertEquals("type", e.getName());
    assertEquals("java.lang.Boolean", e.getCategoryClass());

@@ -88,8 +94,8 @@ public class XContentParserUtilsTests extends ESTestCase {
final long longValue = randomLong();
final boolean boolValue = randomBoolean();
bytes = toXContent((builder, params) -> {
    builder.field("long" + delimiter + "l", longValue);
    builder.field("bool" + delimiter + "b", boolValue);
    builder.startObject("long" + delimiter + "l").field("field", longValue).endObject();
    builder.startObject("bool" + delimiter + "l").field("field", boolValue).endObject();
    return builder;
}, xContentType, randomBoolean());

@@ -97,16 +103,49 @@ public class XContentParserUtilsTests extends ESTestCase {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);

ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
Long parsedLong = parseTypedKeysObject(parser, delimiter, Long.class);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
SetOnce<Long> parsedLong = new SetOnce<>();
parseTypedKeysObject(parser, delimiter, Long.class, parsedLong::set);
assertNotNull(parsedLong);
assertEquals(longValue, parsedLong.longValue());
assertEquals(longValue, parsedLong.get().longValue());

ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
Boolean parsedBoolean = parseTypedKeysObject(parser, delimiter, Boolean.class);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
SetOnce<Boolean> parsedBoolean = new SetOnce<>();
parseTypedKeysObject(parser, delimiter, Boolean.class, parsedBoolean::set);
assertNotNull(parsedBoolean);
assertEquals(boolValue, parsedBoolean);
assertEquals(boolValue, parsedBoolean.get());

ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
}
}

public void testParseTypedKeysObjectErrors() throws IOException {
    final XContentType xContentType = randomFrom(XContentType.values());
    {
        BytesReference bytes = toXContent((builder, params) -> builder.startObject("name").field("field", 0).endObject(), xContentType,
            randomBoolean());
        try (XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, bytes)) {
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
            ParsingException exception = expectThrows(ParsingException.class,
                () -> parseTypedKeysObject(parser, "#", Boolean.class, o -> {
                }));
            assertEquals("Failed to parse object: unexpected token [FIELD_NAME] found", exception.getMessage());
        }
    }
    {
        BytesReference bytes = toXContent((builder, params) -> builder.startObject("").field("field", 0).endObject(), xContentType,
            randomBoolean());
        try (XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, bytes)) {
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            ParsingException exception = expectThrows(ParsingException.class,
                () -> parseTypedKeysObject(parser, "#", Boolean.class, o -> {
                }));
            assertEquals("Failed to parse object: empty key", exception.getMessage());
        }
    }
}
}
@@ -19,9 +19,12 @@
package org.elasticsearch.search.aggregations;

import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

@@ -187,6 +190,22 @@ public class AggregationsTests extends ESTestCase {
    }
}

public void testParsingExceptionOnUnknownAggregation() throws IOException {
    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject();
    {
        builder.startObject("unknownAggregation");
        builder.endObject();
    }
    builder.endObject();
    BytesReference originalBytes = builder.bytes();
    try (XContentParser parser = createParser(builder.contentType().xContent(), originalBytes)) {
        assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
        ParsingException ex = expectThrows(ParsingException.class, () -> Aggregations.fromXContent(parser));
        assertEquals("Could not parse aggregation keyed as [unknownAggregation]", ex.getMessage());
    }
}

public final InternalAggregations createTestInstance() {
    return createTestInstance(1, 0, 5);
}
@@ -90,7 +90,7 @@ public abstract class InternalMultiBucketAggregationTestCase<T extends InternalA
public void testIterators() throws IOException {
    final T aggregation = createTestInstance();
    assertMultiBucketsAggregations(aggregation, parseAndAssert(aggregation, false), true);
    assertMultiBucketsAggregations(aggregation, parseAndAssert(aggregation, false, false), true);
}

private void assertMultiBucketsAggregations(Aggregation expected, Aggregation actual, boolean checkOrder) {
@@ -20,6 +20,7 @@
package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;

@@ -29,6 +30,7 @@ import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

public abstract class AbstractPercentilesTestCase<T extends InternalAggregation & Iterable<Percentile>>
        extends InternalAggregationTestCase<T> {

@@ -62,7 +64,7 @@ public abstract class AbstractPercentilesTestCase<T extends InternalAggregation
public void testPercentilesIterators() throws IOException {
    final T aggregation = createTestInstance();
    final Iterable<Percentile> parsedAggregation = parseAndAssert(aggregation, false);
    final Iterable<Percentile> parsedAggregation = parseAndAssert(aggregation, false, false);

    Iterator<Percentile> it = aggregation.iterator();
    Iterator<Percentile> parsedIt = parsedAggregation.iterator();

@@ -82,4 +84,9 @@ public abstract class AbstractPercentilesTestCase<T extends InternalAggregation
    }
    return percents;
}

@Override
protected Predicate<String> excludePathsFromXContentInsertion() {
    return path -> path.endsWith(CommonFields.VALUES.getPreferredName());
}
}
@@ -21,9 +21,6 @@ package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before;

import java.util.List;
@@ -25,7 +25,6 @@ import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesRanksTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;

import java.util.Arrays;
import java.util.List;
@@ -24,11 +24,11 @@ import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;

@@ -39,6 +39,7 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.function.Supplier;

public class InternalScriptedMetricTests extends InternalAggregationTestCase<InternalScriptedMetric> {

@@ -185,4 +186,9 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase<Int
        assertEquals(expected, actual);
    }
}

@Override
protected Predicate<String> excludePathsFromXContentInsertion() {
    return path -> path.contains(CommonFields.VALUE.getPreferredName());
}
}
@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;

import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

@@ -31,6 +32,7 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

import static org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesTestCase.randomPercents;

@@ -110,11 +112,16 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase<
public void testParsedAggregationIteratorOrder() throws IOException {
    final InternalPercentilesBucket aggregation = createTestInstance();
    final Iterable<Percentile> parsedAggregation = parseAndAssert(aggregation, false);
    final Iterable<Percentile> parsedAggregation = parseAndAssert(aggregation, false, false);
    Iterator<Percentile> it = aggregation.iterator();
    Iterator<Percentile> parsedIt = parsedAggregation.iterator();
    while (it.hasNext()) {
        assertEquals(it.next(), parsedIt.next());
    }
}

@Override
protected Predicate<String> excludePathsFromXContentInsertion() {
    return path -> path.endsWith(CommonFields.VALUES.getPreferredName());
}
}
@@ -20,10 +20,13 @@
package org.elasticsearch.search.suggest;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.action.search.RestSearchAction;

@@ -171,4 +174,22 @@ public class SuggestTests extends ESTestCase {
    }
}

public void testParsingExceptionOnUnknownSuggestion() throws IOException {
    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject();
    {
        builder.startArray("unknownSuggestion");
        builder.endArray();
    }
    builder.endObject();
    BytesReference originalBytes = builder.bytes();
    try (XContentParser parser = createParser(builder.contentType().xContent(), originalBytes)) {
        assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
        ParsingException ex = expectThrows(ParsingException.class, () -> Suggest.fromXContent(parser));
        assertEquals("Could not parse suggestion keyed as [unknownSuggestion]", ex.getMessage());
    }
}

}
@@ -132,6 +132,7 @@ public class SuggestionTests extends ESTestCase {
try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
    ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
    ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
    parsed = Suggestion.fromXContent(parser);
    assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
    assertNull(parser.nextToken());

@@ -145,19 +146,18 @@ public class SuggestionTests extends ESTestCase {
}

/**
 * test that we throw error if RestSearchAction.TYPED_KEYS_PARAM isn't set while rendering xContent
 * test that we parse nothing if RestSearchAction.TYPED_KEYS_PARAM isn't set while rendering xContent and we cannot find
 * suggestion type information
 */
public void testFromXContentFailsWithoutTypeParam() throws IOException {
public void testFromXContentWithoutTypeParam() throws IOException {
    XContentType xContentType = randomFrom(XContentType.values());
    BytesReference originalBytes = toXContent(createTestItem(), xContentType, ToXContent.EMPTY_PARAMS, randomBoolean());
    try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
        ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
        ParsingException e = expectThrows(ParsingException.class, () -> Suggestion.fromXContent(parser));
        assertEquals(
            "Cannot parse object of class [Suggestion] without type information. "
            + "Set [typed_keys] parameter on the request to ensure the type information "
            + "is added to the response output", e.getMessage());
        ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
        assertNull(Suggestion.fromXContent(parser));
        ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
    }
}

@@ -177,6 +177,7 @@ public class SuggestionTests extends ESTestCase {
try (XContentParser parser = xContent.createParser(xContentRegistry(), suggestionString)) {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
    ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
    ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
    ParsingException e = expectThrows(ParsingException.class, () -> Suggestion.fromXContent(parser));
    assertEquals("Unknown Suggestion [unknownType]", e.getMessage());
}
@@ -29,6 +29,7 @@ import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.matrix.stats.InternalMatrixStats.Fields;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;

@@ -38,6 +39,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

public class InternalMatrixStatsTests extends InternalAggregationTestCase<InternalMatrixStats> {

@@ -170,4 +172,9 @@ public class InternalMatrixStatsTests extends InternalAggregationTestCase<Intern
        expectThrows(IllegalArgumentException.class, () -> matrix.getCorrelation(other, unknownField));
    }
}

@Override
protected Predicate<String> excludePathsFromXContentInsertion() {
    return path -> path.endsWith(Fields.CORRELATION) || path.endsWith(Fields.COVARIANCE);
}
}
@@ -19,6 +19,7 @@
package org.elasticsearch.test;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;

@@ -130,12 +131,14 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import static java.util.Collections.emptyList;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;

public abstract class InternalAggregationTestCase<T extends InternalAggregation> extends AbstractWireSerializingTestCase<T> {

@@ -297,7 +300,13 @@ public abstract class InternalAggregationTestCase<T extends InternalAggregation>
public final void testFromXContent() throws IOException {
    final T aggregation = createTestInstance();
    final Aggregation parsedAggregation = parseAndAssert(aggregation, randomBoolean());
    final Aggregation parsedAggregation = parseAndAssert(aggregation, randomBoolean(), false);
    assertFromXContent(aggregation, (ParsedAggregation) parsedAggregation);
}

public final void testFromXContentWithRandomFields() throws IOException {
    final T aggregation = createTestInstance();
    final Aggregation parsedAggregation = parseAndAssert(aggregation, randomBoolean(), true);
    assertFromXContent(aggregation, (ParsedAggregation) parsedAggregation);
}

@@ -305,7 +314,7 @@ public abstract class InternalAggregationTestCase<T extends InternalAggregation>
@SuppressWarnings("unchecked")
protected <P extends ParsedAggregation> P parseAndAssert(final InternalAggregation aggregation,
        final boolean shuffled) throws IOException {
        final boolean shuffled, final boolean addRandomFields) throws IOException {

    final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"));
    final XContentType xContentType = randomFrom(XContentType.values());

@@ -317,29 +326,57 @@ public abstract class InternalAggregationTestCase<T extends InternalAggregation>
} else {
    originalBytes = toXContent(aggregation, xContentType, params, humanReadable);
}
BytesReference mutated;
if (addRandomFields) {
    /*
     * - we don't add to the root object because it should only contain
     * the named aggregation to test - we don't want to insert into the
     * "meta" object, because we pass on everything we find there
     *
     * - we don't want to directly insert anything random into "buckets"
     * objects, they are used with "keyed" aggregations and contain
     * named bucket objects. Any new named object on this level should
     * also be a bucket and be parsed as such.
     */
    Predicate<String> basicExcludes = path -> path.isEmpty() || path.endsWith(Aggregation.CommonFields.META.getPreferredName())
        || path.endsWith(Aggregation.CommonFields.BUCKETS.getPreferredName());
    Predicate<String> excludes = basicExcludes.or(excludePathsFromXContentInsertion());
    mutated = insertRandomFields(xContentType, originalBytes, excludes, random());
} else {
    mutated = originalBytes;
}

Aggregation parsedAggregation;
try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
SetOnce<Aggregation> parsedAggregation = new SetOnce<>();
try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
    assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
    assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());

    parsedAggregation = XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class);
    assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
    XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, parsedAggregation::set);

    assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
    assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
    assertNull(parser.nextToken());

    assertEquals(aggregation.getName(), parsedAggregation.getName());
    assertEquals(aggregation.getMetaData(), parsedAggregation.getMetaData());
    Aggregation agg = parsedAggregation.get();
    assertEquals(aggregation.getName(), agg.getName());
    assertEquals(aggregation.getMetaData(), agg.getMetaData());

    assertTrue(parsedAggregation instanceof ParsedAggregation);
    assertEquals(aggregation.getType(), parsedAggregation.getType());
    assertTrue(agg instanceof ParsedAggregation);
    assertEquals(aggregation.getType(), agg.getType());

    BytesReference parsedBytes = toXContent(agg, xContentType, params, humanReadable);
    assertToXContentEquivalent(originalBytes, parsedBytes, xContentType);

    return (P) agg;
}

BytesReference parsedBytes = toXContent(parsedAggregation, xContentType, params, humanReadable);
assertToXContentEquivalent(originalBytes, parsedBytes, xContentType);
}

return (P) parsedAggregation;
/**
 * Overwrite this in your test if other than the basic xContent paths should be excluded during insertion of random fields
 */
protected Predicate<String> excludePathsFromXContentInsertion() {
    return path -> false;
}

/**
@@ -32,13 +32,13 @@ import org.elasticsearch.test.rest.yaml.ObjectPath;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Stack;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiOfLength;
import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;

@@ -195,22 +195,20 @@ public final class XContentTestUtils {
    }
}

try (XContentParser parser = createParser(NamedXContentRegistry.EMPTY, xContent, contentType)) {
    Supplier<Object> value = () -> {
Supplier<Object> value = () -> {
    List<Object> randomValues = RandomObjects.randomStoredFieldValues(random, contentType).v1();
    if (random.nextBoolean()) {
        return randomValues.get(0);
    } else {
        if (random.nextBoolean()) {
            return RandomObjects.randomStoredFieldValues(random, contentType);
            return randomValues.stream().collect(Collectors.toMap(obj -> randomAsciiOfLength(random, 10), obj -> obj));
        } else {
            if (random.nextBoolean()) {
                return Collections.singletonMap(randomAsciiOfLength(random, 10), randomAsciiOfLength(random, 10));
            } else {
                return Collections.singletonList(randomAsciiOfLength(random, 10));
            }
            return randomValues;
        }
    };
    return XContentTestUtils
        .insertIntoXContent(contentType.xContent(), xContent, insertPaths, () -> randomAsciiOfLength(random, 10), value)
        .bytes();
    }
}
};
return XContentTestUtils
    .insertIntoXContent(contentType.xContent(), xContent, insertPaths, () -> randomAsciiOfLength(random, 10), value).bytes();
}

/**

@@ -251,7 +249,8 @@ public final class XContentTestUtils {
List<String> validPaths = new ArrayList<>();
// parser.currentName() can be null for root object and unnamed objects in arrays
if (parser.currentName() != null) {
    currentPath.push(parser.currentName());
    // dots in randomized field names need to be escaped, we use that character as the path separator
    currentPath.push(parser.currentName().replaceAll("\\.", "\\\\."));
}
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
    validPaths.add(String.join(".", currentPath.toArray(new String[currentPath.size()])));
@@ -61,7 +61,7 @@ public class XContentTestUtilsTests extends ESTestCase {
builder.startObject("inner1");
{
    builder.field("inner1field1", "value");
    builder.startObject("inner2");
    builder.startObject("inn.er2");
    {
        builder.field("inner2field1", "value");
    }

@@ -79,7 +79,7 @@ public class XContentTestUtilsTests extends ESTestCase {
    assertThat(insertPaths, hasItem(equalTo("list1.2")));
    assertThat(insertPaths, hasItem(equalTo("list1.4")));
    assertThat(insertPaths, hasItem(equalTo("inner1")));
    assertThat(insertPaths, hasItem(equalTo("inner1.inner2")));
    assertThat(insertPaths, hasItem(equalTo("inner1.inn\\.er2")));
}
}

@@ -89,19 +89,19 @@ public class XContentTestUtilsTests extends ESTestCase {
builder.startObject();
builder.endObject();
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList(""),
    () -> "inner1", () -> new HashMap<>());
    () -> "inn.er1", () -> new HashMap<>());
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList(""),
    () -> "field1", () -> "value1");
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList("inner1"),
    () -> "inner2", () -> new HashMap<>());
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList("inner1"),
    () -> "field2", () -> "value2");
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(),
    Collections.singletonList("inn\\.er1"), () -> "inner2", () -> new HashMap<>());
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(),
    Collections.singletonList("inn\\.er1"), () -> "field2", () -> "value2");
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, builder.bytes(), builder.contentType())) {
    Map<String, Object> map = parser.map();
    assertEquals(2, map.size());
    assertEquals("value1", map.get("field1"));
    assertThat(map.get("inner1"), instanceOf(Map.class));
    Map<String, Object> innerMap = (Map<String, Object>) map.get("inner1");
    assertThat(map.get("inn.er1"), instanceOf(Map.class));
    Map<String, Object> innerMap = (Map<String, Object>) map.get("inn.er1");
    assertEquals(2, innerMap.size());
    assertEquals("value2", innerMap.get("field2"));
    assertThat(innerMap.get("inner2"), instanceOf(Map.class));