Remove hand-coded XContent duplicate checks
With this commit we clean up hand-coded duplicate checks in XContent parsing. They were necessary previously, but since we reconfigured the underlying parser in #22073 and #22225 these checks are obsolete; they were also only effective when an undocumented system property was set to disable the parser's own strict duplicate detection. As we also remove this escape hatch, we can remove the additional checks as well.

Closes #22253
Relates #34588
parent e498b7d437
commit dbb6fe58fa
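For illustration only (not part of this commit): a minimal, self-contained sketch of the Jackson behaviour the change relies on. With STRICT_DUPLICATE_DETECTION enabled — the setting the CborXContent, JsonXContent, SmileXContent and YamlXContent factories below now hard-code to true — the parser itself rejects duplicate keys, which is why the hand-coded checks and their tests can go. The class name and sample JSON here are illustrative assumptions.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;

public class DuplicateKeyDemo {
    public static void main(String[] args) throws Exception {
        JsonFactory factory = new JsonFactory();
        // Same feature the XContent factories in this commit now always enable.
        factory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
        try (JsonParser parser = factory.createParser("{\"foo\":\"bar\",\"foo\":\"baz\"}")) {
            while (parser.nextToken() != null) {
                // Advancing past the second "foo" key throws JsonParseException,
                // so callers no longer need their own containsKey-style checks.
            }
        } catch (JsonParseException e) {
            System.out.println("Rejected by the parser: " + e.getMessage());
        }
    }
}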
@@ -216,7 +216,7 @@ public final class ConstructingObjectParser<Value, Context> extends AbstractObje
             int position = constructorArgInfos.size();
             boolean required = consumer == REQUIRED_CONSTRUCTOR_ARG_MARKER;
             constructorArgInfos.add(new ConstructorArgInfo(parseField, required));
-            objectParser.declareField((target, v) -> target.constructorArg(position, parseField, v), parser, parseField, type);
+            objectParser.declareField((target, v) -> target.constructorArg(position, v), parser, parseField, type);
         } else {
             numberOfFields += 1;
             objectParser.declareField(queueingConsumer(consumer, parseField), parser, parseField, type);
@@ -249,7 +249,7 @@ public final class ConstructingObjectParser<Value, Context> extends AbstractObje
             int position = constructorArgInfos.size();
             boolean required = consumer == REQUIRED_CONSTRUCTOR_ARG_MARKER;
             constructorArgInfos.add(new ConstructorArgInfo(parseField, required));
-            objectParser.declareNamedObjects((target, v) -> target.constructorArg(position, parseField, v), namedObjectParser, parseField);
+            objectParser.declareNamedObjects((target, v) -> target.constructorArg(position, v), namedObjectParser, parseField);
         } else {
             numberOfFields += 1;
             objectParser.declareNamedObjects(queueingConsumer(consumer, parseField), namedObjectParser, parseField);
@@ -285,7 +285,7 @@ public final class ConstructingObjectParser<Value, Context> extends AbstractObje
             int position = constructorArgInfos.size();
             boolean required = consumer == REQUIRED_CONSTRUCTOR_ARG_MARKER;
             constructorArgInfos.add(new ConstructorArgInfo(parseField, required));
-            objectParser.declareNamedObjects((target, v) -> target.constructorArg(position, parseField, v), namedObjectParser,
+            objectParser.declareNamedObjects((target, v) -> target.constructorArg(position, v), namedObjectParser,
                 wrapOrderedModeCallBack(orderedModeCallback), parseField);
         } else {
             numberOfFields += 1;
@@ -395,10 +395,7 @@ public final class ConstructingObjectParser<Value, Context> extends AbstractObje
         /**
          * Set a constructor argument and build the target object if all constructor arguments have arrived.
          */
-        private void constructorArg(int position, ParseField parseField, Object value) {
-            if (constructorArgs[position] != null) {
-                throw new IllegalArgumentException("Can't repeat param [" + parseField + "]");
-            }
+        private void constructorArg(int position, Object value) {
             constructorArgs[position] = value;
             constructorArgsCollected++;
             if (constructorArgsCollected == constructorArgInfos.size()) {
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.common.xcontent;
 
-import org.elasticsearch.common.Booleans;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -32,28 +30,6 @@ import java.util.Set;
  * A generic abstraction on top of handling content, inspired by JSON and pull parsing.
  */
 public interface XContent {
-
-    /*
-     * NOTE: This comment is only meant for maintainers of the Elasticsearch code base and is intentionally not a Javadoc comment as it
-     * describes an undocumented system property.
-     *
-     *
-     * Determines whether the XContent parser will always check for duplicate keys. This behavior is enabled by default but
-     * can be disabled by setting the otherwise undocumented system property "es.xcontent.strict_duplicate_detection to "false".
-     *
-     * Before we've enabled this mode, we had custom duplicate checks in various parts of the code base. As the user can still disable this
-     * mode and fall back to the legacy duplicate checks, we still need to keep the custom duplicate checks around and we also need to keep
-     * the tests around.
-     *
-     * If this fallback via system property is removed one day in the future you can remove all tests that call this method and also remove
-     * the corresponding custom duplicate check code.
-     *
-     */
-    static boolean isStrictDuplicateDetectionEnabled() {
-        // Don't allow duplicate keys in JSON content by default but let the user opt out
-        return Booleans.parseBoolean(System.getProperty("es.xcontent.strict_duplicate_detection", "true"), true);
-    }
-
     /**
      * The type this content handles and produces.
     */
@@ -56,7 +56,7 @@ public class CborXContent implements XContent {
         cborFactory.configure(CBORFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now...
         // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.cbor.CBORGenerator#close() method
         cborFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
-        cborFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, XContent.isStrictDuplicateDetectionEnabled());
+        cborFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
         cborXContent = new CborXContent();
     }
 
@@ -57,7 +57,7 @@ public class JsonXContent implements XContent {
         jsonFactory.configure(JsonFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now...
         // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.core.json.UTF8JsonGenerator#close() method
         jsonFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
-        jsonFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, XContent.isStrictDuplicateDetectionEnabled());
+        jsonFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
         jsonXContent = new JsonXContent();
     }
 
@@ -58,7 +58,7 @@ public class SmileXContent implements XContent {
         smileFactory.configure(SmileFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now...
         // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.smile.SmileGenerator#close() method
         smileFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
-        smileFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, XContent.isStrictDuplicateDetectionEnabled());
+        smileFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
         smileXContent = new SmileXContent();
     }
 
@@ -51,7 +51,7 @@ public class YamlXContent implements XContent {
 
     static {
         yamlFactory = new YAMLFactory();
-        yamlFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, XContent.isStrictDuplicateDetectionEnabled());
+        yamlFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
        yamlXContent = new YamlXContent();
     }
 
@@ -39,7 +39,6 @@ import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
-import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.nullValue;
 
 public class ConstructingObjectParserTests extends ESTestCase {
@@ -164,21 +163,6 @@ public class ConstructingObjectParserTests extends ESTestCase {
         assertEquals((Integer) 2, parsed.vegetable);
     }
 
-    public void testRepeatedConstructorParam() throws IOException {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        XContentParser parser = createParser(JsonXContent.jsonXContent,
-            "{\n"
-                + " \"vegetable\": 1,\n"
-                + " \"vegetable\": 2\n"
-                + "}");
-        Throwable e = expectThrows(XContentParseException.class, () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null));
-        assertEquals("[has_required_arguments] failed to parse field [vegetable]", e.getMessage());
-        e = e.getCause();
-        assertThat(e, instanceOf(IllegalArgumentException.class));
-        assertEquals("Can't repeat param [vegetable]", e.getMessage());
-    }
-
     public void testBadParam() throws IOException {
         XContentParser parser = createParser(JsonXContent.jsonXContent,
             "{\n"
@@ -712,21 +712,21 @@ public final class Settings implements ToXContentFragment {
                 }
             }
             String key = keyBuilder.toString();
-            validateValue(key, list, builder, parser, allowNullValues);
+            validateValue(key, list, parser, allowNullValues);
             builder.putList(key, list);
         } else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
             String key = keyBuilder.toString();
-            validateValue(key, null, builder, parser, allowNullValues);
+            validateValue(key, null, parser, allowNullValues);
             builder.putNull(key);
         } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING
                 || parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
             String key = keyBuilder.toString();
             String value = parser.text();
-            validateValue(key, value, builder, parser, allowNullValues);
+            validateValue(key, value, parser, allowNullValues);
             builder.put(key, value);
         } else if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
             String key = keyBuilder.toString();
-            validateValue(key, parser.text(), builder, parser, allowNullValues);
+            validateValue(key, parser.text(), parser, allowNullValues);
             builder.put(key, parser.booleanValue());
         } else {
             XContentParserUtils.throwUnknownToken(parser.currentToken(), parser.getTokenLocation());
@@ -734,19 +734,7 @@
         }
     }
 
-    private static void validateValue(String key, Object currentValue, Settings.Builder builder, XContentParser parser,
-                                      boolean allowNullValues) {
-        if (builder.map.containsKey(key)) {
-            throw new ElasticsearchParseException(
-                "duplicate settings key [{}] found at line number [{}], column number [{}], previous value [{}], current value [{}]",
-                key,
-                parser.getTokenLocation().lineNumber,
-                parser.getTokenLocation().columnNumber,
-                builder.map.get(key),
-                currentValue
-            );
-        }
-
+    private static void validateValue(String key, Object currentValue, XContentParser parser, boolean allowNullValues) {
         if (currentValue == null && allowNullValues == false) {
             throw new ElasticsearchParseException(
                 "null-valued setting found for key [{}] found at line number [{}], column number [{}]",
@@ -98,10 +98,6 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder<ConstantScor
                 currentFieldName = parser.currentName();
             } else if (token == XContentParser.Token.START_OBJECT) {
                 if (INNER_QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
-                    if (queryFound) {
-                        throw new ParsingException(parser.getTokenLocation(), "[" + ConstantScoreQueryBuilder.NAME + "]"
-                            + " accepts only one 'filter' element.");
-                    }
                     query = parseInnerQueryBuilder(parser);
                     queryFound = true;
                 } else {
@@ -26,13 +26,11 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.VersionUtils;
-import org.hamcrest.CoreMatchers;
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
@@ -553,32 +551,6 @@ public class SettingsTests extends ESTestCase {
         assertThat(settings.getAsList("test1.test3").get(1), equalTo("test3-2"));
     }
 
-    public void testDuplicateKeysThrowsException() {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        final String json = "{\"foo\":\"bar\",\"foo\":\"baz\"}";
-        final SettingsException e = expectThrows(SettingsException.class,
-            () -> Settings.builder().loadFromSource(json, XContentType.JSON).build());
-        assertThat(
-            e.toString(),
-            CoreMatchers.containsString("duplicate settings key [foo] " +
-                "found at line number [1], " +
-                "column number [20], " +
-                "previous value [bar], " +
-                "current value [baz]"));
-
-        String yaml = "foo: bar\nfoo: baz";
-        SettingsException e1 = expectThrows(SettingsException.class, () -> {
-            Settings.builder().loadFromSource(yaml, XContentType.YAML);
-        });
-        assertEquals(e1.getCause().getClass(), ElasticsearchParseException.class);
-        String msg = e1.getCause().getMessage();
-        assertTrue(
-            msg,
-            msg.contains("duplicate settings key [foo] found at line number [2], column number [6], " +
-                "previous value [bar], current value [baz]"));
-    }
-
     public void testToXContent() throws IOException {
         // this is just terrible but it's the existing behavior!
         Settings test = Settings.builder().putList("foo.bar", "1", "2", "3").put("foo.bar.baz", "test").build();
@@ -1138,9 +1138,6 @@ public abstract class BaseXContentTestCase extends ESTestCase {
     }
 
     public void testChecksForDuplicates() throws Exception {
-        assumeTrue("Test only makes sense if XContent parser has strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-
         XContentBuilder builder = builder()
             .startObject()
                 .field("key", 1)
@@ -25,7 +25,6 @@ import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -332,30 +331,6 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde
         assertEquals("no [query] registered for [unknown_query]", ex.getMessage());
     }
 
-    /**
-     * test that two queries in object throws error
-     */
-    public void testTooManyQueriesInObject() throws IOException {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        String clauseType = randomFrom("must", "should", "must_not", "filter");
-        // should also throw error if invalid query is preceded by a valid one
-        String query = "{\n" +
-            " \"bool\": {\n" +
-            " \"" + clauseType + "\": {\n" +
-            " \"match\": {\n" +
-            " \"foo\": \"bar\"\n" +
-            " },\n" +
-            " \"match\": {\n" +
-            " \"baz\": \"buzz\"\n" +
-            " }\n" +
-            " }\n" +
-            " }\n" +
-            "}";
-        ParsingException ex = expectThrows(ParsingException.class, () -> parseQuery(query));
-        assertEquals("[match] malformed query, expected [END_OBJECT] but found [FIELD_NAME]", ex.getMessage());
-    }
-
     public void testRewrite() throws IOException {
         BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
         boolean mustRewrite = false;
@@ -22,7 +22,6 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.AbstractQueryTestCase;
 
@@ -62,20 +61,6 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta
         assertThat(e.getMessage(), containsString("requires a 'filter' element"));
     }
 
-    /**
-     * test that multiple "filter" elements causes {@link ParsingException}
-     */
-    public void testMultipleFilterElements() throws IOException {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        String queryString = "{ \"" + ConstantScoreQueryBuilder.NAME + "\" : {\n" +
-            "\"filter\" : { \"term\": { \"foo\": \"a\" } },\n" +
-            "\"filter\" : { \"term\": { \"foo\": \"x\" } },\n" +
-            "} }";
-        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(queryString));
-        assertThat(e.getMessage(), containsString("accepts only one 'filter' element"));
-    }
-
     /**
      * test that "filter" does not accept an array of queries, throws {@link ParsingException}
     */
@@ -35,7 +35,6 @@ import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction;
 import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
 import org.elasticsearch.common.lucene.search.function.WeightFactorFunction;
 import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -726,27 +725,6 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
         expectParsingException(json, equalTo("[bool] malformed query, expected [END_OBJECT] but found [FIELD_NAME]"));
     }
 
-    public void testMalformedQueryMultipleQueryElements() throws IOException {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        String json = "{\n" +
-            " \"function_score\":{\n" +
-            " \"query\":{\n" +
-            " \"bool\":{\n" +
-            " \"must\":{\"match\":{\"field\":\"value\"}}" +
-            " }\n" +
-            " },\n" +
-            " \"query\":{\n" +
-            " \"bool\":{\n" +
-            " \"must\":{\"match\":{\"field\":\"value\"}}" +
-            " }\n" +
-            " }\n" +
-            " }\n" +
-            " }\n" +
-            "}";
-        expectParsingException(json, "[query] is already defined.");
-    }
-
     private void expectParsingException(String json, Matcher<String> messageMatcher) {
         ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
         assertThat(e.getMessage(), messageMatcher);
@@ -22,7 +22,6 @@ import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -110,38 +109,6 @@ public class AggregatorFactoriesTests extends ESTestCase {
         assertThat(e.toString(), containsString("Found two aggregation type definitions in [in_stock]: [filter] and [terms]"));
     }
 
-    public void testTwoAggs() throws Exception {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        XContentBuilder source = JsonXContent.contentBuilder()
-            .startObject()
-                .startObject("by_date")
-                    .startObject("date_histogram")
-                        .field("field", "timestamp")
-                        .field("interval", "month")
-                    .endObject()
-                    .startObject("aggs")
-                        .startObject("tag_count")
-                            .startObject("cardinality")
-                                .field("field", "tag")
-                            .endObject()
-                        .endObject()
-                    .endObject()
-                    .startObject("aggs") // 2nd "aggs": illegal
-                        .startObject("tag_count2")
-                            .startObject("cardinality")
-                                .field("field", "tag")
-                            .endObject()
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject();
-        XContentParser parser = createParser(source);
-        assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
-        Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser));
-        assertThat(e.toString(), containsString("Found two sub aggregation definitions under [by_date]"));
-    }
-
     public void testInvalidAggregationName() throws Exception {
         Matcher matcher = Pattern.compile("[^\\[\\]>]+").matcher("");
         String name;
@@ -176,29 +143,6 @@ public class AggregatorFactoriesTests extends ESTestCase {
         assertThat(e.toString(), containsString("Invalid aggregation name [" + name + "]"));
     }
 
-    public void testSameAggregationName() throws Exception {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        final String name = randomAlphaOfLengthBetween(1, 10);
-        XContentBuilder source = JsonXContent.contentBuilder()
-            .startObject()
-                .startObject(name)
-                    .startObject("terms")
-                        .field("field", "a")
-                    .endObject()
-                .endObject()
-                .startObject(name)
-                    .startObject("terms")
-                        .field("field", "b")
-                    .endObject()
-                .endObject()
-            .endObject();
-        XContentParser parser = createParser(source);
-        assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
-        Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser));
-        assertThat(e.toString(), containsString("Two sibling aggregations cannot have the same name: [" + name + "]"));
-    }
-
     public void testMissingName() throws Exception {
         XContentBuilder source = JsonXContent.contentBuilder()
             .startObject()
@@ -26,7 +26,6 @@ import org.apache.lucene.search.spell.LuceneLevenshteinDistance;
 import org.apache.lucene.search.spell.NGramDistance;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParseException;
@@ -161,15 +160,6 @@ public class DirectCandidateGeneratorTests extends ESTestCase {
         assertIllegalXContent(directGenerator, IllegalArgumentException.class,
                 "Required [field]");
 
-        // test two fieldnames
-        if (XContent.isStrictDuplicateDetectionEnabled()) {
-            logger.info("Skipping test as it uses a custom duplicate check that is obsolete when strict duplicate checks are enabled.");
-        } else {
-            directGenerator = "{ \"field\" : \"f1\", \"field\" : \"f2\" }";
-            assertIllegalXContent(directGenerator, IllegalArgumentException.class,
-                    "[direct_generator] failed to parse field [field]");
-        }
-
         // test unknown field
         directGenerator = "{ \"unknown_param\" : \"f1\" }";
         assertIllegalXContent(directGenerator, IllegalArgumentException.class,
@@ -81,9 +81,6 @@ public class ClientYamlSuiteRestApiParser {
             if (parser.currentToken() == XContentParser.Token.START_OBJECT && "parts".equals(currentFieldName)) {
                 while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
                     String part = parser.currentName();
-                    if (restApi.getPathParts().containsKey(part)) {
-                        throw new IllegalArgumentException("Found duplicate part [" + part + "]");
-                    }
                     parser.nextToken();
                     if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
                         throw new IllegalArgumentException("Expected parts field in rest api definition to contain an object");
@@ -94,12 +91,7 @@
 
             if (parser.currentToken() == XContentParser.Token.START_OBJECT && "params".equals(currentFieldName)) {
                 while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
-
                     String param = parser.currentName();
-                    if (restApi.getParams().containsKey(param)) {
-                        throw new IllegalArgumentException("Found duplicate param [" + param + "]");
-                    }
-
                     parser.nextToken();
                     if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
                         throw new IllegalArgumentException("Expected params field in rest api definition to contain an object");
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.test.rest.yaml.restspec;
 
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.yaml.YamlXContent;
 import org.elasticsearch.test.ESTestCase;
@@ -71,72 +70,6 @@ public class ClientYamlSuiteRestApiParserFailingTests extends ESTestCase {
             "}", "ping.json", "Found duplicate path [/pingtwo]");
     }
 
-    public void testDuplicateParts() throws Exception {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        parseAndExpectFailure("{\n" +
-            " \"ping\": {" +
-            " \"documentation\": \"http://www.elasticsearch.org/guide/\"," +
-            " \"methods\": [\"PUT\"]," +
-            " \"url\": {" +
-            " \"path\": \"/\"," +
-            " \"paths\": [\"/\"]," +
-            " \"parts\": {" +
-            " \"index\": {" +
-            " \"type\" : \"string\",\n" +
-            " \"description\" : \"index part\"\n" +
-            " }," +
-            " \"type\": {" +
-            " \"type\" : \"string\",\n" +
-            " \"description\" : \"type part\"\n" +
-            " }," +
-            " \"index\": {" +
-            " \"type\" : \"string\",\n" +
-            " \"description\" : \"index parameter part\"\n" +
-            " }" +
-            " }," +
-            " \"params\": {" +
-            " \"type\" : \"boolean\",\n" +
-            " \"description\" : \"Whether specified concrete indices should be ignored when unavailable (missing or closed)\"" +
-            " }" +
-            " }," +
-            " \"body\": null" +
-            " }" +
-            "}", "ping.json", "Found duplicate part [index]");
-    }
-
-    public void testDuplicateParams() throws Exception {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        parseAndExpectFailure("{\n" +
-            " \"ping\": {" +
-            " \"documentation\": \"http://www.elasticsearch.org/guide/\"," +
-            " \"methods\": [\"PUT\"]," +
-            " \"url\": {" +
-            " \"path\": \"/\"," +
-            " \"paths\": [\"/\"]," +
-            " \"parts\": {" +
-            " }," +
-            " \"params\": {" +
-            " \"timeout\": {" +
-            " \"type\" : \"string\",\n" +
-            " \"description\" : \"timeout parameter\"\n" +
-            " }," +
-            " \"refresh\": {" +
-            " \"type\" : \"string\",\n" +
-            " \"description\" : \"refresh parameter\"\n" +
-            " }," +
-            " \"timeout\": {" +
-            " \"type\" : \"string\",\n" +
-            " \"description\" : \"timeout parameter again\"\n" +
-            " }" +
-            " }" +
-            " }," +
-            " \"body\": null" +
-            " }" +
-            "}", "ping.json", "Found duplicate param [timeout]");
-    }
-
     public void testBrokenSpecShouldThrowUsefulExceptionWhenParsingFailsOnParams() throws Exception {
         parseAndExpectFailure(BROKEN_SPEC_PARAMS, "ping.json", "Expected params field in rest api definition to contain an object");
     }
@@ -24,7 +24,6 @@ import org.elasticsearch.Version;
 import org.elasticsearch.client.Node;
 import org.elasticsearch.client.NodeSelector;
 import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentLocation;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.yaml.YamlXContent;
@@ -214,37 +213,6 @@ public class DoSectionTests extends AbstractClientYamlTestFragmentParserTestCase
         assertThat(apiCallSection.getBodies().size(), equalTo(4));
     }
 
-    public void testParseDoSectionWithJsonMultipleBodiesRepeatedProperty() throws Exception {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-
-        String[] bodies = new String[] {
-            "{ \"index\": { \"_index\":\"test_index\", \"_type\":\"test_type\", \"_id\":\"test_id\" } }",
-            "{ \"f1\":\"v1\", \"f2\":42 }",
-        };
-        parser = createParser(YamlXContent.yamlXContent,
-            "bulk:\n" +
-            " refresh: true\n" +
-            " body: \n" +
-            " " + bodies[0] + "\n" +
-            " body: \n" +
-            " " + bodies[1]
-        );
-
-        DoSection doSection = DoSection.parse(parser);
-        ApiCallSection apiCallSection = doSection.getApiCallSection();
-
-        assertThat(apiCallSection, notNullValue());
-        assertThat(apiCallSection.getApi(), equalTo("bulk"));
-        assertThat(apiCallSection.getParams().size(), equalTo(1));
-        assertThat(apiCallSection.getParams().get("refresh"), equalTo("true"));
-        assertThat(apiCallSection.hasBody(), equalTo(true));
-        assertThat(apiCallSection.getBodies().size(), equalTo(bodies.length));
-        for (int i = 0; i < bodies.length; i++) {
-            assertJsonEquals(apiCallSection.getBodies().get(i), bodies[i]);
-        }
-    }
-
     public void testParseDoSectionWithYamlBody() throws Exception {
         parser = createParser(YamlXContent.yamlXContent,
                 "search:\n" +
@@ -304,41 +272,6 @@ public class DoSectionTests extends AbstractClientYamlTestFragmentParserTestCase
         }
     }
 
-    public void testParseDoSectionWithYamlMultipleBodiesRepeatedProperty() throws Exception {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-
-        parser = createParser(YamlXContent.yamlXContent,
-            "bulk:\n" +
-            " refresh: true\n" +
-            " body:\n" +
-            " index:\n" +
-            " _index: test_index\n" +
-            " _type: test_type\n" +
-            " _id: test_id\n" +
-            " body:\n" +
-            " f1: v1\n" +
-            " f2: 42\n"
-        );
-        String[] bodies = new String[2];
-        bodies[0] = "{\"index\": {\"_index\": \"test_index\", \"_type\": \"test_type\", \"_id\": \"test_id\"}}";
-        bodies[1] = "{ \"f1\":\"v1\", \"f2\": 42 }";
-
-        DoSection doSection = DoSection.parse(parser);
-        ApiCallSection apiCallSection = doSection.getApiCallSection();
-
-        assertThat(apiCallSection, notNullValue());
-        assertThat(apiCallSection.getApi(), equalTo("bulk"));
-        assertThat(apiCallSection.getParams().size(), equalTo(1));
-        assertThat(apiCallSection.getParams().get("refresh"), equalTo("true"));
-        assertThat(apiCallSection.hasBody(), equalTo(true));
-        assertThat(apiCallSection.getBodies().size(), equalTo(bodies.length));
-
-        for (int i = 0; i < bodies.length; i++) {
-            assertJsonEquals(apiCallSection.getBodies().get(i), bodies[i]);
-        }
-    }
-
     public void testParseDoSectionWithYamlBodyMultiGet() throws Exception {
         parser = createParser(YamlXContent.yamlXContent,
                 "mget:\n" +
@@ -69,7 +69,6 @@ public final class ArrayCompareCondition extends AbstractCompareCondition {
         String path = null;
         Op op = null;
         Object value = null;
-        boolean haveValue = false;
         Quantifier quantifier = null;
 
         XContentParser.Token token;
@@ -86,11 +85,6 @@ public final class ArrayCompareCondition extends AbstractCompareCondition {
                 parser.nextToken();
                 path = parser.text();
             } else {
-                if (op != null) {
-                    throw new ElasticsearchParseException("could not parse [{}] condition for watch [{}]. encountered " +
-                        "duplicate comparison operator, but already saw [{}].", TYPE, watchId, parser.currentName(), op
-                        .id());
-                }
                 try {
                     op = Op.resolve(parser.currentName());
                 } catch (IllegalArgumentException iae) {
@@ -102,11 +96,6 @@ public final class ArrayCompareCondition extends AbstractCompareCondition {
             while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                 if (token == XContentParser.Token.FIELD_NAME) {
                     if (parser.currentName().equals("value")) {
-                        if (haveValue) {
-                            throw new ElasticsearchParseException("could not parse [{}] condition for watch [{}]. " +
-                                "encountered duplicate field \"value\", but already saw value [{}].", TYPE,
-                                watchId, value);
-                        }
                         token = parser.nextToken();
                         if (!op.supportsStructures() && !token.isValue() && token != XContentParser.Token.VALUE_NULL) {
                             throw new ElasticsearchParseException("could not parse [{}] condition for watch [{}]. " +
@@ -115,13 +104,7 @@ public final class ArrayCompareCondition extends AbstractCompareCondition {
                                 op.name().toLowerCase(Locale.ROOT), token);
                         }
                         value = XContentUtils.readValue(parser, token);
-                        haveValue = true;
                     } else if (parser.currentName().equals("quantifier")) {
-                        if (quantifier != null) {
-                            throw new ElasticsearchParseException("could not parse [{}] condition for watch [{}]. " +
-                                "encountered duplicate field \"quantifier\", but already saw quantifier [{}].",
-                                TYPE, watchId, quantifier.id());
-                        }
                         parser.nextToken();
                         try {
                             quantifier = Quantifier.resolve(parser.text());
@@ -11,19 +11,18 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
 public class EmailAttachmentsParser {
 
-    private Map<String, EmailAttachmentParser> parsers;
+    private final Map<String, EmailAttachmentParser> parsers;
 
     public EmailAttachmentsParser(Map<String, EmailAttachmentParser> parsers) {
         this.parsers = Collections.unmodifiableMap(parsers);
     }
 
     public EmailAttachments parse(XContentParser parser) throws IOException {
-        Map<String, EmailAttachmentParser.EmailAttachment> attachments = new LinkedHashMap<>();
+        List<EmailAttachmentParser.EmailAttachment> attachments = new ArrayList<>();
         String currentFieldName = null;
         XContentParser.Token token;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -42,18 +41,14 @@ public class EmailAttachmentsParser {
                         throw new ElasticsearchParseException("Cannot parse attachment of type [{}]", currentAttachmentType);
                     }
                     EmailAttachmentParser.EmailAttachment emailAttachment = emailAttachmentParser.parse(currentFieldName, parser);
-                    if (attachments.containsKey(emailAttachment.id())) {
-                        throw new ElasticsearchParseException("Attachment with id [{}] has already been created, must be renamed",
-                            emailAttachment.id());
-                    }
-                    attachments.put(emailAttachment.id(), emailAttachment);
+                    attachments.add(emailAttachment);
                     // one further to skip the end_object from the attachment
                     parser.nextToken();
                 }
             }
         }
 
-        return new EmailAttachments(new ArrayList<>(attachments.values()));
+        return new EmailAttachments(attachments);
     }
 
     public Map<String, EmailAttachmentParser> getParsers() {
@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.condition;
 
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -195,36 +194,6 @@ public class ArrayCompareConditionTests extends ESTestCase {
         assertThat(condition.getQuantifier(), is(quantifier));
     }
 
-    public void testParseContainsDuplicateOperator() throws IOException {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        ArrayCompareCondition.Op op = randomFrom(ArrayCompareCondition.Op.values());
-        ArrayCompareCondition.Quantifier quantifier = randomFrom(ArrayCompareCondition.Quantifier.values());
-        Object value = randomFrom("value", 1, null);
-        XContentBuilder builder =
-            jsonBuilder().startObject()
-                .startObject("key1.key2")
-                    .field("path", "key3.key4")
-                    .startObject(op.id())
-                        .field("value", value)
-                        .field("quantifier", quantifier.id())
-                    .endObject()
-                    .startObject(op.id())
-                        .field("value", value)
-                        .field("quantifier", quantifier.id())
-                    .endObject()
-                .endObject()
-            .endObject();
-
-        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
-        parser.nextToken();
-
-        expectedException.expect(ElasticsearchParseException.class);
-        expectedException.expectMessage("duplicate comparison operator");
-
-        ArrayCompareCondition.parse(ClockMock.frozen(), "_id", parser);
-    }
-
     public void testParseContainsUnknownOperator() throws IOException {
         ArrayCompareCondition.Quantifier quantifier = randomFrom(ArrayCompareCondition.Quantifier.values());
         Object value = randomFrom("value", 1, null);
@@ -248,60 +217,6 @@ public class ArrayCompareConditionTests extends ESTestCase {
         ArrayCompareCondition.parse(ClockMock.frozen(), "_id", parser);
     }
 
-    public void testParseContainsDuplicateValue() throws IOException {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        ArrayCompareCondition.Op op = randomFrom(ArrayCompareCondition.Op.values());
-        ArrayCompareCondition.Quantifier quantifier = randomFrom(ArrayCompareCondition.Quantifier.values());
-        Object value = randomFrom("value", 1, null);
-        XContentBuilder builder =
-            jsonBuilder().startObject()
-                .startObject("key1.key2")
-                    .field("path", "key3.key4")
-                    .startObject(op.id())
-                        .field("value", value)
-                        .field("value", value)
-                        .field("quantifier", quantifier.id())
-                    .endObject()
-                .endObject()
-            .endObject();
-
-        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
-        parser.nextToken();
-
-        expectedException.expect(ElasticsearchParseException.class);
-        expectedException.expectMessage("duplicate field \"value\"");
-
-        ArrayCompareCondition.parse(ClockMock.frozen(), "_id", parser);
-    }
-
-    public void testParseContainsDuplicateQuantifier() throws IOException {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        ArrayCompareCondition.Op op = randomFrom(ArrayCompareCondition.Op.values());
-        ArrayCompareCondition.Quantifier quantifier = randomFrom(ArrayCompareCondition.Quantifier.values());
-        Object value = randomFrom("value", 1, null);
-        XContentBuilder builder =
-            jsonBuilder().startObject()
-                .startObject("key1.key2")
-                    .field("path", "key3.key4")
-                    .startObject(op.id())
-                        .field("value", value)
-                        .field("quantifier", quantifier.id())
-                        .field("quantifier", quantifier.id())
-                    .endObject()
-                .endObject()
-            .endObject();
-
-        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
-        parser.nextToken();
-
-        expectedException.expect(ElasticsearchParseException.class);
-        expectedException.expectMessage("duplicate field \"quantifier\"");
-
-        ArrayCompareCondition.parse(ClockMock.frozen(), "_id", parser);
-    }
-
     public void testParseContainsUnknownQuantifier() throws IOException {
         ArrayCompareCondition.Op op = randomFrom(ArrayCompareCondition.Op.values());
         Object value = randomFrom("value", 1, null);
@@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.ESTestCase;
@@ -29,7 +28,6 @@ import java.util.Map;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.core.Is.is;
@@ -115,42 +113,6 @@ public class EmailAttachmentParsersTests extends ESTestCase {
         }
     }
 
-    public void testThatTwoAttachmentsWithTheSameIdThrowError() throws Exception {
-        assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
-            XContent.isStrictDuplicateDetectionEnabled());
-        Map<String, EmailAttachmentParser> parsers = new HashMap<>();
-        parsers.put("test", new TestEmailAttachmentParser());
-        EmailAttachmentsParser parser = new EmailAttachmentsParser(parsers);
-
-        List<EmailAttachmentParser.EmailAttachment> attachments = new ArrayList<>();
-        attachments.add(new TestEmailAttachment("my-name.json", "value"));
-        attachments.add(new TestEmailAttachment("my-name.json", "value"));
-
-        EmailAttachments emailAttachments = new EmailAttachments(attachments);
-        XContentBuilder builder = jsonBuilder();
-        builder.startObject();
-        emailAttachments.toXContent(builder, ToXContent.EMPTY_PARAMS);
-        builder.endObject();
-        logger.info("JSON is: " + Strings.toString(builder));
-
-        XContentParser xContentParser = createParser(builder);
-        try {
-            XContentParser.Token token = xContentParser.currentToken();
-            assertNull(token);
-
-            token = xContentParser.nextToken();
-            assertThat(token, equalTo(XContentParser.Token.START_OBJECT));
-
-            token = xContentParser.nextToken();
-            assertThat(token, equalTo(XContentParser.Token.FIELD_NAME));
-
-            parser.parse(xContentParser);
-            fail("Expected parser to fail but did not happen");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), is("Attachment with id [my-name.json] has already been created, must be renamed"));
-        }
-    }
-
     public class TestEmailAttachmentParser implements EmailAttachmentParser<TestEmailAttachment> {
 
         @Override