Disallow unquoted field names, fix testcases using unquoted JSON
parent bfc708ee58
commit f157dae053
@@ -50,7 +50,7 @@ public class JsonXContent implements XContent {

     static {
         jsonFactory = new JsonFactory();
-        jsonFactory.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
+        jsonFactory.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, false);
         jsonFactory.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, true);
         jsonFactory.configure(JsonParser.Feature.ALLOW_COMMENTS, true);
         jsonFactory.configure(JsonFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now...
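The change above flips ALLOW_UNQUOTED_FIELD_NAMES to false, so Jackson now rejects field names that are not double-quoted. A minimal, hypothetical sketch of that behaviour with a plain Jackson 2.x JsonFactory configured the same way (not part of this commit; the class name and input strings are made up for illustration):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParseException;
    import com.fasterxml.jackson.core.JsonParser;

    public class UnquotedFieldNamesSketch {
        public static void main(String[] args) throws Exception {
            JsonFactory factory = new JsonFactory();
            // same setting as in the diff above: unquoted field names are no longer tolerated
            factory.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, false);

            // an unquoted field name now fails to parse
            try (JsonParser parser = factory.createParser("{ type1 : { \"id\" : \"1\" } }")) {
                while (parser.nextToken() != null) {
                    // the parser throws as soon as it reaches the unquoted name "type1"
                }
            } catch (JsonParseException e) {
                System.out.println("rejected: " + e.getOriginalMessage());
            }

            // strictly quoted JSON still parses
            try (JsonParser parser = factory.createParser("{ \"type1\" : { \"id\" : \"1\" } }")) {
                while (parser.nextToken() != null) {
                    // consume all tokens without error
                }
            }
        }
    }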
@@ -69,6 +69,6 @@ public class SimpleDataNodesIT extends ESIntegTestCase {
     }

     private String source(String id, String nameValue) {
-        return "{ type1 : { \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" } }";
+        return "{ \"type1\" : { \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" } }";
     }
 }
@@ -53,7 +53,7 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase {
             fail("can't index, does not match consistency");
         } catch (UnavailableShardsException e) {
             assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
-            assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [QUORUM] (have 1, needed 2). Timeout: [100ms], request: [index {[test][type1][1], source[{ type1 : { \"id\" : \"1\", \"name\" : \"test\" } }]}]"));
+            assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [QUORUM] (have 1, needed 2). Timeout: [100ms], request: [index {[test][type1][1], source[{ \"type1\" : { \"id\" : \"1\", \"name\" : \"test\" } }]}]"));
             // but really, all is well
         }

@@ -76,7 +76,7 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase {
             fail("can't index, does not match consistency");
         } catch (UnavailableShardsException e) {
             assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
-            assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [ALL] (have 2, needed 3). Timeout: [100ms], request: [index {[test][type1][1], source[{ type1 : { \"id\" : \"1\", \"name\" : \"test\" } }]}]"));
+            assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [ALL] (have 2, needed 3). Timeout: [100ms], request: [index {[test][type1][1], source[{ \"type1\" : { \"id\" : \"1\", \"name\" : \"test\" } }]}]"));
             // but really, all is well
         }

@@ -93,6 +93,6 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase {
     }

     private String source(String id, String nameValue) {
-        return "{ type1 : { \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" } }";
+        return "{ \"type1\" : { \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" } }";
     }
 }
@@ -133,22 +133,22 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde
                 " \"bool\" : {\n";
         if (tempQueryBuilder.must().size() > 0) {
             QueryBuilder<?> must = tempQueryBuilder.must().get(0);
-            contentString += "must: " + must.toString() + ",";
+            contentString += "\"must\": " + must.toString() + ",";
             expectedQuery.must(must);
         }
         if (tempQueryBuilder.mustNot().size() > 0) {
             QueryBuilder<?> mustNot = tempQueryBuilder.mustNot().get(0);
-            contentString += (randomBoolean() ? "must_not: " : "mustNot: ") + mustNot.toString() + ",";
+            contentString += (randomBoolean() ? "\"must_not\": " : "\"mustNot\": ") + mustNot.toString() + ",";
             expectedQuery.mustNot(mustNot);
         }
         if (tempQueryBuilder.should().size() > 0) {
             QueryBuilder<?> should = tempQueryBuilder.should().get(0);
-            contentString += "should: " + should.toString() + ",";
+            contentString += "\"should\": " + should.toString() + ",";
             expectedQuery.should(should);
         }
         if (tempQueryBuilder.filter().size() > 0) {
             QueryBuilder<?> filter = tempQueryBuilder.filter().get(0);
-            contentString += "filter: " + filter.toString() + ",";
+            contentString += "\"filter\": " + filter.toString() + ",";
             expectedQuery.filter(filter);
         }
         contentString = contentString.substring(0, contentString.length() - 1);
@@ -108,6 +108,6 @@ public class SimpleRecoveryIT extends ESIntegTestCase {
     }

     private String source(String id, String nameValue) {
-        return "{ type1 : { \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" } }";
+        return "{ \"type1\" : { \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" } }";
     }
 }
@@ -1,10 +1,10 @@
 {
-    test1:{
-        value1:"value1",
-        test2:{
-            value2:"value2",
-            value3:2
+    "test1":{
+        "value1":"value1",
+        "test2":{
+            "value2":"value2",
+            "value3":2
         },
-        test3:["test3-1", "test3-2"]
+        "test3":["test3-1", "test3-2"]
     }
 }
@@ -1,9 +1,9 @@
 {
-    person:{
-        properties:{
+    "person":{
+        "properties":{
             "name":{
                 "type": "text",
-                store:true
+                "store":true
             }
         }
     }
@@ -14,13 +14,13 @@
             },
             "not_indexed":{
                 "type": "text",
-                index:false,
-                store:true
+                "index":false,
+                "store":true
             },
             "not_indexed2":{
                 "type": "text",
-                index:false,
-                store:true
+                "index":false,
+                "store":true
             }
         }
     }
@@ -1,14 +1,14 @@
 {
-    person:{
-        properties:{
+    "person":{
+        "properties":{
             "name":{
                 "type": "text",
-                store:true,
+                "store":true,
                 "fields":{
                     "not_indexed3":{
                         "type": "text",
-                        index:false,
-                        store:true
+                        "index":false,
+                        "store":true
                     }
                 }
             }
@@ -1,17 +1,17 @@
 {
-    person:{
-        properties:{
+    "person":{
+        "properties":{
             "name":{
                 "type": "text",
-                store:true,
+                "store":true,
                 "fields":{
                     "indexed":{
                         "type": "text"
                     },
                     "not_indexed":{
                         "type": "text",
-                        index:false,
-                        store:true
+                        "index":false,
+                        "store":true
                     }
                 }
             }
@@ -1,22 +1,22 @@
 {
-    person:{
-        properties:{
+    "person":{
+        "properties":{
             "name":{
                 "type": "text",
-                store:true,
+                "store":true,
                 "fields":{
                     "indexed":{
                         "type": "text"
                     },
                     "not_indexed":{
                         "type": "text",
-                        index:false,
-                        store:true
+                        "index":false,
+                        "store":true
                     },
                     "not_indexed2":{
                         "type": "text",
-                        index:false,
-                        store:true
+                        "index":false,
+                        "store":true
                     }
                 }
             }
@@ -1,14 +1,14 @@
 {
-    person:{
-        properties:{
+    "person":{
+        "properties":{
             "name":{
                 "type": "text",
-                index:false,
+                "index":false,
                 "fields":{
                     "not_indexed3":{
                         "type": "text",
-                        index:false,
-                        store:true
+                        "index":false,
+                        "store":true
                     }
                 }
             }
@@ -1,79 +1,79 @@
 {
-    person:{
+    "person":{
         "_meta":{
             "param1":"value1"
         },
-        date_formats:["yyyy-MM-dd", "dd-MM-yyyy"],
-        dynamic:false,
-        enabled:true,
-        _source:{
+        "date_formats":["yyyy-MM-dd", "dd-MM-yyyy"],
+        "dynamic":false,
+        "enabled":true,
+        "_source":{
         },
-        properties:{
-            name:{
-                type:"object",
-                dynamic:false,
-                properties:{
-                    first:{
+        "properties":{
+            "name":{
+                "type":"object",
+                "dynamic":false,
+                "properties":{
+                    "first":{
                         "type": "text",
-                        store:true
+                        "store":true
                     },
-                    last:{
+                    "last":{
                         "type": "keyword"
                     }
                 }
             },
-            address:{
-                type:"object",
-                properties:{
-                    first:{
-                        properties:{
-                            location:{
+            "address":{
+                "type":"object",
+                "properties":{
+                    "first":{
+                        "properties":{
+                            "location":{
                                 "type": "text",
-                                store:true
+                                "store":true
                             }
                         }
                     },
-                    last:{
-                        properties:{
-                            location:{
+                    "last":{
+                        "properties":{
+                            "location":{
                                 "type": "text"
                             }
                         }
                     }
                 }
             },
-            age:{
-                type:"integer",
-                null_value:0
+            "age":{
+                "type":"integer",
+                "null_value":0
             },
-            birthdate:{
-                type:"date",
-                format:"yyyy-MM-dd"
+            "birthdate":{
+                "type":"date",
+                "format":"yyyy-MM-dd"
             },
-            nerd:{
-                type:"boolean"
+            "nerd":{
+                "type":"boolean"
             },
-            dogs:{
+            "dogs":{
                 "type": "text"
             },
-            complex:{
-                type:"object",
-                properties:{
-                    value1:{
+            "complex":{
+                "type":"object",
+                "properties":{
+                    "value1":{
                         "type": "text"
                     },
-                    value2:{
+                    "value2":{
                         "type": "text"
                     }
                 }
             },
-            complex2:{
-                type:"object",
-                properties:{
-                    value1:{
+            "complex2":{
+                "type":"object",
+                "properties":{
+                    "value1":{
                         "type": "text"
                     },
-                    value2:{
+                    "value2":{
                         "type": "text"
                     }
                 }
@@ -1,39 +1,39 @@
 {
-    name:{
-        first:"shay",
-        last:"banon"
+    "name":{
+        "first":"shay",
+        "last":"banon"
     },
-    address:{
-        first:{
-            location:"first location"
+    "address":{
+        "first":{
+            "location":"first location"
         },
-        last:{
-            location:"last location"
+        "last":{
+            "location":"last location"
         }
     },
-    age:32,
-    birthDate:"1977-11-15",
-    nerd:true,
-    dogs:["buck", "mia"],
-    complex:[
+    "age":32,
+    "birthDate":"1977-11-15",
+    "nerd":true,
+    "dogs":["buck", "mia"],
+    "complex":[
         {
-            value1:"value1"
+            "value1":"value1"
         },
         {
-            value2:"value2"
+            "value2":"value2"
         }
     ],
-    complex2:[
+    "complex2":[
         [
             {
-                value1:"value1"
+                "value1":"value1"
             }
         ],
         [
             {
-                value2:"value2"
+                "value2":"value2"
             }
         ]
     ],
-    nullValue:null
+    "nullValue":null
 }
@@ -1,40 +1,40 @@
 {
-    _id:"1",
-    name:{
-        first:"shay",
-        last:"banon"
+    "_id":"1",
+    "name":{
+        "first":"shay",
+        "last":"banon"
     },
-    address:{
-        first:{
-            location:"first location"
+    "address":{
+        "first":{
+            "location":"first location"
         },
-        last:{
-            location:"last location"
+        "last":{
+            "location":"last location"
        }
     },
-    age:32,
-    birthDate:"1977-11-15",
-    nerd:true,
-    dogs:["buck", "mia"],
-    complex:[
+    "age":32,
+    "birthDate":"1977-11-15",
+    "nerd":true,
+    "dogs":["buck", "mia"],
+    "complex":[
         {
-            value1:"value1"
+            "value1":"value1"
         },
         {
-            value2:"value2"
+            "value2":"value2"
         }
     ],
-    complex2:[
+    "complex2":[
         [
             {
-                value1:"value1"
+                "value1":"value1"
            }
         ],
         [
             {
-                value2:"value2"
+                "value2":"value2"
             }
         ]
     ],
-    nullValue:null
+    "nullValue":null
 }
@@ -1,39 +1,39 @@
 {
-    name:{
-        first:"shay",
-        last:"banon"
+    "name":{
+        "first":"shay",
+        "last":"banon"
     },
-    address:{
-        first:{
-            location:"first location"
+    "address":{
+        "first":{
+            "location":"first location"
         },
-        last:{
-            location:"last location"
+        "last":{
+            "location":"last location"
         }
     },
-    age:32,
-    birthDate:"1977-11-15",
-    nerd:true,
-    dogs:["buck", "mia"],
-    complex:[
+    "age":32,
+    "birthDate":"1977-11-15",
+    "nerd":true,
+    "dogs":["buck", "mia"],
+    "complex":[
         {
-            value1:"value1"
+            "value1":"value1"
         },
         {
-            value2:"value2"
+            "value2":"value2"
         }
     ],
-    complex2:[
+    "complex2":[
         [
             {
-                value1:"value1"
+                "value1":"value1"
             }
         ],
         [
             {
-                value2:"value2"
+                "value2":"value2"
             }
         ]
     ],
-    nullValue:null
+    "nullValue":null
 }
Binary file not shown.
@@ -35,3 +35,8 @@ Node roles are now returned in a specific section, called `roles`, as part of
 nodes stats and nodes info response. The new section is an array that holds all
 the different roles that each node fulfills. In case the array is returned
 empty, that means that the node is a coordinating only node.
+
+==== Forbid unquoted JSON
+
+Previously, JSON documents were allowed with unquoted field names, which
+isn't strictly JSON and broke some Elasticsearch clients.
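Clients that previously sent unquoted bodies such as { type1 : { ... } } now need to quote every field name. One way to avoid the problem entirely is to build the body with a JSON library instead of string concatenation, since generated output always quotes names. A hypothetical sketch using Jackson's ObjectMapper (not part of this commit; the class name and field values are illustrative):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    public class QuotedBodySketch {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            ObjectNode doc = mapper.createObjectNode();
            // putObject creates the nested "type1" object; generated field names are always quoted
            doc.putObject("type1").put("id", "1").put("name", "test");
            String source = mapper.writeValueAsString(doc);
            System.out.println(source); // {"type1":{"id":"1","name":"test"}}
        }
    }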
@@ -82,15 +82,15 @@
         pipeline: "my_pipeline"
         body: >
           {
-            field_to_rename: "value",
-            field_to_remove: "old_value",
-            field_to_lowercase: "LOWERCASE",
-            field_to_uppercase: "uppercase",
-            field_to_trim: " trimmed ",
-            field_to_split: "127-0-0-1",
-            field_to_join: ["127","0","0","1"],
-            field_to_convert: ["127","0","0","1"],
-            field_to_gsub: "127-0-0-1"
+            "field_to_rename": "value",
+            "field_to_remove": "old_value",
+            "field_to_lowercase": "LOWERCASE",
+            "field_to_uppercase": "uppercase",
+            "field_to_trim": " trimmed ",
+            "field_to_split": "127-0-0-1",
+            "field_to_join": ["127","0","0","1"],
+            "field_to_convert": ["127","0","0","1"],
+            "field_to_gsub": "127-0-0-1"
           }

   - do:
@@ -31,7 +31,7 @@
         pipeline: "my_pipeline"
         body: >
           {
-            values: ["foo", "bar", "baz"]
+            "values": ["foo", "bar", "baz"]
           }

   - do:
@@ -25,7 +25,7 @@
       search:
         index: test
        filter_path: "*"
-        body: "{ query: { match_all: {} } }"
+        body: "{ \"query\": { \"match_all\": {} } }"

  - gte: { took: 0 }
  - is_true: _shards.total

@@ -41,7 +41,7 @@
       search:
         index: test
        filter_path: "took"
-        body: "{ query: { match_all: {} } }"
+        body: "{ \"query\": { \"match_all\": {} } }"

  - is_true: took
  - is_false: _shards.total

@@ -57,7 +57,7 @@
       search:
         index: test
        filter_path: "_shards.*"
-        body: "{ query: { match_all: {} } }"
+        body: "{ \"query\": { \"match_all\": {} } }"

  - is_false: took
  - is_true: _shards.total

@@ -73,7 +73,7 @@
       search:
         index: test
        filter_path: [ "hits.**._i*", "**.total" ]
-        body: "{ query: { match_all: {} } }"
+        body: "{ \"query\": { \"match_all\": {} } }"

  - is_false: took
  - is_true: _shards.total
@@ -160,8 +160,8 @@ public class DoSectionParserTests extends AbstractParserTestCase {
         parser = YamlXContent.yamlXContent.createParser(
                 "search:\n" +
                 " body:\n" +
-                " _source: [ include.field1, include.field2 ]\n" +
-                " query: { match_all: {} }"
+                " \"_source\": [ include.field1, include.field2 ]\n" +
+                " \"query\": { \"match_all\": {} }"
         );
         String body = "{ \"_source\": [ \"include.field1\", \"include.field2\" ], \"query\": { \"match_all\": {} }}";

@@ -281,7 +281,7 @@ public class DoSectionParserTests extends AbstractParserTestCase {
                 " index: test_1\n" +
                 " type: test\n" +
                 " id: 1\n" +
-                " body: \"{ _source: true, query: { match_all: {} } }\""
+                " body: \"{ \\\"_source\\\": true, \\\"query\\\": { \\\"match_all\\\": {} } }\""
         );

         DoSectionParser doSectionParser = new DoSectionParser();

@@ -297,14 +297,14 @@ public class DoSectionParserTests extends AbstractParserTestCase {
         assertThat(apiCallSection.hasBody(), equalTo(true));
         assertThat(apiCallSection.getBodies().size(), equalTo(1));
         //stringified body is taken as is
-        assertJsonEquals(apiCallSection.getBodies().get(0), "{ _source: true, query: { match_all: {} } }");
+        assertJsonEquals(apiCallSection.getBodies().get(0), "{ \"_source\": true, \"query\": { \"match_all\": {} } }");
     }

     public void testParseDoSectionWithBodiesStringifiedAndNot() throws Exception {
         parser = YamlXContent.yamlXContent.createParser(
                 "index:\n" +
                 " body:\n" +
-                " - \"{ _source: true, query: { match_all: {} } }\"\n" +
+                " - \"{ \\\"_source\\\": true, \\\"query\\\": { \\\"match_all\\\": {} } }\"\n" +
                 " - { size: 100, query: { match_all: {} } }"
         );

@@ -319,7 +319,7 @@ public class DoSectionParserTests extends AbstractParserTestCase {
         assertThat(apiCallSection.hasBody(), equalTo(true));
         assertThat(apiCallSection.getBodies().size(), equalTo(2));
         //stringified body is taken as is
-        assertJsonEquals(apiCallSection.getBodies().get(0), "{ _source: true, query: { match_all: {} } }");
+        assertJsonEquals(apiCallSection.getBodies().get(0), "{ \"_source\": true, \"query\": { \"match_all\": {} } }");
         assertJsonEquals(apiCallSection.getBodies().get(1), body);
     }