[TEST] improve yaml test sections parsing (#23407)

Throw an error when skip or do sections are malformed, for instance when they don't start with the proper token (START_OBJECT). That signals bad indentation, which would otherwise be silently ignored. Thanks to (or rather due to) our pull parsing code, we were still able to parse such sections correctly, yet other runners weren't able to.
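
For illustration, a minimal sketch of the kind of bad indentation this now rejects (the index name is made up; skip sections get the same check). When the body of a do section sits at the same level as the "do" key, YAML parses "do" as a null value rather than an object, so the runner now fails with a clear error instead of relying on the pull parser to recover:

    # malformed: indices.create is a sibling of "do", so "do" parses as null (VALUE_NULL)
    - do:
      indices.create:
        index: some_index

    # properly indented: the body is nested under "do" and starts with START_OBJECT
    - do:
        indices.create:
          index: some_index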

Closes #21980

* [TEST] fix indentation in matrix_stats yaml tests

* [TEST] fix indentation in painless yaml test

* [TEST] fix indentation in analysis yaml tests

* [TEST] fix indentation in generated docs yaml tests

* [TEST] fix indentation in multi_cluster_search yaml tests
Luca Cavanna 2017-03-02 12:43:20 +01:00 committed by GitHub
parent 15c936ec02
commit cc65a94fd4
19 changed files with 574 additions and 538 deletions

@@ -176,28 +176,28 @@ buildRestTests.docs = fileTree(projectDir) {
Closure setupTwitter = { String name, int count ->
buildRestTests.setups[name] = '''
- do:
indices.create:
index: twitter
body:
settings:
number_of_shards: 1
number_of_replicas: 1
mappings:
tweet:
properties:
user:
type: keyword
doc_values: true
date:
type: date
likes:
type: long
indices.create:
index: twitter
body:
settings:
number_of_shards: 1
number_of_replicas: 1
mappings:
tweet:
properties:
user:
type: keyword
doc_values: true
date:
type: date
likes:
type: long
- do:
bulk:
index: twitter
type: tweet
refresh: true
body: |'''
bulk:
index: twitter
type: tweet
refresh: true
body: |'''
for (int i = 0; i < count; i++) {
String user, text
if (i == 0) {
@@ -208,8 +208,8 @@ Closure setupTwitter = { String name, int count ->
text = "some message with the number $i"
}
buildRestTests.setups[name] += """
{"index":{"_id": "$i"}}
{"user": "$user", "message": "$text", "date": "2009-11-15T14:12:12", "likes": $i}"""
{"index":{"_id": "$i"}}
{"user": "$user", "message": "$text", "date": "2009-11-15T14:12:12", "likes": $i}"""
}
}
setupTwitter('twitter', 5)
@@ -231,79 +231,79 @@ buildRestTests.setups['host'] = '''
// Used by scripted metric docs
buildRestTests.setups['ledger'] = '''
- do:
indices.create:
index: ledger
body:
settings:
number_of_shards: 2
number_of_replicas: 1
mappings:
sale:
properties:
type:
type: keyword
amount:
type: double
indices.create:
index: ledger
body:
settings:
number_of_shards: 2
number_of_replicas: 1
mappings:
sale:
properties:
type:
type: keyword
amount:
type: double
- do:
bulk:
index: ledger
type: item
refresh: true
body: |
{"index":{}}
{"date": "2015/01/01 00:00:00", "amount": 200, "type": "sale", "description": "something"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "amount": 10, "type": "expense", "decription": "another thing"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "amount": 150, "type": "sale", "description": "blah"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "amount": 50, "type": "expense", "description": "cost of blah"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "amount": 50, "type": "expense", "description": "advertisement"}'''
bulk:
index: ledger
type: item
refresh: true
body: |
{"index":{}}
{"date": "2015/01/01 00:00:00", "amount": 200, "type": "sale", "description": "something"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "amount": 10, "type": "expense", "decription": "another thing"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "amount": 150, "type": "sale", "description": "blah"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "amount": 50, "type": "expense", "description": "cost of blah"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "amount": 50, "type": "expense", "description": "advertisement"}'''
// Used by aggregation docs
buildRestTests.setups['sales'] = '''
- do:
indices.create:
index: sales
body:
settings:
number_of_shards: 2
number_of_replicas: 1
mappings:
sale:
properties:
type:
type: keyword
indices.create:
index: sales
body:
settings:
number_of_shards: 2
number_of_replicas: 1
mappings:
sale:
properties:
type:
type: keyword
- do:
bulk:
index: sales
type: sale
refresh: true
body: |
{"index":{}}
{"date": "2015/01/01 00:00:00", "price": 200, "promoted": true, "rating": 1, "type": "hat"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "price": 200, "promoted": true, "rating": 1, "type": "t-shirt"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "price": 150, "promoted": true, "rating": 5, "type": "bag"}
{"index":{}}
{"date": "2015/02/01 00:00:00", "price": 50, "promoted": false, "rating": 1, "type": "hat"}
{"index":{}}
{"date": "2015/02/01 00:00:00", "price": 10, "promoted": true, "rating": 4, "type": "t-shirt"}
{"index":{}}
{"date": "2015/03/01 00:00:00", "price": 200, "promoted": true, "rating": 1, "type": "hat"}
{"index":{}}
{"date": "2015/03/01 00:00:00", "price": 175, "promoted": false, "rating": 2, "type": "t-shirt"}'''
bulk:
index: sales
type: sale
refresh: true
body: |
{"index":{}}
{"date": "2015/01/01 00:00:00", "price": 200, "promoted": true, "rating": 1, "type": "hat"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "price": 200, "promoted": true, "rating": 1, "type": "t-shirt"}
{"index":{}}
{"date": "2015/01/01 00:00:00", "price": 150, "promoted": true, "rating": 5, "type": "bag"}
{"index":{}}
{"date": "2015/02/01 00:00:00", "price": 50, "promoted": false, "rating": 1, "type": "hat"}
{"index":{}}
{"date": "2015/02/01 00:00:00", "price": 10, "promoted": true, "rating": 4, "type": "t-shirt"}
{"index":{}}
{"date": "2015/03/01 00:00:00", "price": 200, "promoted": true, "rating": 1, "type": "hat"}
{"index":{}}
{"date": "2015/03/01 00:00:00", "price": 175, "promoted": false, "rating": 2, "type": "t-shirt"}'''
// Dummy bank account data used by getting-started.asciidoc
buildRestTests.setups['bank'] = '''
- do:
bulk:
index: bank
type: account
refresh: true
body: |
bulk:
index: bank
type: account
refresh: true
body: |
#bank_data#
'''
/* Load the actual accounts only if we're going to use them. This complicates
@@ -314,35 +314,35 @@ buildRestTests.inputs.file(accountsFile)
buildRestTests.doFirst {
String accounts = accountsFile.getText('UTF-8')
// Indent like a yaml test needs
accounts = accounts.replaceAll('(?m)^', ' ')
accounts = accounts.replaceAll('(?m)^', ' ')
buildRestTests.setups['bank'] =
buildRestTests.setups['bank'].replace('#bank_data#', accounts)
}
buildRestTests.setups['range_index'] = '''
- do :
indices.create:
index: range_index
body:
settings:
number_of_shards: 2
number_of_replicas: 1
mappings:
my_type:
properties:
expected_attendees:
type: integer_range
time_frame:
type: date_range
format: yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis
indices.create:
index: range_index
body:
settings:
number_of_shards: 2
number_of_replicas: 1
mappings:
my_type:
properties:
expected_attendees:
type: integer_range
time_frame:
type: date_range
format: yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis
- do:
bulk:
index: range_index
type: my_type
refresh: true
body: |
{"index":{"_id": 1}}
{"expected_attendees": {"gte": 10, "lte": 20}, "time_frame": {"gte": "2015-10-31 12:00:00", "lte": "2015-11-01"}}'''
bulk:
index: range_index
type: my_type
refresh: true
body: |
{"index":{"_id": 1}}
{"expected_attendees": {"gte": 10, "lte": 20}, "time_frame": {"gte": "2015-10-31 12:00:00", "lte": "2015-11-01"}}'''
// Used by index boost doc
buildRestTests.setups['index_boost'] = '''
@@ -361,25 +361,25 @@ buildRestTests.setups['index_boost'] = '''
// Used by sampler and diversified-sampler aggregation docs
buildRestTests.setups['stackoverflow'] = '''
- do:
indices.create:
index: stackoverflow
body:
settings:
number_of_shards: 1
number_of_replicas: 1
mappings:
question:
properties:
author:
type: keyword
tags:
type: keyword
indices.create:
index: stackoverflow
body:
settings:
number_of_shards: 1
number_of_replicas: 1
mappings:
question:
properties:
author:
type: keyword
tags:
type: keyword
- do:
bulk:
index: stackoverflow
type: question
refresh: true
body: |'''
bulk:
index: stackoverflow
type: question
refresh: true
body: |'''
// Make Kibana strongly connected to elasticsearch and logstash
// Make Kibana rarer (and therefore higher-ranking) than Javascript
@@ -388,28 +388,28 @@ buildRestTests.setups['stackoverflow'] = '''
for (int i = 0; i < 150; i++) {
buildRestTests.setups['stackoverflow'] += """
{"index":{}}
{"author": "very_relevant_$i", "tags": ["elasticsearch", "kibana"]}"""
{"index":{}}
{"author": "very_relevant_$i", "tags": ["elasticsearch", "kibana"]}"""
}
for (int i = 0; i < 50; i++) {
buildRestTests.setups['stackoverflow'] += """
{"index":{}}
{"author": "very_relevant_$i", "tags": ["logstash", "kibana"]}"""
{"index":{}}
{"author": "very_relevant_$i", "tags": ["logstash", "kibana"]}"""
}
for (int i = 0; i < 200; i++) {
buildRestTests.setups['stackoverflow'] += """
{"index":{}}
{"author": "partially_relevant_$i", "tags": ["javascript", "jquery"]}"""
{"index":{}}
{"author": "partially_relevant_$i", "tags": ["javascript", "jquery"]}"""
}
for (int i = 0; i < 200; i++) {
buildRestTests.setups['stackoverflow'] += """
{"index":{}}
{"author": "partially_relevant_$i", "tags": ["javascript", "angular"]}"""
{"index":{}}
{"author": "partially_relevant_$i", "tags": ["javascript", "angular"]}"""
}
for (int i = 0; i < 50; i++) {
buildRestTests.setups['stackoverflow'] += """
{"index":{}}
{"author": "noisy author", "tags": ["elasticsearch", "cabana"]}"""
{"index":{}}
{"author": "noisy author", "tags": ["elasticsearch", "cabana"]}"""
}
buildRestTests.setups['stackoverflow'] += """
"""
@@ -417,24 +417,24 @@ buildRestTests.setups['stackoverflow'] += """
// Used by some aggregations
buildRestTests.setups['exams'] = '''
- do:
indices.create:
index: exams
body:
settings:
number_of_shards: 1
number_of_replicas: 1
mappings:
exam:
properties:
grade:
type: byte
indices.create:
index: exams
body:
settings:
number_of_shards: 1
number_of_replicas: 1
mappings:
exam:
properties:
grade:
type: byte
- do:
bulk:
index: exams
type: exam
refresh: true
body: |
{"index":{}}
{"grade": 100}
{"index":{}}
{"grade": 50}'''
bulk:
index: exams
type: exam
refresh: true
body: |
{"index":{}}
{"grade": 100}
{"index":{}}
{"grade": 50}'''

@@ -1,41 +1,41 @@
---
"Empty Bucket Aggregation":
- do:
indices.create:
index: empty_bucket_idx
body:
settings:
number_of_shards: "3"
mappings:
test:
"properties":
"value":
"type": "integer"
"val1":
"type": "double"
indices.create:
index: empty_bucket_idx
body:
settings:
number_of_shards: "3"
mappings:
test:
"properties":
"value":
"type": "integer"
"val1":
"type": "double"
- do:
index:
index: empty_bucket_idx
type: test
id: 1
body: { "value": 0, "val1": 3.1 }
index:
index: empty_bucket_idx
type: test
id: 1
body: { "value": 0, "val1": 3.1 }
- do:
index:
index: empty_bucket_idx
type: test
id: 2
body: { "value": 2, "val1": -3.1 }
index:
index: empty_bucket_idx
type: test
id: 2
body: { "value": 2, "val1": -3.1 }
- do:
indices.refresh:
index: [empty_bucket_idx]
indices.refresh:
index: [empty_bucket_idx]
- do:
search:
index: empty_bucket_idx
type: test
search:
index: empty_bucket_idx
type: test
- match: {hits.total: 2}

@@ -2,122 +2,122 @@
setup:
- do:
indices.create:
index: test
body:
settings:
number_of_shards: 3
mappings:
test:
"properties":
"val1":
"type": "double"
"val2":
"type": "double"
"val3":
"type": "double"
indices.create:
index: test
body:
settings:
number_of_shards: 3
mappings:
test:
"properties":
"val1":
"type": "double"
"val2":
"type": "double"
"val3":
"type": "double"
- do:
indices.create:
index: unmapped
body:
settings:
number_of_shards: 3
indices.create:
index: unmapped
body:
settings:
number_of_shards: 3
- do:
index:
index: test
type: test
id: 1
body: { "val1": 1.9, "val2": 3.1, "val3": 2.3 }
index:
index: test
type: test
id: 1
body: { "val1": 1.9, "val2": 3.1, "val3": 2.3 }
- do:
index:
index: test
type: test
id: 2
body: { "val1": -5.2, "val2": -3.4, "val3": 2.3}
index:
index: test
type: test
id: 2
body: { "val1": -5.2, "val2": -3.4, "val3": 2.3}
- do:
index:
index: test
type: test
id: 3
body: { "val1": -5.2, "val3": 2.3}
index:
index: test
type: test
id: 3
body: { "val1": -5.2, "val3": 2.3}
- do:
index:
index: test
type: test
id: 4
body: { "val1": 18.3, "val2": 104.4, "val3": 2.3}
index:
index: test
type: test
id: 4
body: { "val1": 18.3, "val2": 104.4, "val3": 2.3}
- do:
index:
index: test
type: test
id: 5
body: { "val1": -53.2, "val2": -322.4, "val3": 2.3}
index:
index: test
type: test
id: 5
body: { "val1": -53.2, "val2": -322.4, "val3": 2.3}
- do:
index:
index: test
type: test
id: 6
body: { "val1": -578.9, "val2": 69.9, "val3": 2.3}
index:
index: test
type: test
id: 6
body: { "val1": -578.9, "val2": 69.9, "val3": 2.3}
- do:
index:
index: test
type: test
id: 7
body: { "val1": 16.2, "val2": 17.2, "val3": 2.3}
index:
index: test
type: test
id: 7
body: { "val1": 16.2, "val2": 17.2, "val3": 2.3}
- do:
index:
index: test
type: test
id: 8
body: { "val1": -4222.63, "val2": 316.44, "val3": 2.3}
index:
index: test
type: test
id: 8
body: { "val1": -4222.63, "val2": 316.44, "val3": 2.3}
- do:
index:
index: test
type: test
id: 9
body: { "val1": -59999.55, "val2": -3163.4, "val3": 2.3}
index:
index: test
type: test
id: 9
body: { "val1": -59999.55, "val2": -3163.4, "val3": 2.3}
- do:
index:
index: test
type: test
id: 10
body: { "val1": 782.7, "val2": 789.7, "val3": 2.3}
index:
index: test
type: test
id: 10
body: { "val1": 782.7, "val2": 789.7, "val3": 2.3}
- do:
index:
index: test
type: test
id: 11
body: { "val1": -1.2, "val2": 6.3, "val3": 2.3}
index:
index: test
type: test
id: 11
body: { "val1": -1.2, "val2": 6.3, "val3": 2.3}
- do:
index:
index: test
type: test
id: 12
body: { "val1": 0, "val2": 1.11, "val3": 2.3}
index:
index: test
type: test
id: 12
body: { "val1": 0, "val2": 1.11, "val3": 2.3}
- do:
index:
index: test
type: test
id: 13
body: { "val1": 0.1, "val2": 0.92, "val3": 2.3}
index:
index: test
type: test
id: 13
body: { "val1": 0.1, "val2": 0.92, "val3": 2.3}
- do:
index:
index: test
type: test
id: 14
body: { "val1": 0.12, "val2": -82.4, "val3": 2.3}
index:
index: test
type: test
id: 14
body: { "val1": 0.12, "val2": -82.4, "val3": 2.3}
- do:
index:
index: test
type: test
id: 15
body: { "val1": 98.2, "val2": 32.4, "val3": 2.3}
index:
index: test
type: test
id: 15
body: { "val1": 98.2, "val2": 32.4, "val3": 2.3}
- do:
indices.refresh:
index: [test, unmapped]
indices.refresh:
index: [test, unmapped]
- do:
cluster.health:

@@ -2,122 +2,122 @@
setup:
- do:
indices.create:
index: test
body:
settings:
number_of_shards: 3
mappings:
test:
"properties":
"val1":
"type": "double"
"val2":
"type": "double"
"val3":
"type": "double"
indices.create:
index: test
body:
settings:
number_of_shards: 3
mappings:
test:
"properties":
"val1":
"type": "double"
"val2":
"type": "double"
"val3":
"type": "double"
- do:
indices.create:
index: unmapped
body:
settings:
number_of_shards: 3
indices.create:
index: unmapped
body:
settings:
number_of_shards: 3
- do:
index:
index: test
type: test
id: 1
body: { "val1": 1.9, "val2": 3.1, "val3": 2.3, "vals" : [1.9, 16.143] }
index:
index: test
type: test
id: 1
body: { "val1": 1.9, "val2": 3.1, "val3": 2.3, "vals" : [1.9, 16.143] }
- do:
index:
index: test
type: test
id: 2
body: { "val1": -5.2, "val2": -3.4, "val3": 2.3, "vals" : [155, 16.23]}
index:
index: test
type: test
id: 2
body: { "val1": -5.2, "val2": -3.4, "val3": 2.3, "vals" : [155, 16.23]}
- do:
index:
index: test
type: test
id: 3
body: { "val1": -5.2, "val3": 2.3, "vals" : [-455, -32.32]}
index:
index: test
type: test
id: 3
body: { "val1": -5.2, "val3": 2.3, "vals" : [-455, -32.32]}
- do:
index:
index: test
type: test
id: 4
body: { "val1": 18.3, "val2": 104.4, "val3": 2.3, "vals" : [0.14, 92.1]}
index:
index: test
type: test
id: 4
body: { "val1": 18.3, "val2": 104.4, "val3": 2.3, "vals" : [0.14, 92.1]}
- do:
index:
index: test
type: test
id: 5
body: { "val1": -53.2, "val2": -322.4, "val3": 2.3, "vals" : [16, 16]}
index:
index: test
type: test
id: 5
body: { "val1": -53.2, "val2": -322.4, "val3": 2.3, "vals" : [16, 16]}
- do:
index:
index: test
type: test
id: 6
body: { "val1": -578.9, "val2": 69.9, "val3": 2.3}
index:
index: test
type: test
id: 6
body: { "val1": -578.9, "val2": 69.9, "val3": 2.3}
- do:
index:
index: test
type: test
id: 7
body: { "val1": 16.2, "val2": 17.2, "val3": 2.3, "vals" : [1234.3, -3433]}
index:
index: test
type: test
id: 7
body: { "val1": 16.2, "val2": 17.2, "val3": 2.3, "vals" : [1234.3, -3433]}
- do:
index:
index: test
type: test
id: 8
body: { "val1": -4222.63, "val2": 316.44, "val3": 2.3, "vals" : [177.2, -93.333]}
index:
index: test
type: test
id: 8
body: { "val1": -4222.63, "val2": 316.44, "val3": 2.3, "vals" : [177.2, -93.333]}
- do:
index:
index: test
type: test
id: 9
body: { "val1": -59999.55, "val2": -3163.4, "val3": 2.3, "vals" : [-29.9, 163.0]}
index:
index: test
type: test
id: 9
body: { "val1": -59999.55, "val2": -3163.4, "val3": 2.3, "vals" : [-29.9, 163.0]}
- do:
index:
index: test
type: test
id: 10
body: { "val1": 782.7, "val2": 789.7, "val3": 2.3, "vals" : [-0.2, 1343.3]}
index:
index: test
type: test
id: 10
body: { "val1": 782.7, "val2": 789.7, "val3": 2.3, "vals" : [-0.2, 1343.3]}
- do:
index:
index: test
type: test
id: 11
body: { "val1": -1.2, "val2": 6.3, "val3": 2.3, "vals" : [15.3, 16.9]}
index:
index: test
type: test
id: 11
body: { "val1": -1.2, "val2": 6.3, "val3": 2.3, "vals" : [15.3, 16.9]}
- do:
index:
index: test
type: test
id: 12
body: { "val1": 0, "val2": 1.11, "val3": 2.3, "vals" : [-644.4, -644.4]}
index:
index: test
type: test
id: 12
body: { "val1": 0, "val2": 1.11, "val3": 2.3, "vals" : [-644.4, -644.4]}
- do:
index:
index: test
type: test
id: 13
body: { "val1": 0.1, "val2": 0.92, "val3": 2.3, "vals" : [73.2, 0.12]}
index:
index: test
type: test
id: 13
body: { "val1": 0.1, "val2": 0.92, "val3": 2.3, "vals" : [73.2, 0.12]}
- do:
index:
index: test
type: test
id: 14
body: { "val1": 0.12, "val2": -82.4, "val3": 2.3, "vals" : [-0.001, 1295.3]}
index:
index: test
type: test
id: 14
body: { "val1": 0.12, "val2": -82.4, "val3": 2.3, "vals" : [-0.001, 1295.3]}
- do:
index:
index: test
type: test
id: 15
body: { "val1": 98.2, "val2": 32.4, "val3": 2.3, "vals" : [15.5, 16.5]}
index:
index: test
type: test
id: 15
body: { "val1": 98.2, "val2": 32.4, "val3": 2.3, "vals" : [15.5, 16.5]}
- do:
indices.refresh:
index: [test, unmapped]
indices.refresh:
index: [test, unmapped]
- do:
cluster.health:

@@ -2,22 +2,22 @@
#
setup:
- do:
indices.create:
index: test
body:
mappings:
test:
properties:
foo:
type: keyword
missing:
type: keyword
date:
type: date
format: yyyy/MM/dd
dates:
type: date
format: yyyy/MM/dd
indices.create:
index: test
body:
mappings:
test:
properties:
foo:
type: keyword
missing:
type: keyword
date:
type: date
format: yyyy/MM/dd
dates:
type: date
format: yyyy/MM/dd
- do:
index:
index: test

@@ -26,13 +26,13 @@
analyzer: my_analyzer
- do:
index:
index: test
type: type
id: 1
body: { "text": "Bâton enflammé" }
index:
index: test
type: type
id: 1
body: { "text": "Bâton enflammé" }
- do:
indices.refresh: {}
indices.refresh: {}
- do:
search:

@@ -26,13 +26,13 @@
analyzer: my_analyzer
- do:
index:
index: phonetic_sample
type: type
id: 1
body: { "text": "hello world" }
index:
index: phonetic_sample
type: type
id: 1
body: { "text": "hello world" }
- do:
indices.refresh: {}
indices.refresh: {}
- do:
search:

@@ -14,13 +14,13 @@
analyzer: smartcn
- do:
index:
index: test
type: type
id: 1
body: { "text": "我购买了道具和服装" }
index:
index: test
type: type
id: 1
body: { "text": "我购买了道具和服装" }
- do:
indices.refresh: {}
indices.refresh: {}
- do:
search:

@@ -14,13 +14,13 @@
analyzer: polish
- do:
index:
index: test
type: type
id: 1
body: { "text": "studenta był" }
index:
index: test
type: type
id: 1
body: { "text": "studenta był" }
- do:
indices.refresh: {}
indices.refresh: {}
- do:
search:

@@ -14,13 +14,13 @@
analyzer: ukrainian
- do:
index:
index: test
type: type
id: 1
body: { "text": "Ця п'єса у свою чергу рухається по колу." }
index:
index: test
type: type
id: 1
body: { "text": "Ця п'єса у свою чергу рухається по колу." }
- do:
indices.refresh: {}
indices.refresh: {}
- do:
search:

@@ -2,37 +2,37 @@
"Index data and search on the mixed cluster":
- do:
indices.create:
index: test_index
body:
settings:
index:
number_of_shards: 2
number_of_replicas: 0
indices.create:
index: test_index
body:
settings:
index:
number_of_shards: 2
number_of_replicas: 0
- do:
bulk:
refresh: true
body:
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "local_cluster", "filter_field": 0}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "local_cluster", "filter_field": 1}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "local_cluster", "filter_field": 0}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "local_cluster", "filter_field": 1}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "local_cluster", "filter_field": 0}'
bulk:
refresh: true
body:
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "local_cluster", "filter_field": 0}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "local_cluster", "filter_field": 1}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "local_cluster", "filter_field": 0}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "local_cluster", "filter_field": 1}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "local_cluster", "filter_field": 0}'
- do:
search:
index: test_index,my_remote_cluster:test_index
body:
aggs:
cluster:
terms:
field: f1.keyword
search:
index: test_index,my_remote_cluster:test_index
body:
aggs:
cluster:
terms:
field: f1.keyword
- match: { _shards.total: 5 }
- match: { hits.total: 11 }
@@ -43,16 +43,16 @@
- match: { aggregations.cluster.buckets.1.doc_count: 5 }
- do:
search:
index: test_index,my_remote_cluster:test_index
body:
query:
term:
f1: remote_cluster
aggs:
cluster:
terms:
field: f1.keyword
search:
index: test_index,my_remote_cluster:test_index
body:
query:
term:
f1: remote_cluster
aggs:
cluster:
terms:
field: f1.keyword
- match: { _shards.total: 5 }
- match: { hits.total: 6}
@@ -62,13 +62,13 @@
- match: { aggregations.cluster.buckets.0.doc_count: 6 }
- do:
search:
index: my_remote_cluster:test_index
body:
aggs:
cluster:
terms:
field: f1.keyword
search:
index: my_remote_cluster:test_index
body:
aggs:
cluster:
terms:
field: f1.keyword
- match: { _shards.total: 3 }
- match: { hits.total: 6}
@@ -78,13 +78,13 @@
- match: { aggregations.cluster.buckets.0.doc_count: 6 }
- do:
search:
index: test_index
body:
aggs:
cluster:
terms:
field: f1.keyword
search:
index: test_index
body:
aggs:
cluster:
terms:
field: f1.keyword
- match: { _shards.total: 2 }
- match: { hits.total: 5}
@@ -96,23 +96,23 @@
---
"Add transient remote cluster based on the preset cluster":
- do:
cluster.get_settings:
include_defaults: true
cluster.get_settings:
include_defaults: true
- set: { defaults.search.remote.my_remote_cluster.seeds.0: remote_ip }
- do:
cluster.put_settings:
flat_settings: true
body:
transient:
search.remote.test_remote_cluster.seeds: $remote_ip
cluster.put_settings:
flat_settings: true
body:
transient:
search.remote.test_remote_cluster.seeds: $remote_ip
- match: {transient: {search.remote.test_remote_cluster.seeds: $remote_ip}}
- do:
search:
index: test_remote_cluster:test_index
search:
index: test_remote_cluster:test_index
- match: { _shards.total: 3 }
- match: { hits.total: 6 }
@@ -122,8 +122,8 @@
"Search an filtered alias on the remote cluster":
- do:
search:
index: my_remote_cluster:aliased_test_index
search:
index: my_remote_cluster:aliased_test_index
- match: { _shards.total: 3 }
- match: { hits.total: 2 }

@@ -2,45 +2,44 @@
"Index data and search on the old cluster":
- do:
indices.create:
index: test_index
body:
settings:
index:
number_of_shards: 3
number_of_replicas: 0
aliases:
aliased_test_index: # we use this alias in the multi cluster test to very filtered aliases work
filter:
term:
filter_field : 1
indices.create:
index: test_index
body:
settings:
index:
number_of_shards: 3
number_of_replicas: 0
aliases:
aliased_test_index: # we use this alias in the multi cluster test to very filtered aliases work
filter:
term:
filter_field : 1
- do:
bulk:
refresh: true
body:
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 0}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 1}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 0}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 1}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 0}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 0}'
bulk:
refresh: true
body:
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 0}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 1}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 0}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 1}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 0}'
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "remote_cluster", "filter_field": 0}'
- do:
search:
index: test_index
body:
aggs:
cluster:
terms:
field: f1.keyword
search:
index: test_index
body:
aggs:
cluster:
terms:
field: f1.keyword
- match: { _shards.total: 3 }
- match: { hits.total: 6 }
@@ -49,8 +48,8 @@
- match: { aggregations.cluster.buckets.0.doc_count: 6 }
- do:
search:
index: aliased_test_index
search:
index: aliased_test_index
- match: { _shards.total: 3 }
- match: { hits.total: 2 }

@@ -36,17 +36,15 @@ public class ClientYamlTestSection implements Comparable<ClientYamlTestSection>
try {
parser.nextToken();
testSection.setSkipSection(SkipSection.parseIfNext(parser));
while (parser.currentToken() != XContentParser.Token.END_ARRAY) {
ParserUtils.advanceToFieldName(parser);
testSection.addExecutableSection(ExecutableSection.parse(parser));
}
if (parser.nextToken() != XContentParser.Token.END_OBJECT) {
throw new IllegalArgumentException("malformed section [" + testSection.getName() + "] expected ["
+ XContentParser.Token.END_OBJECT + "] but was [" + parser.currentToken() + "]");
}
parser.nextToken();
assert parser.currentToken() == XContentParser.Token.END_OBJECT : "malformed section [" + testSection.getName() + "] expected "
+ XContentParser.Token.END_OBJECT + " but was " + parser.currentToken();
parser.nextToken();
return testSection;
} catch (Exception e) {
throw new ParsingException(parser.getTokenLocation(), "Error parsing test named [" + testSection.getName() + "]", e);

@@ -86,6 +86,11 @@ public class DoSection implements ExecutableSection {
Map<String, String> headers = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
List<String> expectedWarnings = new ArrayList<>();
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("expected [" + XContentParser.Token.START_OBJECT + "], " +
"found [" + parser.currentToken() + "], the do section is not properly indented");
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();

@@ -54,7 +54,6 @@ public class SetupSection {
throw new IllegalArgumentException("section [" + parser.currentName() + "] not supported within setup section");
}
parser.nextToken();
setupSection.addDoSection(DoSection.parse(parser));
parser.nextToken();
}

@@ -52,12 +52,15 @@ public class SkipSection {
}
public static SkipSection parse(XContentParser parser) throws IOException {
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("Expected [" + XContentParser.Token.START_OBJECT +
", found [" + parser.currentToken() + "], the skip section is not properly indented");
}
String currentFieldName = null;
XContentParser.Token token;
String version = null;
String reason = null;
List<String> features = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@@ -90,7 +93,6 @@ public class SkipSection {
if (Strings.hasLength(version) && !Strings.hasLength(reason)) {
throw new ParsingException(parser.getTokenLocation(), "reason is mandatory within skip version section");
}
return new SkipSection(version, features, reason);
}

@@ -54,7 +54,6 @@ public class TeardownSection {
"section [" + parser.currentName() + "] not supported within teardown section");
}
parser.nextToken();
teardownSection.addDoSection(DoSection.parse(parser));
parser.nextToken();
}

@@ -20,9 +20,12 @@
package org.elasticsearch.test.rest.yaml.section;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.yaml.YamlXContent;
import java.io.IOException;
import java.util.Map;
import static java.util.Collections.singletonList;
@@ -60,6 +63,38 @@ public class ClientYamlTestSectionTests extends AbstractClientYamlTestFragmentPa
+ " [warnings] section can skip the test at line [" + lineNumber + "]", e.getMessage());
}
public void testWrongIndentation() throws Exception {
{
XContentParser parser = createParser(YamlXContent.yamlXContent,
"\"First test section\": \n" +
" - skip:\n" +
" version: \"2.0.0 - 2.2.0\"\n" +
" reason: \"Update doesn't return metadata fields, waiting for #3259\"");
ParsingException e = expectThrows(ParsingException.class, () -> ClientYamlTestSection.parse(parser));
assertEquals("Error parsing test named [First test section]", e.getMessage());
assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
assertEquals("Expected [START_OBJECT, found [VALUE_NULL], the skip section is not properly indented",
e.getCause().getMessage());
}
{
XContentParser parser = createParser(YamlXContent.yamlXContent,
"\"First test section\": \n" +
" - do :\n" +
" catch: missing\n" +
" indices.get_warmer:\n" +
" index: test_index\n" +
" name: test_warmer"
);
ParsingException e = expectThrows(ParsingException.class, () -> ClientYamlTestSection.parse(parser));
assertEquals("Error parsing test named [First test section]", e.getMessage());
assertThat(e.getCause(), instanceOf(IOException.class));
assertThat(e.getCause().getCause(), instanceOf(IllegalArgumentException.class));
assertEquals("expected [START_OBJECT], found [VALUE_NULL], the do section is not properly indented",
e.getCause().getCause().getMessage());
}
}
public void testParseTestSectionWithDoSection() throws Exception {
parser = createParser(YamlXContent.yamlXContent,
"\"First test section\": \n" +

@@ -24,7 +24,6 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.yaml.YamlXContent;
import org.elasticsearch.test.VersionUtils;
import java.util.Arrays;
import java.util.Collections;
import static org.hamcrest.Matchers.equalTo;
@@ -35,20 +34,20 @@ import static org.hamcrest.Matchers.nullValue;
public class SkipSectionTests extends AbstractClientYamlTestFragmentParserTestCase {
public void testSkip() {
SkipSection section = new SkipSection("2.0.0 - 2.1.0", randomBoolean() ? Collections.emptyList() :
Arrays.asList("warnings"), "foobar");
SkipSection section = new SkipSection("2.0.0 - 2.1.0",
randomBoolean() ? Collections.emptyList() : Collections.singletonList("warnings"), "foobar");
assertFalse(section.skip(Version.CURRENT));
assertTrue(section.skip(Version.V_2_0_0));
section = new SkipSection(randomBoolean() ? null : "2.0.0 - 2.1.0", Arrays.asList("boom"), "foobar");
section = new SkipSection(randomBoolean() ? null : "2.0.0 - 2.1.0", Collections.singletonList("boom"), "foobar");
assertTrue(section.skip(Version.CURRENT));
}
public void testMessage() {
SkipSection section = new SkipSection("2.0.0 - 2.1.0", Arrays.asList("warnings"), "foobar");
SkipSection section = new SkipSection("2.0.0 - 2.1.0", Collections.singletonList("warnings"), "foobar");
assertEquals("[FOOBAR] skipped, reason: [foobar] unsupported features [warnings]", section.getSkipMessage("FOOBAR"));
section = new SkipSection(null, Arrays.asList("warnings"), "foobar");
section = new SkipSection(null, Collections.singletonList("warnings"), "foobar");
assertEquals("[FOOBAR] skipped, reason: [foobar] unsupported features [warnings]", section.getSkipMessage("FOOBAR"));
section = new SkipSection(null, Arrays.asList("warnings"), null);
section = new SkipSection(null, Collections.singletonList("warnings"), null);
assertEquals("[FOOBAR] skipped, unsupported features [warnings]", section.getSkipMessage("FOOBAR"));
}
@@ -118,7 +117,7 @@ public class SkipSectionTests extends AbstractClientYamlTestFragmentParserTestCa
SkipSection skipSection = SkipSection.parse(parser);
assertEquals(VersionUtils.getFirstVersion(), skipSection.getLowerVersion());
assertEquals(Version.fromString("0.90.2"), skipSection.getUpperVersion());
assertEquals(Arrays.asList("regex"), skipSection.getFeatures());
assertEquals(Collections.singletonList("regex"), skipSection.getFeatures());
assertEquals("Delete ignores the parent param", skipSection.getReason());
}