Make sure to reject mappings with type _doc when include_type_name is false. (#38270)
`CreateIndexRequest#source(Map<String, Object>, ...)`, which is used when deserializing index creation requests, accidentally accepts mappings that are nested twice under the type key (as described in bug report #38266). This in turn makes us too lenient when parsing typeless mappings. In particular, we accept the following index creation request, even though it should not contain the type key `_doc`:

```
PUT index?include_type_name=false
{
  "mappings": {
    "_doc": {
      "properties": { ... }
    }
  }
}
```

There is a similar issue for both 'put templates' and 'put mappings' requests. This PR makes the minimal changes needed to detect and reject these typed mappings in requests. It does not address #38266 more generally, or attempt a larger refactor around types in these server-side requests, as I think that should be done at a later time.
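For contrast, a minimal sketch of the typeless form that this change continues to accept (same request, with the same `properties` placeholder as above): the mapping definition sits directly under `mappings`, with no `_doc` wrapper.

```
PUT index?include_type_name=false
{
  "mappings": {
    "properties": { ... }
  }
}
```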
Parent: 8ebff0512b
Commit: 3ce7d2c9b6
@ -1235,12 +1235,10 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||||
createIndex.setJsonEntity(
|
createIndex.setJsonEntity(
|
||||||
"{\n" +
|
"{\n" +
|
||||||
" \"mappings\" : {\n" +
|
" \"mappings\" : {\n" +
|
||||||
" \"_doc\" : {\n" +
|
" \"properties\" : {\n" +
|
||||||
" \"properties\" : {\n" +
|
" \"message\" : {\n" +
|
||||||
" \"message\" : {\n" +
|
" \"type\": \"text\",\n" +
|
||||||
" \"type\": \"text\",\n" +
|
" \"store\": true\n" +
|
||||||
" \"store\": true\n" +
|
|
||||||
" }\n" +
|
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
|
@ -1764,12 +1762,10 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||||
createIndex.setJsonEntity(
|
createIndex.setJsonEntity(
|
||||||
"{\n" +
|
"{\n" +
|
||||||
" \"mappings\" : {\n" +
|
" \"mappings\" : {\n" +
|
||||||
" \"_doc\" : {\n" +
|
" \"properties\" : {\n" +
|
||||||
" \"properties\" : {\n" +
|
" \"foo\" : {\n" +
|
||||||
" \"foo\" : {\n" +
|
" \"type\": \"text\",\n" +
|
||||||
" \"type\": \"text\",\n" +
|
" \"store\": true\n" +
|
||||||
" \"store\": true\n" +
|
|
||||||
" }\n" +
|
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
|
|
|
@ -313,15 +313,13 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
|
||||||
{
|
{
|
||||||
request = new CreateIndexRequest("twitter2");
|
request = new CreateIndexRequest("twitter2");
|
||||||
//tag::create-index-mappings-map
|
//tag::create-index-mappings-map
|
||||||
Map<String, Object> jsonMap = new HashMap<>();
|
|
||||||
Map<String, Object> message = new HashMap<>();
|
Map<String, Object> message = new HashMap<>();
|
||||||
message.put("type", "text");
|
message.put("type", "text");
|
||||||
Map<String, Object> properties = new HashMap<>();
|
Map<String, Object> properties = new HashMap<>();
|
||||||
properties.put("message", message);
|
properties.put("message", message);
|
||||||
Map<String, Object> mapping = new HashMap<>();
|
Map<String, Object> mapping = new HashMap<>();
|
||||||
mapping.put("properties", properties);
|
mapping.put("properties", properties);
|
||||||
jsonMap.put("_doc", mapping);
|
request.mapping(mapping); // <1>
|
||||||
request.mapping(jsonMap); // <1>
|
|
||||||
//end::create-index-mappings-map
|
//end::create-index-mappings-map
|
||||||
CreateIndexResponse createIndexResponse = client.indices().create(request, RequestOptions.DEFAULT);
|
CreateIndexResponse createIndexResponse = client.indices().create(request, RequestOptions.DEFAULT);
|
||||||
assertTrue(createIndexResponse.isAcknowledged());
|
assertTrue(createIndexResponse.isAcknowledged());
|
||||||
|
@ -332,15 +330,11 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
|
||||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||||
builder.startObject();
|
builder.startObject();
|
||||||
{
|
{
|
||||||
builder.startObject("_doc");
|
builder.startObject("properties");
|
||||||
{
|
{
|
||||||
builder.startObject("properties");
|
builder.startObject("message");
|
||||||
{
|
{
|
||||||
builder.startObject("message");
|
builder.field("type", "text");
|
||||||
{
|
|
||||||
builder.field("type", "text");
|
|
||||||
}
|
|
||||||
builder.endObject();
|
|
||||||
}
|
}
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
}
|
}
|
||||||
|
@ -381,10 +375,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
|
||||||
" \"number_of_replicas\" : 0\n" +
|
" \"number_of_replicas\" : 0\n" +
|
||||||
" },\n" +
|
" },\n" +
|
||||||
" \"mappings\" : {\n" +
|
" \"mappings\" : {\n" +
|
||||||
" \"_doc\" : {\n" +
|
" \"properties\" : {\n" +
|
||||||
" \"properties\" : {\n" +
|
" \"message\" : { \"type\" : \"text\" }\n" +
|
||||||
" \"message\" : { \"type\" : \"text\" }\n" +
|
|
||||||
" }\n" +
|
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" },\n" +
|
" },\n" +
|
||||||
" \"aliases\" : {\n" +
|
" \"aliases\" : {\n" +
|
||||||
|
|
|
@ -98,19 +98,17 @@ Closure setupTwitter = { String name, int count ->
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 1
|
number_of_replicas: 1
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
user:
|
||||||
user:
|
type: keyword
|
||||||
type: keyword
|
doc_values: true
|
||||||
doc_values: true
|
date:
|
||||||
date:
|
type: date
|
||||||
type: date
|
likes:
|
||||||
likes:
|
type: long
|
||||||
type: long
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: twitter
|
index: twitter
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |'''
|
body: |'''
|
||||||
for (int i = 0; i < count; i++) {
|
for (int i = 0; i < count; i++) {
|
||||||
|
@ -161,16 +159,14 @@ buildRestTests.setups['ledger'] = '''
|
||||||
number_of_shards: 2
|
number_of_shards: 2
|
||||||
number_of_replicas: 1
|
number_of_replicas: 1
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
type:
|
||||||
type:
|
type: keyword
|
||||||
type: keyword
|
amount:
|
||||||
amount:
|
type: double
|
||||||
type: double
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: ledger
|
index: ledger
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |
|
body: |
|
||||||
{"index":{}}
|
{"index":{}}
|
||||||
|
@ -194,14 +190,12 @@ buildRestTests.setups['sales'] = '''
|
||||||
number_of_shards: 2
|
number_of_shards: 2
|
||||||
number_of_replicas: 1
|
number_of_replicas: 1
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
type:
|
||||||
type:
|
type: keyword
|
||||||
type: keyword
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: sales
|
index: sales
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |
|
body: |
|
||||||
{"index":{}}
|
{"index":{}}
|
||||||
|
@ -231,7 +225,6 @@ buildRestTests.setups['bank'] = '''
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: bank
|
index: bank
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |
|
body: |
|
||||||
#bank_data#
|
#bank_data#
|
||||||
|
@ -273,16 +266,14 @@ buildRestTests.setups['stackoverflow'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 1
|
number_of_replicas: 1
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
author:
|
||||||
author:
|
type: keyword
|
||||||
type: keyword
|
tags:
|
||||||
tags:
|
type: keyword
|
||||||
type: keyword
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: stackoverflow
|
index: stackoverflow
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |'''
|
body: |'''
|
||||||
|
|
||||||
|
@ -328,16 +319,14 @@ buildRestTests.setups['news'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 1
|
number_of_replicas: 1
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
source:
|
||||||
source:
|
type: keyword
|
||||||
type: keyword
|
content:
|
||||||
content:
|
type: text
|
||||||
type: text
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: news
|
index: news
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |'''
|
body: |'''
|
||||||
|
|
||||||
|
@ -381,14 +370,12 @@ buildRestTests.setups['exams'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 1
|
number_of_replicas: 1
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
grade:
|
||||||
grade:
|
type: byte
|
||||||
type: byte
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: exams
|
index: exams
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |
|
body: |
|
||||||
{"index":{}}
|
{"index":{}}
|
||||||
|
@ -446,10 +433,9 @@ buildRestTests.setups['analyze_sample'] = '''
|
||||||
type: custom
|
type: custom
|
||||||
filter: [lowercase]
|
filter: [lowercase]
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
obj1.field1:
|
||||||
obj1.field1:
|
type: text'''
|
||||||
type: text'''
|
|
||||||
|
|
||||||
// Used by percentile/percentile-rank aggregations
|
// Used by percentile/percentile-rank aggregations
|
||||||
buildRestTests.setups['latency'] = '''
|
buildRestTests.setups['latency'] = '''
|
||||||
|
@ -461,14 +447,12 @@ buildRestTests.setups['latency'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 1
|
number_of_replicas: 1
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
load_time:
|
||||||
load_time:
|
type: long
|
||||||
type: long
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: latency
|
index: latency
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |'''
|
body: |'''
|
||||||
|
|
||||||
|
@ -493,14 +477,12 @@ buildRestTests.setups['iprange'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 1
|
number_of_replicas: 1
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
ip:
|
||||||
ip:
|
type: ip
|
||||||
type: ip
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: ip_addresses
|
index: ip_addresses
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |'''
|
body: |'''
|
||||||
|
|
||||||
|
@ -613,16 +595,15 @@ buildRestTests.setups['sensor_rollup_job'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 0
|
number_of_replicas: 0
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
timestamp:
|
||||||
timestamp:
|
type: date
|
||||||
type: date
|
temperature:
|
||||||
temperature:
|
type: long
|
||||||
type: long
|
voltage:
|
||||||
voltage:
|
type: float
|
||||||
type: float
|
node:
|
||||||
node:
|
type: keyword
|
||||||
type: keyword
|
|
||||||
- do:
|
- do:
|
||||||
raw:
|
raw:
|
||||||
method: PUT
|
method: PUT
|
||||||
|
@ -664,21 +645,19 @@ buildRestTests.setups['sensor_started_rollup_job'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 0
|
number_of_replicas: 0
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
timestamp:
|
||||||
timestamp:
|
type: date
|
||||||
type: date
|
temperature:
|
||||||
temperature:
|
type: long
|
||||||
type: long
|
voltage:
|
||||||
voltage:
|
type: float
|
||||||
type: float
|
node:
|
||||||
node:
|
type: keyword
|
||||||
type: keyword
|
|
||||||
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: sensor-1
|
index: sensor-1
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |
|
body: |
|
||||||
{"index":{}}
|
{"index":{}}
|
||||||
|
@ -740,26 +719,25 @@ buildRestTests.setups['sensor_index'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 0
|
number_of_replicas: 0
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
timestamp:
|
||||||
timestamp:
|
type: date
|
||||||
type: date
|
temperature:
|
||||||
temperature:
|
type: long
|
||||||
type: long
|
voltage:
|
||||||
voltage:
|
type: float
|
||||||
type: float
|
node:
|
||||||
node:
|
type: keyword
|
||||||
type: keyword
|
load:
|
||||||
load:
|
type: double
|
||||||
type: double
|
net_in:
|
||||||
net_in:
|
type: long
|
||||||
type: long
|
net_out:
|
||||||
net_out:
|
type: long
|
||||||
type: long
|
hostname:
|
||||||
hostname:
|
type: keyword
|
||||||
type: keyword
|
datacenter:
|
||||||
datacenter:
|
type: keyword
|
||||||
type: keyword
|
|
||||||
'''
|
'''
|
||||||
|
|
||||||
buildRestTests.setups['sensor_prefab_data'] = '''
|
buildRestTests.setups['sensor_prefab_data'] = '''
|
||||||
|
@ -771,16 +749,15 @@ buildRestTests.setups['sensor_prefab_data'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 0
|
number_of_replicas: 0
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
timestamp:
|
||||||
timestamp:
|
type: date
|
||||||
type: date
|
temperature:
|
||||||
temperature:
|
type: long
|
||||||
type: long
|
voltage:
|
||||||
voltage:
|
type: float
|
||||||
type: float
|
node:
|
||||||
node:
|
type: keyword
|
||||||
type: keyword
|
|
||||||
- do:
|
- do:
|
||||||
indices.create:
|
indices.create:
|
||||||
index: sensor_rollup
|
index: sensor_rollup
|
||||||
|
@ -789,64 +766,62 @@ buildRestTests.setups['sensor_prefab_data'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 0
|
number_of_replicas: 0
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
node.terms.value:
|
||||||
node.terms.value:
|
type: keyword
|
||||||
type: keyword
|
temperature.sum.value:
|
||||||
temperature.sum.value:
|
type: double
|
||||||
type: double
|
temperature.max.value:
|
||||||
temperature.max.value:
|
type: double
|
||||||
type: double
|
temperature.min.value:
|
||||||
temperature.min.value:
|
type: double
|
||||||
type: double
|
timestamp.date_histogram.time_zone:
|
||||||
timestamp.date_histogram.time_zone:
|
type: keyword
|
||||||
type: keyword
|
timestamp.date_histogram.interval:
|
||||||
timestamp.date_histogram.interval:
|
type: keyword
|
||||||
type: keyword
|
timestamp.date_histogram.timestamp:
|
||||||
timestamp.date_histogram.timestamp:
|
type: date
|
||||||
type: date
|
timestamp.date_histogram._count:
|
||||||
timestamp.date_histogram._count:
|
type: long
|
||||||
type: long
|
voltage.avg.value:
|
||||||
voltage.avg.value:
|
type: double
|
||||||
type: double
|
voltage.avg._count:
|
||||||
voltage.avg._count:
|
type: long
|
||||||
type: long
|
_rollup.id:
|
||||||
_rollup.id:
|
type: keyword
|
||||||
type: keyword
|
_rollup.version:
|
||||||
_rollup.version:
|
type: long
|
||||||
type: long
|
_meta:
|
||||||
_meta:
|
_rollup:
|
||||||
_rollup:
|
sensor:
|
||||||
sensor:
|
cron: "* * * * * ?"
|
||||||
cron: "* * * * * ?"
|
rollup_index: "sensor_rollup"
|
||||||
rollup_index: "sensor_rollup"
|
index_pattern: "sensor-*"
|
||||||
index_pattern: "sensor-*"
|
timeout: "20s"
|
||||||
timeout: "20s"
|
page_size: 1000
|
||||||
page_size: 1000
|
groups:
|
||||||
groups:
|
date_histogram:
|
||||||
date_histogram:
|
delay: "7d"
|
||||||
delay: "7d"
|
field: "timestamp"
|
||||||
field: "timestamp"
|
interval: "60m"
|
||||||
interval: "60m"
|
time_zone: "UTC"
|
||||||
time_zone: "UTC"
|
terms:
|
||||||
terms:
|
fields:
|
||||||
fields:
|
- "node"
|
||||||
- "node"
|
id: sensor
|
||||||
id: sensor
|
metrics:
|
||||||
metrics:
|
- field: "temperature"
|
||||||
- field: "temperature"
|
metrics:
|
||||||
metrics:
|
- min
|
||||||
- min
|
- max
|
||||||
- max
|
- sum
|
||||||
- sum
|
- field: "voltage"
|
||||||
- field: "voltage"
|
metrics:
|
||||||
metrics:
|
- avg
|
||||||
- avg
|
|
||||||
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: sensor_rollup
|
index: sensor_rollup
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |
|
body: |
|
||||||
{"index":{}}
|
{"index":{}}
|
||||||
|
@ -1093,16 +1068,14 @@ buildRestTests.setups['reviews'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 0
|
number_of_replicas: 0
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
product:
|
||||||
product:
|
type: keyword
|
||||||
type: keyword
|
rating:
|
||||||
rating:
|
type: long
|
||||||
type: long
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: reviews
|
index: reviews
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |
|
body: |
|
||||||
{"index": {"_id": "1"}}
|
{"index": {"_id": "1"}}
|
||||||
|
@ -1139,22 +1112,20 @@ buildRestTests.setups['seats'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 0
|
number_of_replicas: 0
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
theatre:
|
||||||
theatre:
|
type: keyword
|
||||||
type: keyword
|
cost:
|
||||||
cost:
|
type: long
|
||||||
type: long
|
row:
|
||||||
row:
|
type: long
|
||||||
type: long
|
number:
|
||||||
number:
|
type: long
|
||||||
type: long
|
sold:
|
||||||
sold:
|
type: boolean
|
||||||
type: boolean
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: seats
|
index: seats
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |
|
body: |
|
||||||
{"index":{"_id": "1"}}
|
{"index":{"_id": "1"}}
|
||||||
|
|
|
@ -14,26 +14,22 @@ setup:
|
||||||
settings:
|
settings:
|
||||||
number_of_shards: 2
|
number_of_shards: 2
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
dval:
|
||||||
dval:
|
type: double
|
||||||
type: double
|
|
||||||
- do:
|
- do:
|
||||||
index:
|
index:
|
||||||
index: test
|
index: test
|
||||||
type: _doc
|
|
||||||
id: d1
|
id: d1
|
||||||
body: {"dval": 10}
|
body: {"dval": 10}
|
||||||
- do:
|
- do:
|
||||||
index:
|
index:
|
||||||
index: test
|
index: test
|
||||||
type: _doc
|
|
||||||
id: d2
|
id: d2
|
||||||
body: {"dval": 100}
|
body: {"dval": 100}
|
||||||
- do:
|
- do:
|
||||||
index:
|
index:
|
||||||
index: test
|
index: test
|
||||||
type: _doc
|
|
||||||
id: d3
|
id: d3
|
||||||
body: {"dval": 1000}
|
body: {"dval": 1000}
|
||||||
|
|
||||||
|
@ -225,10 +221,9 @@ setup:
|
||||||
settings:
|
settings:
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
date:
|
||||||
date:
|
type: date
|
||||||
type: date
|
|
||||||
- do:
|
- do:
|
||||||
index:
|
index:
|
||||||
index: test
|
index: test
|
||||||
|
@ -309,16 +304,15 @@ setup:
|
||||||
settings:
|
settings:
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
ival:
|
||||||
ival:
|
type: integer
|
||||||
type: integer
|
lval:
|
||||||
lval:
|
type: long
|
||||||
type: long
|
fval:
|
||||||
fval:
|
type: float
|
||||||
type: float
|
dval:
|
||||||
dval:
|
type: double
|
||||||
type: double
|
|
||||||
|
|
||||||
- do:
|
- do:
|
||||||
index:
|
index:
|
||||||
|
|
|
@ -44,9 +44,8 @@
|
||||||
index: test
|
index: test
|
||||||
body:
|
body:
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
_source:
|
||||||
_source:
|
enabled: false
|
||||||
enabled: false
|
|
||||||
- do:
|
- do:
|
||||||
index:
|
index:
|
||||||
index: test
|
index: test
|
||||||
|
|
|
@@ -135,3 +135,22 @@
         properties:
           "":
             type: keyword
+
+---
+"Create index with explicit _doc type":
+  - skip:
+      version: " - 6.99.99"
+      reason: include_type_name defaults to true before 7.0
+  - do:
+      catch: bad_request
+      indices.create:
+        index: test_index
+        body:
+          mappings:
+            _doc:
+              properties:
+                field:
+                  type: keyword
+
+  - match: { error.type: "illegal_argument_exception" }
+  - match: { error.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." }
@@ -68,3 +68,26 @@
         properties:
           "":
             type: keyword
+
+---
+"Put mappings with explicit _doc type":
+  - skip:
+      version: " - 6.99.99"
+      reason: include_type_name defaults to true before 7.0
+
+  - do:
+      indices.create:
+        index: test_index
+
+  - do:
+      catch: bad_request
+      indices.put_mapping:
+        index: test_index
+        body:
+          _doc:
+            properties:
+              field:
+                type: keyword
+
+  - match: { error.type: "illegal_argument_exception" }
+  - match: { error.reason: "Types cannot be provided in put mapping requests, unless the include_type_name parameter is set to true." }
@@ -238,3 +238,24 @@
       indices.put_template:
         name: test
         body: {}
+
+---
+"Put template with explicit _doc type":
+  - skip:
+      version: " - 6.99.99"
+      reason: include_type_name defaults to true before 7.0
+
+  - do:
+      catch: bad_request
+      indices.put_template:
+        name: test
+        body:
+          index_patterns: test-*
+          mappings:
+            _doc:
+              properties:
+                field:
+                  type: keyword
+
+  - match: { error.type: "illegal_argument_exception" }
+  - match: { error.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." }
@@ -41,3 +41,32 @@

   - match: { conditions: { "[max_docs: 2]": true } }
   - match: { rolled_over: true }
+
+---
+"Mappings with explicit _doc type":
+  - skip:
+      version: " - 6.99.99"
+      reason: include_type_name defaults to true before 7.0
+
+  - do:
+      indices.create:
+        index: logs-1
+        body:
+          aliases:
+            logs_search: {}
+
+  - do:
+      catch: bad_request
+      indices.rollover:
+        alias: "logs_search"
+        body:
+          conditions:
+            max_docs: 2
+          mappings:
+            _doc:
+              properties:
+                field:
+                  type: keyword
+
+  - match: { error.caused_by.type: "illegal_argument_exception" }
+  - match: { error.caused_by.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." }
@@ -70,13 +70,18 @@ public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implem
             CONDITIONS, ObjectParser.ValueType.OBJECT);
         PARSER.declareField((parser, request, context) -> request.createIndexRequest.settings(parser.map()),
             CreateIndexRequest.SETTINGS, ObjectParser.ValueType.OBJECT);
-        PARSER.declareField((parser, request, isTypeIncluded) -> {
-            if (isTypeIncluded) {
+        PARSER.declareField((parser, request, includeTypeName) -> {
+            if (includeTypeName) {
                 for (Map.Entry<String, Object> mappingsEntry : parser.map().entrySet()) {
                     request.createIndexRequest.mapping(mappingsEntry.getKey(), (Map<String, Object>) mappingsEntry.getValue());
                 }
             } else {
                 // a type is not included, add a dummy _doc type
+                Map<String, Object> mappings = parser.map();
+                if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) {
+                    throw new IllegalArgumentException("The mapping definition cannot be nested under a type " +
+                        "[" + MapperService.SINGLE_MAPPING_NAME + "] unless include_type_name is set to true.");
+                }
                 request.createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, parser.map());
             }
         }, CreateIndexRequest.MAPPINGS, ObjectParser.ValueType.OBJECT);
@ -22,13 +22,12 @@ package org.elasticsearch.cluster.metadata;
|
||||||
import org.apache.logging.log4j.LogManager;
|
import org.apache.logging.log4j.LogManager;
|
||||||
import org.apache.logging.log4j.Logger;
|
import org.apache.logging.log4j.Logger;
|
||||||
import org.apache.logging.log4j.message.ParameterizedMessage;
|
import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||||
import org.elasticsearch.core.internal.io.IOUtils;
|
|
||||||
import org.elasticsearch.action.ActionListener;
|
import org.elasticsearch.action.ActionListener;
|
||||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest;
|
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest;
|
||||||
import org.elasticsearch.cluster.AckedClusterStateTaskListener;
|
import org.elasticsearch.cluster.AckedClusterStateTaskListener;
|
||||||
import org.elasticsearch.cluster.ClusterStateTaskExecutor;
|
|
||||||
import org.elasticsearch.cluster.ClusterState;
|
import org.elasticsearch.cluster.ClusterState;
|
||||||
import org.elasticsearch.cluster.ClusterStateTaskConfig;
|
import org.elasticsearch.cluster.ClusterStateTaskConfig;
|
||||||
|
import org.elasticsearch.cluster.ClusterStateTaskExecutor;
|
||||||
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
|
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
|
||||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||||
import org.elasticsearch.cluster.service.ClusterService;
|
import org.elasticsearch.cluster.service.ClusterService;
|
||||||
|
@ -37,8 +36,7 @@ import org.elasticsearch.common.Priority;
|
||||||
import org.elasticsearch.common.compress.CompressedXContent;
|
import org.elasticsearch.common.compress.CompressedXContent;
|
||||||
import org.elasticsearch.common.inject.Inject;
|
import org.elasticsearch.common.inject.Inject;
|
||||||
import org.elasticsearch.common.unit.TimeValue;
|
import org.elasticsearch.common.unit.TimeValue;
|
||||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
import org.elasticsearch.core.internal.io.IOUtils;
|
||||||
import org.elasticsearch.common.xcontent.XContentType;
|
|
||||||
import org.elasticsearch.index.Index;
|
import org.elasticsearch.index.Index;
|
||||||
import org.elasticsearch.index.IndexService;
|
import org.elasticsearch.index.IndexService;
|
||||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||||
|
@ -55,6 +53,7 @@ import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
|
import static org.elasticsearch.index.mapper.MapperService.isMappingSourceTyped;
|
||||||
import static org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.NO_LONGER_ASSIGNED;
|
import static org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.NO_LONGER_ASSIGNED;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@@ -279,7 +278,7 @@ public class MetaDataMappingService {
                 if (mappingType == null) {
                     mappingType = newMapper.type();
                 } else if (mappingType.equals(newMapper.type()) == false
-                        && (isMappingSourceTyped(mapperService, mappingUpdateSource, request.type())
+                        && (isMappingSourceTyped(request.type(), mappingUpdateSource)
                             || mapperService.resolveDocumentType(mappingType).equals(newMapper.type()) == false)) {
                     throw new InvalidTypeNameException("Type name provided does not match type name within mapping definition.");
                 }
@@ -304,7 +303,7 @@ public class MetaDataMappingService {
                 // are handling a typeless call. In such a case, we override _doc with the actual type
                 // name in the mappings. This allows to use typeless APIs on typed indices.
                 String typeForUpdate = mappingType; // the type to use to apply the mapping update
-                if (isMappingSourceTyped(mapperService, mappingUpdateSource, request.type()) == false) {
+                if (isMappingSourceTyped(request.type(), mappingUpdateSource) == false) {
                     typeForUpdate = mapperService.resolveDocumentType(mappingType);
                 }

@@ -371,15 +370,6 @@ public class MetaDataMappingService {
            }
        }

-       /**
-        * Returns {@code true} if the given {@code mappingSource} includes a type
-        * as a top-level object.
-        */
-       private static boolean isMappingSourceTyped(MapperService mapperService, CompressedXContent mappingSource, String type) {
-           Map<String, Object> root = XContentHelper.convertToMap(mappingSource.compressedReference(), true, XContentType.JSON).v2();
-           return root.size() == 1 && root.keySet().iterator().next().equals(type);
-       }
-
    public void putMapping(final PutMappingClusterStateUpdateRequest request, final ActionListener<ClusterStateUpdateResponse> listener) {
        clusterService.submitStateUpdateTask("put-mapping",
            request,
@@ -39,6 +39,7 @@ import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.AbstractIndexComponent;
@@ -665,6 +666,20 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         return null;
     }

+    /**
+     * Returns {@code true} if the given {@code mappingSource} includes a type
+     * as a top-level object.
+     */
+    public static boolean isMappingSourceTyped(String type, Map<String, Object> mapping) {
+        return mapping.size() == 1 && mapping.keySet().iterator().next().equals(type);
+    }
+
+    public static boolean isMappingSourceTyped(String type, CompressedXContent mappingSource) {
+        Map<String, Object> root = XContentHelper.convertToMap(mappingSource.compressedReference(), true, XContentType.JSON).v2();
+        return isMappingSourceTyped(type, root);
+    }
+
     /**
      * Resolves a type from a mapping-related request into the type that should be used when
      * merging and updating mappings.
@@ -64,18 +64,38 @@ public class RestCreateIndexAction extends BaseRestHandler {
         }

         CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index"));

         if (request.hasContent()) {
-            Map<String, Object> sourceAsMap = XContentHelper.convertToMap(request.content(), false, request.getXContentType()).v2();
-            if (includeTypeName == false && sourceAsMap.containsKey("mappings")) {
-                Map<String, Object> newSourceAsMap = new HashMap<>(sourceAsMap);
-                newSourceAsMap.put("mappings", Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, sourceAsMap.get("mappings")));
-                sourceAsMap = newSourceAsMap;
-            }
+            Map<String, Object> sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false,
+                request.getXContentType()).v2();
+            sourceAsMap = prepareMappings(sourceAsMap, includeTypeName);
             createIndexRequest.source(sourceAsMap, LoggingDeprecationHandler.INSTANCE);
         }

         createIndexRequest.timeout(request.paramAsTime("timeout", createIndexRequest.timeout()));
         createIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createIndexRequest.masterNodeTimeout()));
         createIndexRequest.waitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards")));
         return channel -> client.admin().indices().create(createIndexRequest, new RestToXContentListener<>(channel));
     }

+    static Map<String, Object> prepareMappings(Map<String, Object> source, boolean includeTypeName) {
+        if (includeTypeName
+                || source.containsKey("mappings") == false
+                || (source.get("mappings") instanceof Map) == false) {
+            return source;
+        }
+
+        Map<String, Object> newSource = new HashMap<>(source);
+
+        @SuppressWarnings("unchecked")
+        Map<String, Object> mappings = (Map<String, Object>) source.get("mappings");
+        if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) {
+            throw new IllegalArgumentException("The mapping definition cannot be nested under a type " +
+                "[" + MapperService.SINGLE_MAPPING_NAME + "] unless include_type_name is set to true.");
+        }
+
+        newSource.put("mappings", Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, mappings));
+        return newSource;
+    }
 }
@ -26,7 +26,6 @@ import org.elasticsearch.common.Strings;
|
||||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||||
import org.elasticsearch.index.mapper.MapperService;
|
|
||||||
import org.elasticsearch.rest.BaseRestHandler;
|
import org.elasticsearch.rest.BaseRestHandler;
|
||||||
import org.elasticsearch.rest.RestController;
|
import org.elasticsearch.rest.RestController;
|
||||||
import org.elasticsearch.rest.RestRequest;
|
import org.elasticsearch.rest.RestRequest;
|
||||||
|
@ -35,7 +34,6 @@ import org.elasticsearch.rest.action.RestToXContentListener;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
public class RestPutIndexTemplateAction extends BaseRestHandler {
|
public class RestPutIndexTemplateAction extends BaseRestHandler {
|
||||||
|
@@ -59,6 +57,8 @@ public class RestPutIndexTemplateAction extends BaseRestHandler {

     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
+        boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY);
+
         PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest(request.param("name"));
         if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) {
             deprecationLogger.deprecatedAndMaybeLog("put_index_template_with_types", TYPES_DEPRECATION_MESSAGE);
@@ -74,22 +74,11 @@ public class RestPutIndexTemplateAction extends BaseRestHandler {
         putRequest.create(request.paramAsBoolean("create", false));
         putRequest.cause(request.param("cause", ""));

-        boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY);
-        Map<String, Object> sourceAsMap = prepareRequestSource(request, includeTypeName);
+        Map<String, Object> sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false,
+            request.getXContentType()).v2();
+        sourceAsMap = RestCreateIndexAction.prepareMappings(sourceAsMap, includeTypeName);
         putRequest.source(sourceAsMap);

         return channel -> client.admin().indices().putTemplate(putRequest, new RestToXContentListener<>(channel));
     }

-    Map<String, Object> prepareRequestSource(RestRequest request, boolean includeTypeName) {
-        Map<String, Object> sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false,
-            request.getXContentType()).v2();
-        if (includeTypeName == false && sourceAsMap.containsKey("mappings")) {
-            Map<String, Object> newSourceAsMap = new HashMap<>(sourceAsMap);
-            newSourceAsMap.put("mappings", Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, sourceAsMap.get("mappings")));
-            return newSourceAsMap;
-        } else {
-            return sourceAsMap;
-        }
-    }
 }
@@ -26,6 +26,7 @@ import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
@@ -33,8 +34,10 @@ import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.RestToXContentListener;

 import java.io.IOException;
+import java.util.Map;

 import static org.elasticsearch.client.Requests.putMappingRequest;
+import static org.elasticsearch.index.mapper.MapperService.isMappingSourceTyped;
 import static org.elasticsearch.rest.RestRequest.Method.POST;
 import static org.elasticsearch.rest.RestRequest.Method.PUT;

@@ -81,15 +84,20 @@ public class RestPutMappingAction extends BaseRestHandler {
             deprecationLogger.deprecatedAndMaybeLog("put_mapping_with_types", TYPES_DEPRECATION_MESSAGE);
         }

+        PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index")));
+
         final String type = request.param("type");
-        if (type != null && includeTypeName == false) {
+        putMappingRequest.type(includeTypeName ? type : MapperService.SINGLE_MAPPING_NAME);
+
+        Map<String, Object> sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false,
+            request.getXContentType()).v2();
+        if (includeTypeName == false &&
+                (type != null || isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap))) {
             throw new IllegalArgumentException("Types cannot be provided in put mapping requests, unless " +
                 "the include_type_name parameter is set to true.");
         }

-        PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index")));
-        putMappingRequest.type(includeTypeName ? type : MapperService.SINGLE_MAPPING_NAME);
-        putMappingRequest.source(request.requiredContent(), request.getXContentType());
+        putMappingRequest.source(sourceAsMap);
         putMappingRequest.timeout(request.paramAsTime("timeout", putMappingRequest.timeout()));
         putMappingRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putMappingRequest.masterNodeTimeout()));
         putMappingRequest.indicesOptions(IndicesOptions.fromRequest(request, putMappingRequest.indicesOptions()));
|
@ -20,7 +20,11 @@
|
||||||
package org.elasticsearch.rest.action.admin.indices;
|
package org.elasticsearch.rest.action.admin.indices;
|
||||||
|
|
||||||
import org.elasticsearch.client.node.NodeClient;
|
import org.elasticsearch.client.node.NodeClient;
|
||||||
|
import org.elasticsearch.common.bytes.BytesReference;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
|
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||||
import org.elasticsearch.rest.RestRequest;
|
import org.elasticsearch.rest.RestRequest;
|
||||||
import org.elasticsearch.test.rest.FakeRestRequest;
|
import org.elasticsearch.test.rest.FakeRestRequest;
|
||||||
import org.elasticsearch.test.rest.RestActionTestCase;
|
import org.elasticsearch.test.rest.RestActionTestCase;
|
||||||
|
@ -59,4 +63,80 @@ public class RestCreateIndexActionTests extends RestActionTestCase {
|
||||||
.build();
|
.build();
|
||||||
action.prepareRequest(validRequest, mock(NodeClient.class));
|
action.prepareRequest(validRequest, mock(NodeClient.class));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testPrepareTypelessRequest() throws IOException {
|
||||||
|
XContentBuilder content = XContentFactory.jsonBuilder().startObject()
|
||||||
|
.startObject("mappings")
|
||||||
|
.startObject("properties")
|
||||||
|
.startObject("field1").field("type", "keyword").endObject()
|
||||||
|
.startObject("field2").field("type", "text").endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject()
|
||||||
|
.startObject("aliases")
|
||||||
|
.startObject("read_alias").endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject();
|
||||||
|
|
||||||
|
Map<String, Object> contentAsMap = XContentHelper.convertToMap(
|
||||||
|
BytesReference.bytes(content), true, content.contentType()).v2();
|
||||||
|
boolean includeTypeName = false;
|
||||||
|
Map<String, Object> source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName);
|
||||||
|
|
||||||
|
XContentBuilder expectedContent = XContentFactory.jsonBuilder().startObject()
|
||||||
|
.startObject("mappings")
|
||||||
|
.startObject("_doc")
|
||||||
|
.startObject("properties")
|
||||||
|
.startObject("field1").field("type", "keyword").endObject()
|
||||||
|
.startObject("field2").field("type", "text").endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject()
|
||||||
|
.startObject("aliases")
|
||||||
|
.startObject("read_alias").endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject();
|
||||||
|
Map<String, Object> expectedContentAsMap = XContentHelper.convertToMap(
|
||||||
|
BytesReference.bytes(expectedContent), true, expectedContent.contentType()).v2();
|
||||||
|
|
||||||
|
assertEquals(expectedContentAsMap, source);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testPrepareTypedRequest() throws IOException {
|
||||||
|
XContentBuilder content = XContentFactory.jsonBuilder().startObject()
|
||||||
|
.startObject("mappings")
|
||||||
|
.startObject("type")
|
||||||
|
.startObject("properties")
|
||||||
|
.startObject("field1").field("type", "keyword").endObject()
|
||||||
|
.startObject("field2").field("type", "text").endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject()
|
||||||
|
.startObject("aliases")
|
||||||
|
.startObject("read_alias").endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject();
|
||||||
|
|
||||||
|
Map<String, Object> contentAsMap = XContentHelper.convertToMap(
|
||||||
|
BytesReference.bytes(content), true, content.contentType()).v2();
|
||||||
|
boolean includeTypeName = true;
|
||||||
|
Map<String, Object> source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName);
|
||||||
|
|
||||||
|
assertEquals(contentAsMap, source);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testMalformedMappings() throws IOException {
|
||||||
|
XContentBuilder content = XContentFactory.jsonBuilder().startObject()
|
||||||
|
.field("mappings", "some string")
|
||||||
|
.startObject("aliases")
|
||||||
|
.startObject("read_alias").endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject();
|
||||||
|
|
||||||
|
Map<String, Object> contentAsMap = XContentHelper.convertToMap(
|
||||||
|
BytesReference.bytes(content), true, content.contentType()).v2();
|
||||||
|
|
||||||
|
boolean includeTypeName = false;
|
||||||
|
Map<String, Object> source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName);
|
||||||
|
assertEquals(contentAsMap, source);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -24,7 +24,6 @@ import org.elasticsearch.common.bytes.BytesReference;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
|
||||||
import org.elasticsearch.common.xcontent.XContentType;
|
import org.elasticsearch.common.xcontent.XContentType;
|
||||||
import org.elasticsearch.rest.RestRequest;
|
import org.elasticsearch.rest.RestRequest;
|
||||||
import org.elasticsearch.test.rest.FakeRestRequest;
|
import org.elasticsearch.test.rest.FakeRestRequest;
|
||||||
|
@ -46,51 +45,6 @@ public class RestPutIndexTemplateActionTests extends RestActionTestCase {
|
||||||
action = new RestPutIndexTemplateAction(Settings.EMPTY, controller());
|
action = new RestPutIndexTemplateAction(Settings.EMPTY, controller());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testPrepareTypelessRequest() throws IOException {
|
|
||||||
XContentBuilder content = XContentFactory.jsonBuilder().startObject()
|
|
||||||
.startObject("mappings")
|
|
||||||
.startObject("properties")
|
|
||||||
.startObject("field1").field("type", "keyword").endObject()
|
|
||||||
.startObject("field2").field("type", "text").endObject()
|
|
||||||
.endObject()
|
|
||||||
.endObject()
|
|
||||||
.startObject("aliases")
|
|
||||||
.startObject("read_alias").endObject()
|
|
||||||
.endObject()
|
|
||||||
.endObject();
|
|
||||||
|
|
||||||
RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
|
|
||||||
.withMethod(RestRequest.Method.PUT)
|
|
||||||
.withPath("/_template/_some_template")
|
|
||||||
.withContent(BytesReference.bytes(content), XContentType.JSON)
|
|
||||||
.build();
|
|
||||||
action.prepareRequest(request, mock(NodeClient.class));
|
|
||||||
|
|
||||||
// Internally the above prepareRequest method calls prepareRequestSource to inject a
|
|
||||||
// default type into the mapping. Here we test that this does what is expected by
|
|
||||||
// explicitly calling that same helper function
|
|
||||||
boolean includeTypeName = false;
|
|
||||||
Map<String, Object> source = action.prepareRequestSource(request, includeTypeName);
|
|
||||||
|
|
||||||
XContentBuilder expectedContent = XContentFactory.jsonBuilder().startObject()
|
|
||||||
.startObject("mappings")
|
|
||||||
.startObject("_doc")
|
|
||||||
.startObject("properties")
|
|
||||||
.startObject("field1").field("type", "keyword").endObject()
|
|
||||||
.startObject("field2").field("type", "text").endObject()
|
|
||||||
.endObject()
|
|
||||||
.endObject()
|
|
||||||
.endObject()
|
|
||||||
.startObject("aliases")
|
|
||||||
.startObject("read_alias").endObject()
|
|
||||||
.endObject()
|
|
||||||
.endObject();
|
|
||||||
Map<String, Object> expectedContentAsMap = XContentHelper.convertToMap(
|
|
||||||
BytesReference.bytes(expectedContent), true, expectedContent.contentType()).v2();
|
|
||||||
|
|
||||||
assertEquals(expectedContentAsMap, source);
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testIncludeTypeName() throws IOException {
|
public void testIncludeTypeName() throws IOException {
|
||||||
XContentBuilder typedContent = XContentFactory.jsonBuilder().startObject()
|
XContentBuilder typedContent = XContentFactory.jsonBuilder().startObject()
|
||||||
.startObject("mappings")
|
.startObject("mappings")
|
||||||
|
@ -116,25 +70,5 @@ public class RestPutIndexTemplateActionTests extends RestActionTestCase {
|
||||||
.build();
|
.build();
|
||||||
action.prepareRequest(request, mock(NodeClient.class));
|
action.prepareRequest(request, mock(NodeClient.class));
|
||||||
assertWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE);
|
assertWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE);
|
||||||
boolean includeTypeName = true;
|
}
|
||||||
Map<String, Object> source = action.prepareRequestSource(request, includeTypeName);
|
|
||||||
|
|
||||||
XContentBuilder expectedContent = XContentFactory.jsonBuilder().startObject()
|
|
||||||
.startObject("mappings")
|
|
||||||
.startObject("my_doc")
|
|
||||||
.startObject("properties")
|
|
||||||
.startObject("field1").field("type", "keyword").endObject()
|
|
||||||
.startObject("field2").field("type", "text").endObject()
|
|
||||||
.endObject()
|
|
||||||
.endObject()
|
|
||||||
.endObject()
|
|
||||||
.startObject("aliases")
|
|
||||||
.startObject("read_alias").endObject()
|
|
||||||
.endObject()
|
|
||||||
.endObject();
|
|
||||||
Map<String, Object> expectedContentAsMap = XContentHelper.convertToMap(
|
|
||||||
BytesReference.bytes(expectedContent), true, expectedContent.contentType()).v2();
|
|
||||||
|
|
||||||
assertEquals(expectedContentAsMap, source);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -464,16 +464,15 @@ setups['sensor_rollup_job'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 0
|
number_of_replicas: 0
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
timestamp:
|
||||||
timestamp:
|
type: date
|
||||||
type: date
|
temperature:
|
||||||
temperature:
|
type: long
|
||||||
type: long
|
voltage:
|
||||||
voltage:
|
type: float
|
||||||
type: float
|
node:
|
||||||
node:
|
type: keyword
|
||||||
type: keyword
|
|
||||||
- do:
|
- do:
|
||||||
xpack.rollup.put_job:
|
xpack.rollup.put_job:
|
||||||
id: "sensor"
|
id: "sensor"
|
||||||
|
@ -514,21 +513,19 @@ setups['sensor_started_rollup_job'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 0
|
number_of_replicas: 0
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
timestamp:
|
||||||
timestamp:
|
type: date
|
||||||
type: date
|
temperature:
|
||||||
temperature:
|
type: long
|
||||||
type: long
|
voltage:
|
||||||
voltage:
|
type: float
|
||||||
type: float
|
node:
|
||||||
node:
|
type: keyword
|
||||||
type: keyword
|
|
||||||
|
|
||||||
- do:
|
- do:
|
||||||
bulk:
|
bulk:
|
||||||
index: sensor-1
|
index: sensor-1
|
||||||
type: _doc
|
|
||||||
refresh: true
|
refresh: true
|
||||||
body: |
|
body: |
|
||||||
{"index":{}}
|
{"index":{}}
|
||||||
|
@ -588,26 +585,25 @@ setups['sensor_index'] = '''
|
||||||
number_of_shards: 1
|
number_of_shards: 1
|
||||||
number_of_replicas: 0
|
number_of_replicas: 0
|
||||||
mappings:
|
mappings:
|
||||||
_doc:
|
properties:
|
||||||
properties:
|
timestamp:
|
||||||
timestamp:
|
type: date
|
||||||
type: date
|
temperature:
|
||||||
temperature:
|
type: long
|
||||||
type: long
|
voltage:
|
||||||
voltage:
|
type: float
|
||||||
type: float
|
node:
|
||||||
node:
|
type: keyword
|
||||||
type: keyword
|
load:
|
||||||
load:
|
type: double
|
||||||
type: double
|
net_in:
|
||||||
net_in:
|
type: long
|
||||||
type: long
|
net_out:
|
||||||
net_out:
|
type: long
|
||||||
type: long
|
hostname:
|
||||||
hostname:
|
type: keyword
|
||||||
type: keyword
|
datacenter:
|
||||||
datacenter:
|
type: keyword
|
||||||
type: keyword
|
|
||||||
'''
|
'''
|
||||||
|
|
||||||
setups['sensor_prefab_data'] = '''
|
setups['sensor_prefab_data'] = '''
|
||||||
|
@@ -619,16 +615,15 @@ setups['sensor_prefab_data'] = '''
             number_of_shards: 1
             number_of_replicas: 0
           mappings:
-            _doc:
-              properties:
-                timestamp:
-                  type: date
-                temperature:
-                  type: long
-                voltage:
-                  type: float
-                node:
-                  type: keyword
+            properties:
+              timestamp:
+                type: date
+              temperature:
+                type: long
+              voltage:
+                type: float
+              node:
+                type: keyword
   - do:
       indices.create:
         index: sensor_rollup
@@ -637,63 +632,61 @@ setups['sensor_prefab_data'] = '''
             number_of_shards: 1
             number_of_replicas: 0
           mappings:
-            _doc:
-              properties:
-                node.terms.value:
-                  type: keyword
-                temperature.sum.value:
-                  type: double
-                temperature.max.value:
-                  type: double
-                temperature.min.value:
-                  type: double
-                timestamp.date_histogram.time_zone:
-                  type: keyword
-                timestamp.date_histogram.interval:
-                  type: keyword
-                timestamp.date_histogram.timestamp:
-                  type: date
-                timestamp.date_histogram._count:
-                  type: long
-                voltage.avg.value:
-                  type: double
-                voltage.avg._count:
-                  type: long
-                _rollup.id:
-                  type: keyword
-                _rollup.version:
-                  type: long
-              _meta:
-                _rollup:
-                  sensor:
-                    cron: "* * * * * ?"
-                    rollup_index: "sensor_rollup"
-                    index_pattern: "sensor-*"
-                    timeout: "20s"
-                    page_size: 1000
-                    groups:
-                      date_histogram:
-                        field: "timestamp"
-                        interval: "7d"
-                        time_zone: "UTC"
-                      terms:
-                        fields:
-                          - "node"
-                    id: sensor
-                    metrics:
-                      - field: "temperature"
-                        metrics:
-                          - min
-                          - max
-                          - sum
-                      - field: "voltage"
-                        metrics:
-                          - avg
+            properties:
+              node.terms.value:
+                type: keyword
+              temperature.sum.value:
+                type: double
+              temperature.max.value:
+                type: double
+              temperature.min.value:
+                type: double
+              timestamp.date_histogram.time_zone:
+                type: keyword
+              timestamp.date_histogram.interval:
+                type: keyword
+              timestamp.date_histogram.timestamp:
+                type: date
+              timestamp.date_histogram._count:
+                type: long
+              voltage.avg.value:
+                type: double
+              voltage.avg._count:
+                type: long
+              _rollup.id:
+                type: keyword
+              _rollup.version:
+                type: long
+            _meta:
+              _rollup:
+                sensor:
+                  cron: "* * * * * ?"
+                  rollup_index: "sensor_rollup"
+                  index_pattern: "sensor-*"
+                  timeout: "20s"
+                  page_size: 1000
+                  groups:
+                    date_histogram:
+                      field: "timestamp"
+                      interval: "7d"
+                      time_zone: "UTC"
+                    terms:
+                      fields:
+                        - "node"
+                  id: sensor
+                  metrics:
+                    - field: "temperature"
+                      metrics:
+                        - min
+                        - max
+                        - sum
+                    - field: "voltage"
+                      metrics:
+                        - avg
 
   - do:
       bulk:
         index: sensor_rollup
-        type: _doc
         refresh: true
         body: |
           {"index":{}}
@@ -210,7 +210,7 @@ public class CCRIndexLifecycleIT extends ESCCRRestTestCase {
             // Create an index on the leader using the template set up above
             Request createIndexRequest = new Request("PUT", "/" + indexName);
             createIndexRequest.setJsonEntity("{" +
-                "\"mappings\": {\"_doc\": {\"properties\": {\"field\": {\"type\": \"keyword\"}}}}, " +
+                "\"mappings\": {\"properties\": {\"field\": {\"type\": \"keyword\"}}}, " +
                 "\"aliases\": {\"" + alias + "\": {\"is_write_index\": true}} }");
             assertOK(leaderClient.performRequest(createIndexRequest));
             // Check that the new index is created
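As a point of reference for the change above, the same kind of typeless create-index call can be made through the low-level REST client used in that test. This is only an illustrative sketch: the host, the index name `my-index`, and the `field` mapping are assumptions, not values taken from this commit.

```java
import java.io.IOException;

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class TypelessCreateIndexSketch {
    public static void main(String[] args) throws IOException {
        // Assumes a local node on the default port; adjust as needed.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request createIndexRequest = new Request("PUT", "/my-index"); // illustrative index name
            // Typeless mapping: "properties" sits directly under "mappings", with no "_doc" level.
            createIndexRequest.setJsonEntity(
                "{\"mappings\": {\"properties\": {\"field\": {\"type\": \"keyword\"}}}}");
            Response response = client.performRequest(createIndexRequest);
            System.out.println(response.getStatusLine());
        }
    }
}
```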
@@ -83,50 +83,46 @@ public class DataLoader {
         createIndex.endObject();
         createIndex.startObject("mappings");
         {
-            createIndex.startObject("_doc");
-            {
-                createIndex.startObject("properties");
-                {
-                    createIndex.startObject("emp_no").field("type", "integer");
-                    if (extraFields) {
-                        createIndex.field("copy_to", "extra_no");
-                    }
-                    createIndex.endObject();
-                    if (extraFields) {
-                        createIndex.startObject("extra_no").field("type", "integer").endObject();
-                    }
-                    createString("first_name", createIndex);
-                    createString("last_name", createIndex);
-                    createIndex.startObject("gender").field("type", "keyword");
-                    if (extraFields) {
-                        createIndex.field("copy_to", "extra_gender");
-                    }
-                    createIndex.endObject();
-
-                    if (extraFields) {
-                        createIndex.startObject("extra_gender").field("type", "keyword").endObject();
-                        createIndex.startObject("extra.info.gender")
-                            .field("type", "alias")
-                            .field("path", "gender")
-                            .endObject();
-                    }
-
-                    createIndex.startObject("birth_date").field("type", "date").endObject();
-                    createIndex.startObject("hire_date").field("type", "date").endObject();
-                    createIndex.startObject("salary").field("type", "integer").endObject();
-                    createIndex.startObject("languages").field("type", "byte").endObject();
-                    {
-                        createIndex.startObject("dep").field("type", "nested");
-                        createIndex.startObject("properties");
-                        createIndex.startObject("dep_id").field("type", "keyword").endObject();
-                        createString("dep_name", createIndex);
-                        createIndex.startObject("from_date").field("type", "date").endObject();
-                        createIndex.startObject("to_date").field("type", "date").endObject();
-                        createIndex.endObject();
-                        createIndex.endObject();
-                    }
-                }
-                createIndex.endObject();
+            createIndex.startObject("properties");
+            {
+                createIndex.startObject("emp_no").field("type", "integer");
+                if (extraFields) {
+                    createIndex.field("copy_to", "extra_no");
+                }
+                createIndex.endObject();
+                if (extraFields) {
+                    createIndex.startObject("extra_no").field("type", "integer").endObject();
+                }
+                createString("first_name", createIndex);
+                createString("last_name", createIndex);
+                createIndex.startObject("gender").field("type", "keyword");
+                if (extraFields) {
+                    createIndex.field("copy_to", "extra_gender");
+                }
+                createIndex.endObject();
+
+                if (extraFields) {
+                    createIndex.startObject("extra_gender").field("type", "keyword").endObject();
+                    createIndex.startObject("extra.info.gender")
+                        .field("type", "alias")
+                        .field("path", "gender")
+                        .endObject();
+                }
+
+                createIndex.startObject("birth_date").field("type", "date").endObject();
+                createIndex.startObject("hire_date").field("type", "date").endObject();
+                createIndex.startObject("salary").field("type", "integer").endObject();
+                createIndex.startObject("languages").field("type", "byte").endObject();
+                {
+                    createIndex.startObject("dep").field("type", "nested");
+                    createIndex.startObject("properties");
+                    createIndex.startObject("dep_id").field("type", "keyword").endObject();
+                    createString("dep_name", createIndex);
+                    createIndex.startObject("from_date").field("type", "date").endObject();
+                    createIndex.startObject("to_date").field("type", "date").endObject();
+                    createIndex.endObject();
+                    createIndex.endObject();
+                }
             }
             createIndex.endObject();
         }
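The DataLoader hunks above and below all follow the same pattern: "properties" is now built directly under "mappings", with no intermediate "_doc" object. Below is a minimal, self-contained sketch of that builder layout; the class name and the single "field" mapping are illustrative, and the xcontent package names are assumed to match this branch.

```java
import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class TypelessMappingBuilderSketch {
    // Builds { "mappings": { "properties": { "field": { "type": "keyword" } } } }
    // with no "_doc" level, matching the layout used in the tests above.
    static XContentBuilder typelessMapping() throws IOException {
        XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
        createIndex.startObject("mappings");
        {
            createIndex.startObject("properties");
            {
                // Illustrative field; the real tests add many more, as in the hunk above.
                createIndex.startObject("field").field("type", "keyword").endObject();
            }
            createIndex.endObject(); // properties
        }
        createIndex.endObject();     // mappings
        return createIndex.endObject(); // root object
    }
}
```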
@@ -211,20 +207,16 @@ public class DataLoader {
         createIndex.endObject();
         createIndex.startObject("mappings");
         {
-            createIndex.startObject("_doc");
-            {
-                createIndex.startObject("properties");
-                {
-                    createIndex.startObject("id").field("type", "integer").endObject();
-                    createIndex.startObject("@timestamp").field("type", "date").endObject();
-                    createIndex.startObject("bytes_in").field("type", "integer").endObject();
-                    createIndex.startObject("bytes_out").field("type", "integer").endObject();
-                    createIndex.startObject("client_ip").field("type", "ip").endObject();
-                    createIndex.startObject("client_port").field("type", "integer").endObject();
-                    createIndex.startObject("dest_ip").field("type", "ip").endObject();
-                    createIndex.startObject("status").field("type", "keyword").endObject();
-                }
-                createIndex.endObject();
+            createIndex.startObject("properties");
+            {
+                createIndex.startObject("id").field("type", "integer").endObject();
+                createIndex.startObject("@timestamp").field("type", "date").endObject();
+                createIndex.startObject("bytes_in").field("type", "integer").endObject();
+                createIndex.startObject("bytes_out").field("type", "integer").endObject();
+                createIndex.startObject("client_ip").field("type", "ip").endObject();
+                createIndex.startObject("client_port").field("type", "integer").endObject();
+                createIndex.startObject("dest_ip").field("type", "ip").endObject();
+                createIndex.startObject("status").field("type", "keyword").endObject();
             }
             createIndex.endObject();
         }
@@ -263,16 +255,12 @@ public class DataLoader {
         createIndex.endObject();
         createIndex.startObject("mappings");
         {
-            createIndex.startObject("_doc");
-            {
-                createIndex.startObject("properties");
-                {
-                    createString("name", createIndex);
-                    createString("author", createIndex);
-                    createIndex.startObject("release_date").field("type", "date").endObject();
-                    createIndex.startObject("page_count").field("type", "short").endObject();
-                }
-                createIndex.endObject();
+            createIndex.startObject("properties");
+            {
+                createString("name", createIndex);
+                createString("author", createIndex);
+                createIndex.startObject("release_date").field("type", "date").endObject();
+                createIndex.startObject("page_count").field("type", "short").endObject();
             }
             createIndex.endObject();
         }
@@ -329,4 +317,4 @@ public class DataLoader {
     public static InputStream readFromJarUrl(URL source) throws IOException {
         return source.openStream();
     }
 }
@@ -30,16 +30,12 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
         XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
         createIndex.startObject("mappings");
         {
-            createIndex.startObject("_doc");
-            {
-                createIndex.startObject("properties");
-                {
-                    createIndex.startObject("nested").field("type", "nested");
-                    createIndex.startObject("properties");
-                    createIndex.startObject("inner_field").field("type", "integer").endObject();
-                    createIndex.endObject();
-                    createIndex.endObject();
-                }
+            createIndex.startObject("properties");
+            {
+                createIndex.startObject("nested").field("type", "nested");
+                createIndex.startObject("properties");
+                createIndex.startObject("inner_field").field("type", "integer").endObject();
+                createIndex.endObject();
                 createIndex.endObject();
             }
             createIndex.endObject();
@@ -6,12 +6,11 @@ setup:
         index: foo
         body:
           mappings:
-            _doc:
-              properties:
-                the_field:
-                  type: date
-                value_field:
-                  type: integer
+            properties:
+              the_field:
+                type: date
+              value_field:
+                type: integer
 
   - do:
       headers:
@@ -6,12 +6,11 @@ setup:
         index: foo
         body:
           mappings:
-            _doc:
-              properties:
-                the_field:
-                  type: date
-                value_field:
-                  type: integer
+            properties:
+              the_field:
+                type: date
+              value_field:
+                type: integer
 
 ---
 "Test basic get_jobs":
@@ -6,12 +6,11 @@ setup:
         index: foo
         body:
           mappings:
-            _doc:
-              properties:
-                the_field:
-                  type: date
-                value_field:
-                  type: integer
+            properties:
+              the_field:
+                type: date
+              value_field:
+                type: integer
 
 ---
 "Test basic put_job":
@@ -6,14 +6,13 @@ setup:
         index: foo
         body:
           mappings:
-            _doc:
-              properties:
-                timestamp:
-                  type: date
-                partition:
-                  type: keyword
-                price:
-                  type: integer
+            properties:
+              timestamp:
+                type: date
+              partition:
+                type: keyword
+              price:
+                type: integer
 
   - do:
       headers:
@@ -51,7 +50,6 @@ setup:
         body:
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T05:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -67,7 +65,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T06:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -83,7 +80,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T07:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -99,7 +95,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T08:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -115,7 +110,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T08:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -319,7 +313,6 @@ setup:
         body:
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T05:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -335,7 +328,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T06:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -351,7 +343,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T07:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -367,7 +358,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T08:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -383,7 +373,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T08:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -467,7 +456,6 @@ setup:
         body:
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T05:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -483,7 +471,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T06:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -499,7 +486,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T07:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -515,7 +501,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T08:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -531,7 +516,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T08:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -617,7 +601,6 @@ setup:
         body:
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T05:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -633,7 +616,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T06:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -649,7 +631,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T07:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -665,7 +646,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T08:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -681,7 +661,6 @@ setup:
 
           - index:
               _index: "foo_rollup"
-              _type: "_doc"
           - timestamp.date_histogram.timestamp: "2017-01-01T08:00:00Z"
             timestamp.date_histogram.interval: "1h"
             timestamp.date_histogram.time_zone: "UTC"
@@ -760,14 +739,13 @@ setup:
         index: bar
         body:
           mappings:
-            _doc:
-              properties:
-                timestamp:
-                  type: date
-                partition:
-                  type: keyword
-                price:
-                  type: integer
+            properties:
+              timestamp:
+                type: date
+              partition:
+                type: keyword
+              price:
+                type: integer
 
   - do:
       headers:
@@ -849,14 +827,13 @@ setup:
         index: bar
         body:
           mappings:
-            _doc:
-              properties:
-                timestamp:
-                  type: date
-                partition:
-                  type: keyword
-                price:
-                  type: integer
+            properties:
+              timestamp:
+                type: date
+              partition:
+                type: keyword
+              price:
+                type: integer
 
   - do:
       headers:
@@ -48,18 +48,16 @@ teardown:
         index: foo
         body:
           mappings:
-            _doc:
-              properties:
-                timestamp:
-                  type: date
-                value_field:
-                  type: integer
+            properties:
+              timestamp:
+                type: date
+              value_field:
+                type: integer
   - do:
       headers:
         Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
       index:
         index: foo
-        type: _doc
         body:
           timestamp: 123
           value_field: 1232
@@ -69,18 +67,16 @@ teardown:
         index: foobar
         body:
           mappings:
-            _doc:
-              properties:
-                timestamp:
-                  type: date
-                value_field:
-                  type: integer
+            properties:
+              timestamp:
+                type: date
+              value_field:
+                type: integer
   - do:
       headers:
         Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
       index:
         index: foobar
-        type: _doc
         body:
           timestamp: 123
           value_field: 456
@@ -220,20 +216,18 @@ teardown:
         index: foo
         body:
           mappings:
-            _doc:
-              properties:
-                timestamp:
-                  type: date
-                value_field:
-                  type: integer
-                visibility:
-                  type: keyword
+            properties:
+              timestamp:
+                type: date
+              value_field:
+                type: integer
+              visibility:
+                type: keyword
   - do:
       headers:
         Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
       index:
         index: foo
-        type: _doc
         body:
           timestamp: 123
           value_field: 1232
@@ -243,7 +237,6 @@ teardown:
         Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
       index:
         index: foobar
-        type: _doc
         body:
           timestamp: 123
           value_field: 456
@@ -6,12 +6,11 @@ setup:
         index: foo
         body:
           mappings:
-            _doc:
-              properties:
-                the_field:
-                  type: date
-                value_field:
-                  type: integer
+            properties:
+              the_field:
+                type: date
+              value_field:
+                type: integer
 
   - do:
       headers:
@@ -6,12 +6,11 @@ setup:
         index: foo
         body:
           mappings:
-            _doc:
-              properties:
-                the_field:
-                  type: date
-                value_field:
-                  type: integer
+            properties:
+              the_field:
+                type: date
+              value_field:
+                type: integer
 
   - do:
       headers:
@@ -69,7 +69,7 @@ public class RollupIT extends ESRestTestCase {
         try (XContentBuilder builder = jsonBuilder()) {
             builder.startObject();
             {
-                builder.startObject("mappings").startObject("_doc")
+                builder.startObject("mappings")
                     .startObject("properties")
                         .startObject("timestamp")
                             .field("type", "date")
@@ -78,7 +78,6 @@ public class RollupIT extends ESRestTestCase {
                         .startObject("value")
                             .field("type", "integer")
                         .endObject()
-                    .endObject()
                 .endObject().endObject();
             }
             builder.endObject();