Fix the timestamp field of a data stream to @timestamp (#59210)

Backport of #59076 to 7.x branch.

The commit makes the following changes:
* The timestamp field of a data stream definition in a composable
  index template can only be set to '@timestamp' (sketched below).
* Removed the custom data stream timestamp field validation, reusing the validation from `TimestampFieldMapper`
  instead, and now only check that the `_timestamp` field mapping has been defined on a backing index of a data stream.
* Moved the code that injects the `_timestamp` meta field mapping from the `MetadataCreateIndexService#applyCreateIndexRequestWithV2Template(...)` method
  to the `MetadataIndexTemplateService#collectMappings(...)` method.
* Fixed a bug (#58956) that caused timestamp field validation to be performed
  per template instead of on the final composed mappings.
* Only apply the `_timestamp` meta field if the index is created as part of a data stream or a data stream rollover
  (sketched after the issue references below); this fixes a docs test where a regular index creation matched a
  template (`logs-*`) that has a data stream definition.
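A minimal sketch of the new constraint, assuming only the constant and error message visible in the diffs
below (the class name here is illustrative; in the actual change the check lives in
ComposableIndexTemplate.DataStreamTemplate and DataStream.TimestampField):

public final class DataStreamTimestampSketch {
    // Data streams are pinned to a single, well-known timestamp field.
    public static final String FIXED_TIMESTAMP_FIELD = "@timestamp";

    private final String timestampField;

    public DataStreamTimestampSketch(String timestampField) {
        // Any other field name is rejected up front, replacing the previous
        // per-template custom timestamp field validation.
        if (FIXED_TIMESTAMP_FIELD.equals(timestampField) == false) {
            throw new IllegalArgumentException("unexpected timestamp field [" + timestampField + "]");
        }
        this.timestampField = timestampField;
    }

    public String getTimestampField() {
        return timestampField;
    }
}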

Relates to #58642
Relates to #53100
Closes #58956
Closes #58583
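The `_timestamp` meta field mapping snippet is now injected while template mappings are collected, and only
when the index being created is a data stream backing index. A hedged sketch of that behaviour using plain
java.util collections (the helper names are illustrative; the real logic lives in
MetadataIndexTemplateService#collectMappings(...) and
ComposableIndexTemplate.DataStreamTemplate#getDataSteamMappingSnippet()):

import java.util.List;
import java.util.Map;

final class TimestampMappingInjectionSketch {
    // Backing indices of a data stream are created with this prefix.
    static final String BACKING_INDEX_PREFIX = ".ds-";

    // Mapping snippet that configures the _timestamp meta field mapper,
    // equivalent to {"_doc": {"_timestamp": {"path": "@timestamp"}}}.
    static Map<String, Object> timestampMetaMappingSnippet(String timestampField) {
        return Map.of("_doc", Map.of("_timestamp", Map.of("path", timestampField)));
    }

    // Append the snippet last, and only for backing indices of a template with a
    // data stream definition, so it takes precedence over template mappings and is
    // never applied to a regular index that merely matches the template's patterns.
    static void maybeAddTimestampMapping(String indexName, boolean hasDataStreamTemplate,
                                         List<Map<String, Object>> mappings) {
        if (indexName.startsWith(BACKING_INDEX_PREFIX) && hasDataStreamTemplate) {
            mappings.add(timestampMetaMappingSnippet("@timestamp"));
        }
    }
}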
Martijn van Groningen 2020-07-08 17:30:46 +02:00 committed by GitHub
parent 6a7ac3830a
commit 17bd559253
39 changed files with 286 additions and 388 deletions

View File

@ -86,7 +86,7 @@ public class GetComposableIndexTemplatesResponseTests extends ESTestCase {
Long priority = randomBoolean() ? null : randomNonNegativeLong();
Long version = randomBoolean() ? null : randomNonNegativeLong();
if (randomBoolean()) {
dataStreamTemplate = new ComposableIndexTemplate.DataStreamTemplate(randomAlphaOfLength(8));
dataStreamTemplate = new ComposableIndexTemplate.DataStreamTemplate("@timestamp");
}
return new ComposableIndexTemplate(patterns, randomTemplate(), composedOf, priority, version, meta, dataStreamTemplate);
}

View File

@ -55,7 +55,7 @@ public class GetDataStreamResponseTests extends AbstractResponseTestCase<GetData
long generation = indices.size() + randomLongBetween(1, 128);
String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
indices.add(new Index(getDefaultBackingIndexName(dataStreamName, generation), UUIDs.randomBase64UUID(random())));
DataStream dataStream = new DataStream(dataStreamName, createTimestampField(randomAlphaOfLength(10)), indices, generation);
DataStream dataStream = new DataStream(dataStreamName, createTimestampField("@timestamp"), indices, generation);
return new DataStreamInfo(dataStream, ClusterHealthStatus.YELLOW, randomAlphaOfLengthBetween(2, 10),
randomAlphaOfLengthBetween(2, 10));
}

View File

@ -235,14 +235,14 @@ PUT _index_template/template
"template": {
"mappings": {
"properties": {
"date": {
"@timestamp": {
"type": "date"
}
}
}
},
"data_stream": {
"timestamp_field": "date"
"timestamp_field": "@timestamp"
}
}
-----------------------------------
@ -257,15 +257,15 @@ PUT /_data_stream/my-data-stream <1>
POST /my-data-stream/_rollover <2>
{
"conditions" : {
"max_age": "7d",
"max_docs": 1000,
"max_size": "5gb"
"max_age": "7d",
"max_docs": 1000,
"max_size": "5gb"
}
}
--------------------------------------------------
// TEST[continued]
// TEST[setup:huge_twitter]
// TEST[s/# Add > 1000 documents to my-data-stream/POST _reindex?refresh\n{"source":{"index":"twitter"},"dest":{"index":"my-data-stream","op_type":"create"}}/]
// TEST[s/# Add > 1000 documents to my-data-stream/POST _reindex?refresh\n{"source":{"index":"twitter"},"dest":{"index":"my-data-stream","op_type":"create"},"script":{"source":"ctx._source.put('@timestamp',ctx._source.remove('date'))"}}/]
<1> Creates a data stream called `my-data-stream` with one initial backing index
named `my-data-stream-000001`.
<2> This request creates a new backing index, `my-data-stream-000002`, and adds
@ -307,7 +307,7 @@ The API returns the following response:
[source,console]
-----------------------------------
DELETE /_data_stream/my-data-stream
DELETE /_index_template/*
DELETE /_index_template/template
-----------------------------------
// TEST[continued]
////

View File

@ -10,11 +10,11 @@ setup:
body:
index_patterns: logs-*
data_stream:
timestamp_field: timestamp
timestamp_field: '@timestamp'
template:
mappings:
properties:
timestamp:
'@timestamp':
type: date
---
@ -36,7 +36,7 @@ teardown:
refresh: true
body:
foo: bar
timestamp: '2020-12-12'
'@timestamp': '2020-12-12'
- do:
reindex:
@ -69,7 +69,7 @@ teardown:
refresh: true
body:
foo: bar
timestamp: '2020-12-12'
'@timestamp': '2020-12-12'
- do:
reindex:
@ -102,7 +102,7 @@ teardown:
refresh: true
body:
foo: bar
timestamp: '2020-12-12'
'@timestamp': '2020-12-12'
- do:
reindex:

View File

@ -51,4 +51,4 @@
- match: {data_streams.1.name: my_remote_cluster:simple-data-stream2}
- match: {data_streams.1.backing_indices.0: .ds-simple-data-stream2-000001}
- match: {data_streams.1.backing_indices.1: .ds-simple-data-stream2-000002}
- match: {data_streams.1.timestamp_field: "@timestamp2"}
- match: {data_streams.1.timestamp_field: "@timestamp"}

View File

@ -27,10 +27,10 @@
template:
mappings:
properties:
'@timestamp2':
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp2'
timestamp_field: '@timestamp'
- do:
indices.create_data_stream:

View File

@ -27,10 +27,10 @@ setup:
template:
mappings:
properties:
'@timestamp2':
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp2'
timestamp_field: '@timestamp'
---
"Create data stream":
@ -59,7 +59,7 @@ setup:
- match: { data_streams.0.status: 'GREEN' }
- match: { data_streams.0.template: 'my-template1' }
- match: { data_streams.1.name: simple-data-stream2 }
- match: { data_streams.1.timestamp_field.name: '@timestamp2' }
- match: { data_streams.1.timestamp_field.name: '@timestamp' }
- match: { data_streams.0.generation: 1 }
- length: { data_streams.1.indices: 1 }
- match: { data_streams.1.indices.0.index_name: '.ds-simple-data-stream2-000001' }
@ -129,11 +129,9 @@ setup:
indices.get_data_stream: {}
- match: { data_streams.0.name: simple-data-stream1 }
- match: { data_streams.0.timestamp_field.name: '@timestamp' }
- match: { data_streams.0.timestamp_field.mapping: {type: date} }
- match: { data_streams.0.generation: 1 }
- match: { data_streams.1.name: simple-data-stream2 }
- match: { data_streams.1.timestamp_field.name: '@timestamp2' }
- match: { data_streams.1.timestamp_field.mapping: {type: date} }
- match: { data_streams.1.timestamp_field.name: '@timestamp' }
- match: { data_streams.1.generation: 1 }
- do:
@ -141,7 +139,6 @@ setup:
name: simple-data-stream1
- match: { data_streams.0.name: simple-data-stream1 }
- match: { data_streams.0.timestamp_field.name: '@timestamp' }
- match: { data_streams.0.timestamp_field.mapping: {type: date} }
- match: { data_streams.0.generation: 1 }
- do:
@ -149,11 +146,9 @@ setup:
name: simple-data-stream*
- match: { data_streams.0.name: simple-data-stream1 }
- match: { data_streams.0.timestamp_field.name: '@timestamp' }
- match: { data_streams.0.timestamp_field.mapping: {type: date} }
- match: { data_streams.0.generation: 1 }
- match: { data_streams.1.name: simple-data-stream2 }
- match: { data_streams.1.timestamp_field.name: '@timestamp2' }
- match: { data_streams.1.timestamp_field.mapping: {type: date} }
- match: { data_streams.1.timestamp_field.name: '@timestamp' }
- match: { data_streams.1.generation: 1 }
- do:
@ -240,35 +235,35 @@ setup:
template:
mappings:
properties:
'timestamp':
'@timestamp':
type: date
data_stream:
timestamp_field: timestamp
timestamp_field: '@timestamp'
- do:
index:
index: logs-foobar
body: { timestamp: '2020-12-12' }
body: { '@timestamp': '2020-12-12' }
- match: { _index: .ds-logs-foobar-000001 }
- do:
catch: bad_request
index:
index: .ds-logs-foobar-000001
body: { timestamp: '2020-12-12' }
body: { '@timestamp': '2020-12-12' }
- do:
bulk:
body:
- create:
_index: .ds-logs-foobar-000001
- timestamp: '2020-12-12'
- '@timestamp': '2020-12-12'
- index:
_index: .ds-logs-foobar-000001
- timestamp: '2020-12-12'
- '@timestamp': '2020-12-12'
- create:
_index: logs-foobar
- timestamp: '2020-12-12'
- '@timestamp': '2020-12-12'
- match: { errors: true }
- match: { items.0.create.status: 400 }
- match: { items.0.create.error.type: illegal_argument_exception }
@ -301,10 +296,10 @@ setup:
template:
mappings:
properties:
'timestamp':
'@timestamp':
type: date
data_stream:
timestamp_field: timestamp
timestamp_field: '@timestamp'
- do:
catch: bad_request
@ -320,19 +315,19 @@ setup:
- foo: bar
- create:
_index: logs-foobar
- timestamp: '2020-12-12'
- '@timestamp': '2020-12-12'
- create:
_index: logs-foobar
- timestamp: ['2020-12-12', '2022-12-12']
- '@timestamp': ['2020-12-12', '2022-12-12']
- match: { errors: true }
- match: { items.0.create.status: 400 }
- match: { items.0.create.error.caused_by.type: illegal_argument_exception }
- match: { items.0.create.error.caused_by.reason: "data stream timestamp field [timestamp] is missing" }
- match: { items.0.create.error.caused_by.reason: "data stream timestamp field [@timestamp] is missing" }
- match: { items.1.create.result: created }
- match: { items.1.create._index: .ds-logs-foobar-000001 }
- match: { items.2.create.status: 400 }
- match: { items.2.create.error.caused_by.type: illegal_argument_exception }
- match: { items.2.create.error.caused_by.reason: "data stream timestamp field [timestamp] encountered multiple values" }
- match: { items.2.create.error.caused_by.reason: "data stream timestamp field [@timestamp] encountered multiple values" }
- do:
indices.delete_data_stream:

View File

@ -13,14 +13,14 @@
body:
index_patterns: logs-*
data_stream:
timestamp_field: timestamp
timestamp_field: '@timestamp'
template:
settings:
number_of_shards: 1
number_of_replicas: 0
mappings:
properties:
timestamp:
'@timestamp':
type: date
- do:
@ -28,7 +28,7 @@
index: logs-foobar
refresh: true
body:
'timestamp': '2020-12-12'
'@timestamp': '2020-12-12'
foo: bar
- do:
@ -43,7 +43,7 @@
indices.get_data_stream:
name: logs-foobar
- match: { data_streams.0.name: logs-foobar }
- match: { data_streams.0.timestamp_field.name: 'timestamp' }
- match: { data_streams.0.timestamp_field.name: '@timestamp' }
- length: { data_streams.0.indices: 1 }
- match: { data_streams.0.indices.0.index_name: '.ds-logs-foobar-000001' }

View File

@ -105,7 +105,7 @@ setup:
- match: {data_streams.1.name: simple-data-stream2}
- match: {data_streams.1.backing_indices.0: .ds-simple-data-stream2-000001}
- match: {data_streams.1.backing_indices.1: .ds-simple-data-stream2-000002}
- match: {data_streams.1.timestamp_field: "@timestamp2"}
- match: {data_streams.1.timestamp_field: "@timestamp"}
- do:
indices.delete_data_stream:
@ -165,7 +165,7 @@ setup:
- match: {data_streams.1.name: simple-data-stream2}
- match: {data_streams.1.backing_indices.0: .ds-simple-data-stream2-000001}
- match: {data_streams.1.backing_indices.1: .ds-simple-data-stream2-000002}
- match: {data_streams.1.timestamp_field: "@timestamp2"}
- match: {data_streams.1.timestamp_field: "@timestamp"}
- do:
indices.delete_data_stream:

View File

@ -111,11 +111,11 @@ public class DataStreamIT extends ESIntegTestCase {
}
public void testBasicScenario() throws Exception {
putComposableIndexTemplate("id1", "@timestamp1", List.of("metrics-foo*"));
putComposableIndexTemplate("id1", "@timestamp", List.of("metrics-foo*"));
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request("metrics-foo");
client().admin().indices().createDataStream(createDataStreamRequest).get();
putComposableIndexTemplate("id2", "@timestamp2", List.of("metrics-bar*"));
putComposableIndexTemplate("id2", "@timestamp", List.of("metrics-bar*"));
createDataStreamRequest = new CreateDataStreamAction.Request("metrics-bar");
client().admin().indices().createDataStream(createDataStreamRequest).get();
@ -125,15 +125,13 @@ public class DataStreamIT extends ESIntegTestCase {
assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(2));
DataStream firstDataStream = getDataStreamResponse.getDataStreams().get(0).getDataStream();
assertThat(firstDataStream.getName(), equalTo("metrics-bar"));
assertThat(firstDataStream.getTimeStampField().getName(), equalTo("@timestamp2"));
assertThat(firstDataStream.getTimeStampField().getFieldMapping(), equalTo(Map.of("type", "date")));
assertThat(firstDataStream.getTimeStampField().getName(), equalTo("@timestamp"));
assertThat(firstDataStream.getIndices().size(), equalTo(1));
assertThat(firstDataStream.getIndices().get(0).getName(),
equalTo(DataStream.getDefaultBackingIndexName("metrics-bar", 1)));
DataStream dataStream = getDataStreamResponse.getDataStreams().get(1).getDataStream();
assertThat(dataStream.getName(), equalTo("metrics-foo"));
assertThat(dataStream.getTimeStampField().getName(), equalTo("@timestamp1"));
assertThat(dataStream.getTimeStampField().getFieldMapping(), equalTo(Map.of("type", "date")));
assertThat(dataStream.getTimeStampField().getName(), equalTo("@timestamp"));
assertThat(dataStream.getIndices().size(), equalTo(1));
assertThat(dataStream.getIndices().get(0).getName(),
equalTo(DataStream.getDefaultBackingIndexName("metrics-foo", 1)));
@ -144,19 +142,19 @@ public class DataStreamIT extends ESIntegTestCase {
assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue());
assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true));
java.util.Map<?, ?> mappings = getIndexResponse.getMappings().get(backingIndex).get("_doc").getSourceAsMap();
assertThat(ObjectPath.eval("properties.@timestamp2.type", mappings), is("date"));
assertThat(ObjectPath.eval("properties.@timestamp.type", mappings), is("date"));
backingIndex = DataStream.getDefaultBackingIndexName("metrics-foo", 1);
getIndexResponse = client().admin().indices().getIndex(new GetIndexRequest().indices(backingIndex)).actionGet();
assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue());
assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true));
mappings = getIndexResponse.getMappings().get(backingIndex).get("_doc").getSourceAsMap();
assertThat(ObjectPath.eval("properties.@timestamp1.type", mappings), is("date"));
assertThat(ObjectPath.eval("properties.@timestamp.type", mappings), is("date"));
int numDocsBar = randomIntBetween(2, 16);
indexDocs("metrics-bar", "@timestamp2", numDocsBar);
indexDocs("metrics-bar", "@timestamp", numDocsBar);
int numDocsFoo = randomIntBetween(2, 16);
indexDocs("metrics-foo", "@timestamp1", numDocsFoo);
indexDocs("metrics-foo", "@timestamp", numDocsFoo);
verifyDocs("metrics-bar", numDocsBar, 1, 1);
verifyDocs("metrics-foo", numDocsFoo, 1, 1);
@ -174,19 +172,19 @@ public class DataStreamIT extends ESIntegTestCase {
assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue());
assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true));
mappings = getIndexResponse.getMappings().get(backingIndex).get("_doc").getSourceAsMap();
assertThat(ObjectPath.eval("properties.@timestamp1.type", mappings), is("date"));
assertThat(ObjectPath.eval("properties.@timestamp.type", mappings), is("date"));
backingIndex = DataStream.getDefaultBackingIndexName("metrics-bar", 2);
getIndexResponse = client().admin().indices().getIndex(new GetIndexRequest().indices(backingIndex)).actionGet();
assertThat(getIndexResponse.getSettings().get(backingIndex), notNullValue());
assertThat(getIndexResponse.getSettings().get(backingIndex).getAsBoolean("index.hidden", null), is(true));
mappings = getIndexResponse.getMappings().get(backingIndex).get("_doc").getSourceAsMap();
assertThat(ObjectPath.eval("properties.@timestamp2.type", mappings), is("date"));
assertThat(ObjectPath.eval("properties.@timestamp.type", mappings), is("date"));
int numDocsBar2 = randomIntBetween(2, 16);
indexDocs("metrics-bar", "@timestamp2", numDocsBar2);
indexDocs("metrics-bar", "@timestamp", numDocsBar2);
int numDocsFoo2 = randomIntBetween(2, 16);
indexDocs("metrics-foo", "@timestamp1", numDocsFoo2);
indexDocs("metrics-foo", "@timestamp", numDocsFoo2);
verifyDocs("metrics-bar", numDocsBar + numDocsBar2, 1, 2);
verifyDocs("metrics-foo", numDocsFoo + numDocsFoo2, 1, 2);
@ -211,7 +209,7 @@ public class DataStreamIT extends ESIntegTestCase {
}
public void testOtherWriteOps() throws Exception {
putComposableIndexTemplate("id", "@timestamp1", List.of("metrics-foobar*"));
putComposableIndexTemplate("id", "@timestamp", List.of("metrics-foobar*"));
String dataStreamName = "metrics-foobar";
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(createDataStreamRequest).get();
@ -239,7 +237,7 @@ public class DataStreamIT extends ESIntegTestCase {
}
{
IndexRequest indexRequest = new IndexRequest(dataStreamName)
.source("{\"@timestamp1\": \"2020-12-12\"}", XContentType.JSON);
.source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON);
Exception e = expectThrows(IndexNotFoundException.class, () -> client().index(indexRequest).actionGet());
assertThat(e.getMessage(), equalTo("no such index [null]"));
}
@ -256,14 +254,14 @@ public class DataStreamIT extends ESIntegTestCase {
}
{
IndexRequest indexRequest = new IndexRequest(dataStreamName)
.source("{\"@timestamp1\": \"2020-12-12\"}", XContentType.JSON)
.source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON)
.opType(DocWriteRequest.OpType.CREATE);
IndexResponse indexResponse = client().index(indexRequest).actionGet();
assertThat(indexResponse.getIndex(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 1)));
}
{
BulkRequest bulkRequest = new BulkRequest()
.add(new IndexRequest(dataStreamName).source("{\"@timestamp1\": \"2020-12-12\"}", XContentType.JSON)
.add(new IndexRequest(dataStreamName).source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON)
.opType(DocWriteRequest.OpType.CREATE));
BulkResponse bulkItemResponses = client().bulk(bulkRequest).actionGet();
assertThat(bulkItemResponses.getItems()[0].getIndex(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 1)));
@ -360,7 +358,7 @@ public class DataStreamIT extends ESIntegTestCase {
Exception e = expectThrows(IllegalArgumentException.class,
() -> client().execute(PutComposableIndexTemplateAction.INSTANCE, createTemplateRequest).actionGet());
assertThat(e.getCause().getCause().getMessage(), equalTo("expected timestamp field [@timestamp], but found no timestamp field"));
assertThat(e.getCause().getCause().getMessage(), equalTo("the configured timestamp field [@timestamp] does not exist"));
}
public void testTimeStampValidationInvalidFieldMapping() throws Exception {
@ -383,18 +381,18 @@ public class DataStreamIT extends ESIntegTestCase {
Exception e = expectThrows(IllegalArgumentException.class,
() -> client().execute(PutComposableIndexTemplateAction.INSTANCE, createTemplateRequest).actionGet());
assertThat(e.getCause().getCause().getMessage(), equalTo("expected timestamp field [@timestamp] to be of types " +
"[date, date_nanos], but instead found type [keyword]"));
assertThat(e.getCause().getCause().getMessage(),
equalTo("the configured timestamp field [@timestamp] is of type [keyword], but [date,date_nanos] is expected"));
}
public void testResolvabilityOfDataStreamsInAPIs() throws Exception {
putComposableIndexTemplate("id", "ts", List.of("logs-*"));
putComposableIndexTemplate("id", "@timestamp", List.of("logs-*"));
String dataStreamName = "logs-foobar";
CreateDataStreamAction.Request request = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(request).actionGet();
verifyResolvability(dataStreamName, client().prepareIndex(dataStreamName, "_doc")
.setSource("{\"ts\": \"2020-12-12\"}", XContentType.JSON)
.setSource("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON)
.setOpType(DocWriteRequest.OpType.CREATE),
false);
verifyResolvability(dataStreamName, refreshBuilder(dataStreamName), false);
@ -427,7 +425,7 @@ public class DataStreamIT extends ESIntegTestCase {
request = new CreateDataStreamAction.Request("logs-barbaz");
client().admin().indices().createDataStream(request).actionGet();
verifyResolvability("logs-barbaz", client().prepareIndex("logs-barbaz", "_doc")
.setSource("{\"ts\": \"2020-12-12\"}", XContentType.JSON)
.setSource("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON)
.setOpType(DocWriteRequest.OpType.CREATE),
false);
@ -461,7 +459,7 @@ public class DataStreamIT extends ESIntegTestCase {
}
public void testCannotDeleteComposableTemplateUsedByDataStream() throws Exception {
putComposableIndexTemplate("id", "@timestamp1", List.of("metrics-foobar*"));
putComposableIndexTemplate("id", "@timestamp", List.of("metrics-foobar*"));
String dataStreamName = "metrics-foobar-baz";
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(createDataStreamRequest).get();
@ -484,7 +482,7 @@ public class DataStreamIT extends ESIntegTestCase {
}
public void testAliasActionsFailOnDataStreams() throws Exception {
putComposableIndexTemplate("id1", "@timestamp1", List.of("metrics-foo*"));
putComposableIndexTemplate("id1", "@timestamp", List.of("metrics-foo*"));
String dataStreamName = "metrics-foo";
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(createDataStreamRequest).get();
@ -498,7 +496,7 @@ public class DataStreamIT extends ESIntegTestCase {
}
public void testAliasActionsFailOnDataStreamBackingIndices() throws Exception {
putComposableIndexTemplate("id1", "@timestamp1", List.of("metrics-foo*"));
putComposableIndexTemplate("id1", "@timestamp", List.of("metrics-foo*"));
String dataStreamName = "metrics-foo";
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(createDataStreamRequest).get();
@ -514,84 +512,6 @@ public class DataStreamIT extends ESIntegTestCase {
"support aliases."));
}
public void testChangeTimestampFieldInComposableTemplatePriorToRollOver() throws Exception {
putComposableIndexTemplate("id1", "@timestamp", List.of("logs-foo*"));
// Index doc that triggers creation of a data stream
IndexRequest indexRequest =
new IndexRequest("logs-foobar").source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON).opType("create");
IndexResponse indexResponse = client().index(indexRequest).actionGet();
assertThat(indexResponse.getIndex(), equalTo(DataStream.getDefaultBackingIndexName("logs-foobar", 1)));
assertBackingIndex(DataStream.getDefaultBackingIndexName("logs-foobar", 1), "properties.@timestamp");
// Rollover data stream
RolloverResponse rolloverResponse = client().admin().indices().rolloverIndex(new RolloverRequest("logs-foobar", null)).get();
assertThat(rolloverResponse.getNewIndex(), equalTo(DataStream.getDefaultBackingIndexName("logs-foobar", 2)));
assertTrue(rolloverResponse.isRolledOver());
assertBackingIndex(DataStream.getDefaultBackingIndexName("logs-foobar", 2), "properties.@timestamp");
// Index another doc into a data stream
indexRequest = new IndexRequest("logs-foobar").source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON).opType("create");
indexResponse = client().index(indexRequest).actionGet();
assertThat(indexResponse.getIndex(), equalTo(DataStream.getDefaultBackingIndexName("logs-foobar", 2)));
// Change the template to have a different timestamp field
putComposableIndexTemplate("id1", "@timestamp2", List.of("logs-foo*"));
// Rollover again, eventhough there is no mapping in the template, the timestamp field mapping in data stream
// should be applied in the new backing index
rolloverResponse = client().admin().indices().rolloverIndex(new RolloverRequest("logs-foobar", null)).get();
assertThat(rolloverResponse.getNewIndex(), equalTo(DataStream.getDefaultBackingIndexName("logs-foobar", 3)));
assertTrue(rolloverResponse.isRolledOver());
assertBackingIndex(DataStream.getDefaultBackingIndexName("logs-foobar", 3), "properties.@timestamp");
// Index another doc into a data stream
indexRequest = new IndexRequest("logs-foobar").source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON).opType("create");
indexResponse = client().index(indexRequest).actionGet();
assertThat(indexResponse.getIndex(), equalTo(DataStream.getDefaultBackingIndexName("logs-foobar", 3)));
DeleteDataStreamAction.Request deleteDataStreamRequest = new DeleteDataStreamAction.Request(new String[]{"logs-foobar"});
client().admin().indices().deleteDataStream(deleteDataStreamRequest).actionGet();
}
public void testNestedTimestampField() throws Exception {
String mapping = "{\n" +
" \"properties\": {\n" +
" \"event\": {\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date\"" +
" }\n" +
" }\n" +
" }\n" +
" }\n" +
" }";;
putComposableIndexTemplate("id1", "event.@timestamp", mapping, List.of("logs-foo*"), null);
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request("logs-foobar");
client().admin().indices().createDataStream(createDataStreamRequest).get();
GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request("logs-foobar");
GetDataStreamAction.Response getDataStreamResponse = client().admin().indices().getDataStreams(getDataStreamRequest).actionGet();
assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1));
assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getName(), equalTo("logs-foobar"));
assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getTimeStampField().getName(),
equalTo("event.@timestamp"));
assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getTimeStampField().getFieldMapping(),
equalTo(Map.of("type", "date")));
assertBackingIndex(DataStream.getDefaultBackingIndexName("logs-foobar", 1), "properties.event.properties.@timestamp");
// Change the template to have a different timestamp field
putComposableIndexTemplate("id1", "@timestamp2", List.of("logs-foo*"));
RolloverResponse rolloverResponse = client().admin().indices().rolloverIndex(new RolloverRequest("logs-foobar", null)).actionGet();
assertThat(rolloverResponse.getNewIndex(), equalTo(DataStream.getDefaultBackingIndexName("logs-foobar", 2)));
assertTrue(rolloverResponse.isRolledOver());
assertBackingIndex(DataStream.getDefaultBackingIndexName("logs-foobar", 2), "properties.event.properties.@timestamp");
DeleteDataStreamAction.Request deleteDataStreamRequest = new DeleteDataStreamAction.Request(new String[]{"logs-foobar"});
client().admin().indices().deleteDataStream(deleteDataStreamRequest).actionGet();
}
public void testTimestampFieldCustomAttributes() throws Exception {
String mapping = "{\n" +
" \"properties\": {\n" +
@ -614,20 +534,7 @@ public class DataStreamIT extends ESIntegTestCase {
assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getName(), equalTo("logs-foobar"));
assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getTimeStampField().getName(), equalTo("@timestamp"));
java.util.Map<?, ?> expectedTimestampMapping = Map.of("type", "date", "format", "yyyy-MM", "meta", Map.of("x", "y"));
assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getTimeStampField().getFieldMapping(),
equalTo(expectedTimestampMapping));
assertBackingIndex(DataStream.getDefaultBackingIndexName("logs-foobar", 1), "properties.@timestamp", expectedTimestampMapping);
// Change the template to have a different timestamp field
putComposableIndexTemplate("id1", "@timestamp2", List.of("logs-foo*"));
RolloverResponse rolloverResponse = client().admin().indices().rolloverIndex(new RolloverRequest("logs-foobar", null)).actionGet();
assertThat(rolloverResponse.getNewIndex(), equalTo(DataStream.getDefaultBackingIndexName("logs-foobar", 2)));
assertTrue(rolloverResponse.isRolledOver());
assertBackingIndex(DataStream.getDefaultBackingIndexName("logs-foobar", 2), "properties.@timestamp", expectedTimestampMapping);
DeleteDataStreamAction.Request deleteDataStreamRequest = new DeleteDataStreamAction.Request(new String[]{"logs-foobar"});
client().admin().indices().deleteDataStream(deleteDataStreamRequest).actionGet();
}
public void testUpdateMappingViaDataStream() throws Exception {
@ -738,25 +645,25 @@ public class DataStreamIT extends ESIntegTestCase {
}
public void testSearchAllResolvesDataStreams() throws Exception {
putComposableIndexTemplate("id1", "@timestamp1", List.of("metrics-foo*"));
putComposableIndexTemplate("id1", "@timestamp", List.of("metrics-foo*"));
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request("metrics-foo");
client().admin().indices().createDataStream(createDataStreamRequest).get();
putComposableIndexTemplate("id2", "@timestamp2", List.of("metrics-bar*"));
putComposableIndexTemplate("id2", "@timestamp", List.of("metrics-bar*"));
createDataStreamRequest = new CreateDataStreamAction.Request("metrics-bar");
client().admin().indices().createDataStream(createDataStreamRequest).get();
int numDocsBar = randomIntBetween(2, 16);
indexDocs("metrics-bar", "@timestamp2", numDocsBar);
indexDocs("metrics-bar", "@timestamp", numDocsBar);
int numDocsFoo = randomIntBetween(2, 16);
indexDocs("metrics-foo", "@timestamp1", numDocsFoo);
indexDocs("metrics-foo", "@timestamp", numDocsFoo);
RolloverResponse rolloverResponse = client().admin().indices().rolloverIndex(new RolloverRequest("metrics-foo", null)).get();
assertThat(rolloverResponse.getNewIndex(), equalTo(DataStream.getDefaultBackingIndexName("metrics-foo", 2)));
// ingest some more data in the rolled data stream
int numDocsRolledFoo = randomIntBetween(2, 16);
indexDocs("metrics-foo", "@timestamp1", numDocsRolledFoo);
indexDocs("metrics-foo", "@timestamp", numDocsRolledFoo);
SearchRequest searchRequest = new SearchRequest("*");
SearchResponse searchResponse = client().search(searchRequest).actionGet();

View File

@ -199,7 +199,7 @@ public class TransportSimulateIndexTemplateAction
// empty request mapping as the user can't specify any explicit mappings via the simulate api
List<Map<String, Map<String, Object>>> mappings = MetadataCreateIndexService.collectV2Mappings(
Collections.emptyMap(), simulatedState, matchingTemplate, xContentRegistry);
Collections.emptyMap(), simulatedState, matchingTemplate, xContentRegistry, indexName);
CompressedXContent mergedMapping = indicesService.<CompressedXContent, Exception>withTempIndexService(indexMetadata,
tempIndexService -> {

View File

@ -32,6 +32,8 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TimestampFieldMapper;
import java.io.IOException;
import java.util.Collections;
@ -39,6 +41,9 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.cluster.metadata.DataStream.TimestampField.FIXED_TIMESTAMP_FIELD;
/**
* An index template is comprised of a set of index patterns, an optional template, and a list of
* ids corresponding to component templates that should be composed in order when creating a new
@ -257,6 +262,10 @@ public class ComposableIndexTemplate extends AbstractDiffable<ComposableIndexTem
private final String timestampField;
public DataStreamTemplate(String timestampField) {
if (FIXED_TIMESTAMP_FIELD.equals(timestampField) == false) {
throw new IllegalArgumentException("unexpected timestamp field [" + timestampField + "]");
}
this.timestampField = timestampField;
}
@ -268,6 +277,14 @@ public class ComposableIndexTemplate extends AbstractDiffable<ComposableIndexTem
this(in.readString());
}
/**
* @return a mapping snippet for a backing index with `_timestamp` meta field mapper properly configured.
*/
public Map<String, Object> getDataSteamMappingSnippet() {
return singletonMap(MapperService.SINGLE_MAPPING_NAME, singletonMap(TimestampFieldMapper.NAME,
singletonMap("path", timestampField)));
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(timestampField);

View File

@ -29,20 +29,14 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.MapperService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.cluster.metadata.MetadataCreateDataStreamService.ALLOWED_TIMESTAMPFIELD_TYPES;
import static org.elasticsearch.cluster.metadata.MetadataCreateDataStreamService.convertFieldPathToMappingPath;
public final class DataStream extends AbstractDiffable<DataStream> implements ToXContentObject {
public static final String BACKING_INDEX_PREFIX = ".ds-";
@ -212,68 +206,42 @@ public final class DataStream extends AbstractDiffable<DataStream> implements To
public static final class TimestampField implements Writeable, ToXContentObject {
public static final String FIXED_TIMESTAMP_FIELD = "@timestamp";
static ParseField NAME_FIELD = new ParseField("name");
static ParseField FIELD_MAPPING_FIELD = new ParseField("mapping");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<TimestampField, Void> PARSER = new ConstructingObjectParser<>(
"timestamp_field",
args -> new TimestampField((String) args[0], (Map<String, Object>) args[1])
args -> new TimestampField((String) args[0])
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> p.mapOrdered(), FIELD_MAPPING_FIELD);
}
private final String name;
private final Map<String, Object> fieldMapping;
public TimestampField(String name, Map<String, Object> fieldMapping) {
assert fieldMapping.containsKey("type") : "no type defined for mapping of timestamp_field";
assert ALLOWED_TIMESTAMPFIELD_TYPES.contains(fieldMapping.get("type")) :
"invalid type defined for mapping of timestamp_field";
public TimestampField(String name) {
if (FIXED_TIMESTAMP_FIELD.equals(name) == false) {
throw new IllegalArgumentException("unexpected timestamp field [" + name + "]");
}
this.name = name;
this.fieldMapping = Collections.unmodifiableMap(fieldMapping);
}
public TimestampField(StreamInput in) throws IOException {
this(in.readString(), in.readMap());
}
/**
* Creates a map representing the full timestamp field mapping, taking into
* account if the timestamp field is nested under object mappers (its path
* contains dots).
*/
public Map<String, Map<String, Object>> getTimestampFieldMapping() {
String mappingPath = convertFieldPathToMappingPath(name);
String parentObjectFieldPath = mappingPath.substring(0, mappingPath.lastIndexOf('.'));
String leafFieldName = mappingPath.substring(mappingPath.lastIndexOf('.') + 1);
Map<String, Object> result = new HashMap<>();
Map<String, Object> current = result;
for (String key : parentObjectFieldPath.split("\\.")) {
Map<String, Object> map = new HashMap<>();
current.put(key, map);
current = map;
}
current.put(leafFieldName, fieldMapping);
return Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, result);
this(in.readString());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeMap(fieldMapping);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(NAME_FIELD.getPreferredName(), name);
builder.field(FIELD_MAPPING_FIELD.getPreferredName(), fieldMapping);
builder.endObject();
return builder;
}
@ -282,22 +250,17 @@ public final class DataStream extends AbstractDiffable<DataStream> implements To
return name;
}
public Map<String, Object> getFieldMapping() {
return fieldMapping;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TimestampField that = (TimestampField) o;
return name.equals(that.name) &&
fieldMapping.equals(that.fieldMapping);
return name.equals(that.name);
}
@Override
public int hashCode() {
return Objects.hash(name, fieldMapping);
return Objects.hash(name);
}
}
}

View File

@ -32,28 +32,19 @@ import org.elasticsearch.cluster.ack.ClusterStateUpdateRequest;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectPath;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TimestampFieldMapper;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
public class MetadataCreateDataStreamService {
private static final Logger logger = LogManager.getLogger(MetadataCreateDataStreamService.class);
public static final Set<String> ALLOWED_TIMESTAMPFIELD_TYPES =
new LinkedHashSet<>(List.of(DateFieldMapper.CONTENT_TYPE, DateFieldMapper.DATE_NANOS_CONTENT_TYPE));
private final ClusterService clusterService;
private final ActiveShardsObserver activeShardsObserver;
@ -157,13 +148,7 @@ public class MetadataCreateDataStreamService {
assert firstBackingIndex.mapping() != null : "no mapping found for backing index [" + firstBackingIndexName + "]";
String fieldName = template.getDataStreamTemplate().getTimestampField();
Map<String, Object> mapping = firstBackingIndex.mapping().getSourceAsMap();
Map<String, Object> timeStampFieldMapping = ObjectPath.eval(convertFieldPathToMappingPath(fieldName), mapping);
DataStream.TimestampField timestampField = new DataStream.TimestampField(
fieldName,
timeStampFieldMapping
);
DataStream.TimestampField timestampField = new DataStream.TimestampField(fieldName);
DataStream newDataStream = new DataStream(request.name, timestampField,
Collections.singletonList(firstBackingIndex.getIndex()));
Metadata.Builder builder = Metadata.builder(currentState.metadata()).put(newDataStream);
@ -185,31 +170,16 @@ public class MetadataCreateDataStreamService {
}
public static void validateTimestampFieldMapping(String timestampFieldName, MapperService mapperService) {
MappedFieldType timestampFieldMapper = mapperService.fieldType(timestampFieldName);
if (timestampFieldMapper == null) {
throw new IllegalArgumentException("expected timestamp field [" + timestampFieldName + "], but found no timestamp field");
}
String type = timestampFieldMapper.typeName();
if (ALLOWED_TIMESTAMPFIELD_TYPES.contains(type) == false) {
throw new IllegalArgumentException("expected timestamp field [" + timestampFieldName + "] to be of types " +
ALLOWED_TIMESTAMPFIELD_TYPES + ", but instead found type [" + type + "]");
}
}
TimestampFieldMapper fieldMapper = (TimestampFieldMapper) mapperService.documentMapper().mappers().getMapper("_timestamp");
assert fieldMapper != null : "[_timestamp] meta field mapper must exist";
public static String convertFieldPathToMappingPath(String fieldPath) {
// The mapping won't allow such fields, so this is a sanity check:
assert Arrays.stream(fieldPath.split("\\.")).filter(String::isEmpty).count() == 0L ||
fieldPath.startsWith(".") ||
fieldPath.endsWith(".") : "illegal field path [" + fieldPath + "]";
String mappingPath;
if (fieldPath.indexOf('.') == -1) {
mappingPath = "properties." + fieldPath;
} else {
mappingPath = "properties." + fieldPath.replace(".", ".properties.");
if (timestampFieldName.equals(fieldMapper.getPath()) == false) {
throw new IllegalArgumentException("[_timestamp] meta field doesn't point to data stream timestamp field [" +
timestampFieldName + "]");
}
return mappingPath;
// Sanity check (this validation logic should already have been executed when merging mappings):
fieldMapper.validate(mapperService.documentMapper().mappers());
}
}

View File

@ -65,7 +65,6 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.TimestampFieldMapper;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
@ -108,6 +107,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_CREATION_
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_UUID;
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.cluster.metadata.MetadataCreateDataStreamService.validateTimestampFieldMapping;
/**
* Service responsible for submitting create index requests
@ -396,7 +396,7 @@ public class MetadataCreateIndexService {
// create the index here (on the master) to validate it can be created, as well as adding the mapping
return indicesService.<ClusterState, Exception>withTempIndexService(temporaryIndexMeta, indexService -> {
try {
updateIndexMappingsAndBuildSortOrder(indexService, mappings, sourceMetadata);
updateIndexMappingsAndBuildSortOrder(indexService, request, mappings, sourceMetadata);
} catch (Exception e) {
logger.debug("failed on parsing mappings on index creation [{}]", request.index());
throw e;
@ -496,27 +496,8 @@ public class MetadataCreateIndexService {
throws Exception {
logger.debug("applying create index request using composable template [{}]", templateName);
final List<Map<String, Map<String, Object>>> mappings = collectV2Mappings(
request.mappings(), currentState, templateName, xContentRegistry);
if (request.dataStreamName() != null) {
String timestampField;
DataStream dataStream = currentState.metadata().dataStreams().get(request.dataStreamName());
if (dataStream != null) {
// Data stream already exists and a new backing index gets added. For example during rollover.
timestampField = dataStream.getTimeStampField().getName();
// Use the timestamp field mapping as was recorded at the time the data stream was created
mappings.add(dataStream.getTimeStampField().getTimestampFieldMapping());
} else {
// The data stream doesn't yet exist and the first backing index gets created. Resolve ts field from template.
// (next time, the data stream instance does exist)
ComposableIndexTemplate template = currentState.metadata().templatesV2().get(templateName);
timestampField = template.getDataStreamTemplate().getTimestampField();
}
// Add mapping for timestamp field mapper last, so that it can't be overwritten:
mappings.add(singletonMap("_doc", singletonMap(TimestampFieldMapper.NAME, singletonMap("path", timestampField))));
}
final List<Map<String, Map<String, Object>>> mappings =
collectV2Mappings(request.mappings(), currentState, templateName, xContentRegistry, request.index());
final Settings aggregatedIndexSettings =
aggregateIndexSettings(currentState, request,
MetadataIndexTemplateService.resolveSettings(currentState.metadata(), templateName),
@ -536,10 +517,12 @@ public class MetadataCreateIndexService {
public static List<Map<String, Map<String, Object>>> collectV2Mappings(final Map<String, String> requestMappings,
final ClusterState currentState,
final String templateName,
final NamedXContentRegistry xContentRegistry) throws Exception {
final NamedXContentRegistry xContentRegistry,
final String indexName) throws Exception {
List<Map<String, Map<String, Object>>> result = new ArrayList<>();
List<CompressedXContent> templateMappings = MetadataIndexTemplateService.collectMappings(currentState, templateName);
List<CompressedXContent> templateMappings =
MetadataIndexTemplateService.collectMappings(currentState, templateName, indexName);
for (CompressedXContent templateMapping : templateMappings) {
Map<String, Object> parsedTemplateMapping = MapperService.parseMapping(xContentRegistry, templateMapping.string());
result.add(singletonMap(MapperService.SINGLE_MAPPING_NAME, parsedTemplateMapping));
@ -910,7 +893,9 @@ public class MetadataCreateIndexService {
return blocksBuilder;
}
private static void updateIndexMappingsAndBuildSortOrder(IndexService indexService, List<Map<String, Map<String, Object>>> mappings,
private static void updateIndexMappingsAndBuildSortOrder(IndexService indexService,
CreateIndexClusterStateUpdateRequest request,
List<Map<String, Map<String, Object>>> mappings,
@Nullable IndexMetadata sourceMetadata) throws IOException {
MapperService mapperService = indexService.mapperService();
for (Map<String, Map<String, Object>> mapping : mappings) {
@ -928,6 +913,9 @@ public class MetadataCreateIndexService {
// (when all shards are copied in a single place).
indexService.getIndexSortSupplier().get();
}
if (request.dataStreamName() != null) {
validateTimestampFieldMapping("@timestamp", mapperService);
}
}
private static void validateActiveShardCount(ActiveShardCount waitForActiveShards, IndexMetadata indexMetadata) {

View File

@ -38,6 +38,7 @@ import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.DeprecationLogger;
@ -47,7 +48,9 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -57,6 +60,8 @@ import org.elasticsearch.indices.IndexTemplateMissingException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InvalidIndexTemplateException;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@ -908,7 +913,9 @@ public class MetadataIndexTemplateService {
/**
* Collect the given v2 template into an ordered list of mappings.
*/
public static List<CompressedXContent> collectMappings(final ClusterState state, final String templateName) {
public static List<CompressedXContent> collectMappings(final ClusterState state,
final String templateName,
final String indexName) {
final ComposableIndexTemplate template = state.metadata().templatesV2().get(templateName);
assert template != null : "attempted to resolve mappings for a template [" + templateName +
"] that did not exist in the cluster state";
@ -928,6 +935,23 @@ public class MetadataIndexTemplateService {
Optional.ofNullable(template.template())
.map(Template::mappings)
.ifPresent(mappings::add);
// Only include _timestamp mapping snippet if creating backing index.
if (indexName.startsWith(DataStream.BACKING_INDEX_PREFIX)) {
// Only if template has data stream definition this should be added and
// adding this template last, since _timestamp field should have highest precedence:
Optional.ofNullable(template.getDataStreamTemplate())
.map(ComposableIndexTemplate.DataStreamTemplate::getDataSteamMappingSnippet)
.map(mapping -> {
try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
builder.value(mapping);
return new CompressedXContent(BytesReference.bytes(builder));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
})
.ifPresent(mappings::add);
}
return Collections.unmodifiableList(mappings);
}
@ -1073,16 +1097,19 @@ public class MetadataIndexTemplateService {
// shard id and the current timestamp
xContentRegistry, tempIndexService.newQueryShardContext(0, null, () -> 0L, null));
// triggers inclusion of _timestamp field and its validation:
String indexName = DataStream.BACKING_INDEX_PREFIX + temporaryIndexName;
// Parse mappings to ensure they are valid after being composed
List<CompressedXContent> mappings = collectMappings(stateWithIndex, templateName);
List<CompressedXContent> mappings = collectMappings(stateWithIndex, templateName, indexName );
try {
MapperService mapperService = tempIndexService.mapperService();
for (CompressedXContent mapping : mappings) {
mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MergeReason.INDEX_TEMPLATE);
if (template.getDataStreamTemplate() != null) {
String tsFieldName = template.getDataStreamTemplate().getTimestampField();
validateTimestampFieldMapping(tsFieldName, mapperService);
}
}
if (template.getDataStreamTemplate() != null) {
String tsFieldName = template.getDataStreamTemplate().getTimestampField();
validateTimestampFieldMapping(tsFieldName, mapperService);
}
} catch (Exception e) {
throw new IllegalArgumentException("invalid composite mappings for [" + templateName + "]", e);

View File

@ -178,7 +178,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
// on the field this meta field refers to:
try (XContentBuilder builder = jsonBuilder()) {
builder.startObject();
dateFieldMapper.doXContentBody(builder, false, EMPTY_PARAMS);
dateFieldMapper.doXContentBody(builder, false, EMPTY_PARAMS);
builder.endObject();
Map<String, Object> configuredSettings =
XContentHelper.convertToMap(BytesReference.bytes(builder), false, XContentType.JSON).v2();
@ -197,6 +197,10 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
}
}
public String getPath() {
return path;
}
@Override
public void preParse(ParseContext context) throws IOException {
}

View File

@ -63,8 +63,8 @@ public class ResolveIndexTests extends ESTestCase {
private final Object[][] dataStreams = new Object[][]{
// name, timestampField, numBackingIndices
{"logs-mysql-prod", "@timestamp1", 4},
{"logs-mysql-test", "@timestamp2", 2}
{"logs-mysql-prod", "@timestamp", 4},
{"logs-mysql-test", "@timestamp", 2}
};
private Metadata metadata = buildMetadata(dataStreams, indices);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.indices.rollover;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest;
@ -55,9 +56,14 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.TimestampFieldMapper;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InvalidIndexNameException;
@ -543,12 +549,24 @@ public class MetadataRolloverServiceTests extends ESTestCase {
ThreadPool testThreadPool = new TestThreadPool(getTestName());
try {
Settings settings = Settings.builder()
.put("index.version.created", Version.CURRENT)
.build();
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(settings, new ContentPath(0));
TimestampFieldMapper.Builder fieldBuilder = new TimestampFieldMapper.Builder();
fieldBuilder.setPath("@timestamp");
DateFieldMapper dateFieldMapper = new DateFieldMapper.Builder("@timestamp").build(builderContext);
DocumentFieldMappers documentFieldMappers =
new DocumentFieldMappers(Arrays.asList(fieldBuilder.build(builderContext), dateFieldMapper),
Collections.emptyList(), new StandardAnalyzer());
ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
Environment env = mock(Environment.class);
when(env.sharedDataFile()).thenReturn(null);
AllocationService allocationService = mock(AllocationService.class);
when(allocationService.reroute(any(ClusterState.class), any(String.class))).then(i -> i.getArguments()[0]);
DocumentMapper documentMapper = mock(DocumentMapper.class);
when(documentMapper.mappers()).thenReturn(documentFieldMappers);
when(documentMapper.type()).thenReturn("_doc");
CompressedXContent mapping = new CompressedXContent(generateMapping(dataStream.getTimeStampField().getName()));
when(documentMapper.mappingSource()).thenReturn(mapping);

View File

@ -148,7 +148,7 @@ public class ComposableIndexTemplateTests extends AbstractDiffableSerializationT
if (randomBoolean()) {
return null;
} else {
return new ComposableIndexTemplate.DataStreamTemplate(randomAlphaOfLength(8));
return new ComposableIndexTemplate.DataStreamTemplate("@timestamp");
}
}

View File

@ -18,9 +18,7 @@
*/
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.cluster.metadata.DataStream.TimestampField;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.collect.Map;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
@ -51,7 +49,7 @@ public class DataStreamTests extends AbstractSerializingTestCase<DataStream> {
long generation = indices.size() + randomLongBetween(1, 128);
String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
indices.add(new Index(getDefaultBackingIndexName(dataStreamName, generation), UUIDs.randomBase64UUID(random())));
return new DataStream(dataStreamName, createTimestampField(randomAlphaOfLength(10)), indices, generation);
return new DataStream(dataStreamName, createTimestampField("@timestamp"), indices, generation);
}
@Override
@ -167,24 +165,4 @@ public class DataStreamTests extends AbstractSerializingTestCase<DataStream> {
Index newBackingIndex = new Index("replacement-index", UUIDs.randomBase64UUID(random()));
expectThrows(IllegalArgumentException.class, () -> original.replaceBackingIndex(indices.get(writeIndexPosition), newBackingIndex));
}
public void testGetTimestampFieldMapping() {
TimestampField field = new TimestampField("@timestamp", Map.of("type", "date", "meta", Map.of("x", "y")));
java.util.Map<String, java.util.Map<String, Object>> mappings = field.getTimestampFieldMapping();
java.util.Map<String, java.util.Map<String, Object>> expectedMapping = Map.of("_doc", Map.of("properties",
Map.of("@timestamp", Map.of("type", "date", "meta", Map.of("x", "y")))));
assertThat(mappings, equalTo(expectedMapping));
TimestampField nestedField = new TimestampField("event.attr.@timestamp", Map.of("type", "date", "meta", Map.of("x", "y")));
mappings = nestedField.getTimestampFieldMapping();
expectedMapping = Map.of("_doc", Map.of("properties", Map.of("event", Map.of("properties", Map.of("attr",
Map.of("properties", Map.of("@timestamp", Map.of("type", "date", "meta", Map.of("x", "y")))))))));
assertThat(mappings, equalTo(expectedMapping));
}
public void testDataStreamsAreImmutable() {
DataStream ds = randomInstance();
expectThrows(UnsupportedOperationException.class, () -> ds.getIndices().clear());
expectThrows(UnsupportedOperationException.class, () -> ds.getTimeStampField().getFieldMapping().clear());
}
}

View File

@ -1710,7 +1710,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
Metadata.Builder mdBuilder = Metadata.builder()
.put(backingIndex, false)
.put(new DataStream(dataStreamName, createTimestampField("ts"),
.put(new DataStream(dataStreamName, createTimestampField("@timestamp"),
org.elasticsearch.common.collect.List.of(backingIndex.getIndex()), 1));
ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build();
@ -1819,7 +1819,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
Metadata.Builder mdBuilder = Metadata.builder()
.put(index1, false)
.put(index2, false)
.put(new DataStream(dataStreamName, createTimestampField("ts"),
.put(new DataStream(dataStreamName, createTimestampField("@timestamp"),
org.elasticsearch.common.collect.List.of(index1.getIndex(), index2.getIndex()), 2));
ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build();
@ -2021,8 +2021,9 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
.put(index3, false)
.put(index4, false)
.put(justAnIndex, false)
.put(new DataStream(dataStream1, createTimestampField("ts"), Arrays.asList(index1.getIndex(), index2.getIndex())))
.put(new DataStream(dataStream2, createTimestampField("ts"), Arrays.asList(index3.getIndex(), index4.getIndex())))).build();
.put(new DataStream(dataStream1, createTimestampField("@timestamp"), Arrays.asList(index1.getIndex(), index2.getIndex())))
.put(new DataStream(dataStream2, createTimestampField("@timestamp"), Arrays.asList(index3.getIndex(), index4.getIndex()))))
.build();
List<String> names = indexNameExpressionResolver.dataStreamNames(state, IndicesOptions.lenientExpand(), "log*");
assertEquals(Collections.singletonList(dataStream1), names);

View File

@ -163,46 +163,21 @@ public class MetadataCreateDataStreamServiceTests extends ESTestCase {
Exception e = expectThrows(IllegalArgumentException.class,
() -> validateTimestampFieldMapping("@timestamp", createMapperService("{}")));
assertThat(e.getMessage(),
equalTo("expected timestamp field [@timestamp], but found no timestamp field"));
equalTo("[_timestamp] meta field doesn't point to data stream timestamp field [@timestamp]"));
String mapping = generateMapping("@timestamp2", "date");
e = expectThrows(IllegalArgumentException.class,
() -> validateTimestampFieldMapping("@timestamp", createMapperService(mapping)));
assertThat(e.getMessage(),
equalTo("expected timestamp field [@timestamp], but found no timestamp field"));
equalTo("[_timestamp] meta field doesn't point to data stream timestamp field [@timestamp]"));
}
public void testValidateTimestampFieldMappingInvalidFieldType() {
String mapping = generateMapping("@timestamp", "keyword");
Exception e = expectThrows(IllegalArgumentException.class,
() -> validateTimestampFieldMapping("@timestamp", createMapperService(mapping)));
assertThat(e.getMessage(), equalTo("expected timestamp field [@timestamp] to be of types [date, date_nanos], " +
"but instead found type [keyword]"));
}
public void testValidateNestedTimestampFieldMapping() throws Exception {
String fieldType = randomBoolean() ? "date" : "date_nanos";
String mapping = "{\n" +
" \"properties\": {\n" +
" \"event\": {\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"" + fieldType + "\"\n" +
" },\n" +
" \"another_field\": {\n" +
" \"type\": \"keyword\"\n" +
" }\n" +
" }\n" +
" }\n" +
" }\n" +
" }";
MapperService mapperService = createMapperService(mapping);
validateTimestampFieldMapping("event.@timestamp", mapperService);
Exception e = expectThrows(IllegalArgumentException.class,
() -> validateTimestampFieldMapping("event.another_field", mapperService));
assertThat(e.getMessage(), equalTo("expected timestamp field [event.another_field] to be of types [date, date_nanos], " +
"but instead found type [keyword]"));
assertThat(e.getMessage(), equalTo("the configured timestamp field [@timestamp] is of type [keyword], " +
"but [date,date_nanos] is expected"));
}
private static MetadataCreateIndexService getMetadataCreateIndexService() throws Exception {
@ -234,6 +209,9 @@ public class MetadataCreateDataStreamServiceTests extends ESTestCase {
static String generateMapping(String timestampFieldName, String type) {
return "{\n" +
" \"_timestamp\": {\n" +
" \"path\": \"" + timestampFieldName + "\"\n" +
" }," +
" \"properties\": {\n" +
" \"" + timestampFieldName + "\": {\n" +
" \"type\": \"" + type + "\"\n" +

View File

@ -710,7 +710,7 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
Arrays.asList("ct_low", "ct_high"), 0L, 1L, null, null);
state = service.addIndexTemplateV2(state, true, "my-template", it);
List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(state, "my-template");
List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(state, "my-template", "my-index");
assertNotNull(mappings);
assertThat(mappings.size(), equalTo(3));
@ -776,7 +776,7 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
Arrays.asList("ct_low", "ct_high"), 0L, 1L, null, null);
state = service.addIndexTemplateV2(state, true, "my-template", it);
List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(state, "my-template");
List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(state, "my-template", "my-index");
assertNotNull(mappings);
assertThat(mappings.size(), equalTo(3));
@ -1078,7 +1078,7 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
state = ClusterState.builder(state)
.metadata(Metadata.builder(state.metadata())
.put(new DataStream("unreferenced",
new DataStream.TimestampField("@timestamp", Collections.singletonMap("type", "date")),
new DataStream.TimestampField("@timestamp"),
Collections.singletonList(new Index(".ds-unreferenced-000001", "uuid2"))))
.put(IndexMetadata.builder(".ds-unreferenced-000001")
.settings(Settings.builder()
@ -1090,15 +1090,23 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
.build())
.build();
ComposableIndexTemplate template = new ComposableIndexTemplate(Collections.singletonList("logs-*-*"), null, null,
100L, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp"));
String mapping = "{\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date\"\n" +
" }\n" +
" }\n" +
" }";
Template mappingTemplate = new Template(null, new CompressedXContent(mapping), null);
ComposableIndexTemplate template = new ComposableIndexTemplate(Collections.singletonList("logs-*-*"),
mappingTemplate, null, 100L, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp"));
state = service.addIndexTemplateV2(state, false, "logs", template);
ClusterState stateWithDS = ClusterState.builder(state)
.metadata(Metadata.builder(state.metadata())
.put(new DataStream("logs-mysql-default",
new DataStream.TimestampField("@timestamp", Collections.singletonMap("type", "date")),
new DataStream.TimestampField("@timestamp"),
Collections.singletonList(new Index(".ds-logs-mysql-default-000001", "uuid"))))
.put(IndexMetadata.builder(".ds-logs-mysql-default-000001")
.settings(Settings.builder()
@ -1134,8 +1142,8 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
// Change the pattern to one that doesn't match the data stream
e = expectThrows(IllegalArgumentException.class, () -> {
ComposableIndexTemplate newTemplate = new ComposableIndexTemplate(Collections.singletonList("logs-postgres-*"), null, null,
100L, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp"));
ComposableIndexTemplate newTemplate = new ComposableIndexTemplate(Collections.singletonList("logs-postgres-*"), mappingTemplate,
null, 100L, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp"));
service.addIndexTemplateV2(stateWithDS, false, "logs", newTemplate);
});
@ -1144,8 +1152,8 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
"cause data streams [unreferenced, logs-mysql-default] to no longer match a data stream template"));
// Add an additional template that matches our data stream at a lower priority
ComposableIndexTemplate mysqlTemplate = new ComposableIndexTemplate(Collections.singletonList("logs-mysql-*"), null, null,
50L, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp"));
ComposableIndexTemplate mysqlTemplate = new ComposableIndexTemplate(Collections.singletonList("logs-mysql-*"), mappingTemplate,
null, 50L, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp"));
ClusterState stateWithDSAndTemplate = service.addIndexTemplateV2(stateWithDS, false, "logs-mysql", mysqlTemplate);
// We should be able to replace the "logs" template, because we have the "logs-mysql" template that can handle the data stream

View File

@ -1054,7 +1054,7 @@ public class MetadataTests extends ESTestCase {
backingIndices.add(im.getIndex());
}
b.put(new DataStream(dataStreamName, createTimestampField("ts"), backingIndices, lastBackingIndexNum));
b.put(new DataStream(dataStreamName, createTimestampField("@timestamp"), backingIndices, lastBackingIndexNum));
Metadata metadata = b.build();
assertThat(metadata.dataStreams().size(), equalTo(1));
assertThat(metadata.dataStreams().get(dataStreamName).getName(), equalTo(dataStreamName));
@ -1072,7 +1072,7 @@ public class MetadataTests extends ESTestCase {
indices.add(idx.getIndex());
b.put(idx, true);
}
b.put(new DataStream(name, createTimestampField("ts"), indices, indices.size()));
b.put(new DataStream(name, createTimestampField("@timestamp"), indices, indices.size()));
}
Metadata metadata = b.build();
@ -1137,7 +1137,7 @@ public class MetadataTests extends ESTestCase {
}
DataStream dataStream = new DataStream(
dataStreamName,
createTimestampField("ts"),
createTimestampField("@timestamp"),
backingIndices.stream().map(IndexMetadata::getIndex).collect(Collectors.toList()),
backingIndices.size()
);
@ -1211,7 +1211,7 @@ public class MetadataTests extends ESTestCase {
}
DataStream dataStream = new DataStream(
dataStreamName,
createTimestampField("ts"),
createTimestampField("@timestamp"),
backingIndices.stream().map(IndexMetadata::getIndex).collect(Collectors.toList()),
backingIndices.size()
);
@ -1313,7 +1313,7 @@ public class MetadataTests extends ESTestCase {
b.put(im, false);
backingIndices.add(im.getIndex());
}
b.put(new DataStream(dataStreamName, createTimestampField("ts"), backingIndices, lastBackingIndexNum));
b.put(new DataStream(dataStreamName, createTimestampField("@timestamp"), backingIndices, lastBackingIndexNum));
return new CreateIndexResult(indices, backingIndices, b.build());
}

View File

@ -104,7 +104,7 @@ public class ToAndFromJsonMetadataTests extends ESTestCase {
.put(idx2, false)
.put(new DataStream("data-stream1", createTimestampField("@timestamp"),
org.elasticsearch.common.collect.List.of(idx1.getIndex())))
.put(new DataStream("data-stream2", createTimestampField("@timestamp2"),
.put(new DataStream("data-stream2", createTimestampField("@timestamp"),
org.elasticsearch.common.collect.List.of(idx2.getIndex())))
.build();
@ -159,7 +159,7 @@ public class ToAndFromJsonMetadataTests extends ESTestCase {
assertThat(parsedMetadata.dataStreams().get("data-stream1").getIndices(), contains(idx1.getIndex()));
assertNotNull(parsedMetadata.dataStreams().get("data-stream2"));
assertThat(parsedMetadata.dataStreams().get("data-stream2").getName(), is("data-stream2"));
assertThat(parsedMetadata.dataStreams().get("data-stream2").getTimeStampField().getName(), is("@timestamp2"));
assertThat(parsedMetadata.dataStreams().get("data-stream2").getTimeStampField().getName(), is("@timestamp"));
assertThat(parsedMetadata.dataStreams().get("data-stream2").getIndices(), contains(idx2.getIndex()));
}

View File

@ -221,7 +221,7 @@ public class WildcardExpressionResolverTests extends ESTestCase {
.put(indexBuilder("bar_index").state(State.OPEN).putAlias(AliasMetadata.builder("foo_alias")))
.put(firstBackingIndexMetadata, true)
.put(secondBackingIndexMetadata, true)
.put(new DataStream(dataStreamName, createTimestampField("timestamp"),
.put(new DataStream(dataStreamName, createTimestampField("@timestamp"),
org.elasticsearch.common.collect.List.of(firstBackingIndexMetadata.getIndex(), secondBackingIndexMetadata.getIndex())));
ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build();

View File

@ -164,4 +164,39 @@ public class TimestampFieldMapperTests extends ESSingleNodeTestCase {
assertConflicts(mapping1, mapping2, parser, "cannot update path setting for [_timestamp]");
}
public void testDifferentTSField() throws IOException {
String mapping = "{\n" +
" \"_timestamp\": {\n" +
" \"path\": \"event.my_timestamp\"\n" +
" },\n" +
" \"properties\": {\n" +
" \"event\": {\n" +
" \"properties\": {\n" +
" \"my_timestamp\": {\n" +
" \"type\": \"date\"" +
" }\n" +
" }\n" +
" }\n" +
" }\n" +
" }";
DocumentMapper docMapper = createIndex("test").mapperService()
.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
ParsedDocument doc = docMapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("event.my_timestamp", "2020-12-12")
.endObject()),
XContentType.JSON));
assertThat(doc.rootDoc().getFields("event.my_timestamp").length, equalTo(2));
Exception e = expectThrows(MapperException.class, () -> docMapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("event.timestamp", "2020-12-12")
.endObject()),
XContentType.JSON)));
assertThat(e.getCause().getMessage(), equalTo("data stream timestamp field [event.my_timestamp] is missing"));
}
}
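
With the _timestamp meta field in testDifferentTSField pointing at event.my_timestamp, only documents that actually contain that field parse. Written out as plain JSON, the two source documents the test sends are roughly:

--------------------------------------------------
{ "event.my_timestamp": "2020-12-12" }
{ "event.timestamp": "2020-12-12" }
--------------------------------------------------

The first is indexed and, per the assertion above, yields two Lucene fields for event.my_timestamp; the second is rejected with "data stream timestamp field [event.my_timestamp] is missing".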

View File

@ -22,7 +22,6 @@ package org.elasticsearch.cluster;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.collect.Map;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESTestCase;
@ -54,6 +53,6 @@ public final class DataStreamTestHelper {
}
public static DataStream.TimestampField createTimestampField(String fieldName) {
return new DataStream.TimestampField(fieldName, Map.of("type", "date"));
return new DataStream.TimestampField(fieldName);
}
}

View File

@ -79,7 +79,7 @@ public class ReplaceDataStreamBackingIndexStepTests extends AbstractStepTestCase
ClusterState clusterState = ClusterState.builder(emptyClusterState()).metadata(
Metadata.builder().put(sourceIndexMetadata, true)
.put(new DataStream(dataStreamName, createTimestampField("timestamp"),
.put(new DataStream(dataStreamName, createTimestampField("@timestamp"),
org.elasticsearch.common.collect.List.of(sourceIndexMetadata.getIndex()))).build()
).build();
@ -108,7 +108,7 @@ public class ReplaceDataStreamBackingIndexStepTests extends AbstractStepTestCase
Metadata.builder()
.put(sourceIndexMetadata, true)
.put(writeIndexMetadata, true)
.put(new DataStream(dataStreamName, createTimestampField("timestamp"), backingIndices))
.put(new DataStream(dataStreamName, createTimestampField("@timestamp"), backingIndices))
.build()
).build();
@ -151,7 +151,7 @@ public class ReplaceDataStreamBackingIndexStepTests extends AbstractStepTestCase
Metadata.builder()
.put(sourceIndexMetadata, true)
.put(writeIndexMetadata, true)
.put(new DataStream(dataStreamName, createTimestampField("timestamp"), backingIndices))
.put(new DataStream(dataStreamName, createTimestampField("@timestamp"), backingIndices))
.put(targetIndexMetadata, true)
.build()
).build();

View File

@ -135,7 +135,7 @@ public class RolloverStepTests extends AbstractStepMasterTimeoutTestCase<Rollove
.metadata(
Metadata.builder()
.put(new DataStream(
dataStreamName, createTimestampField("timestamp"),
dataStreamName, createTimestampField("@timestamp"),
org.elasticsearch.common.collect.List.of(indexMetadata.getIndex()), 1L)
)
.put(indexMetadata, true)

View File

@ -96,7 +96,7 @@ public class UpdateRolloverLifecycleDateStepTests extends AbstractStepTestCase<U
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
.metadata(
Metadata.builder()
.put(new DataStream(dataStreamName, createTimestampField("timestamp"),
.put(new DataStream(dataStreamName, createTimestampField("@timestamp"),
org.elasticsearch.common.collect.List.of(originalIndexMeta.getIndex(),
rolledIndexMeta.getIndex()), 2L))
.put(originalIndexMeta, true)

View File

@ -167,7 +167,7 @@ public class WaitForActiveShardsTests extends AbstractStepTestCase<WaitForActive
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
.metadata(
Metadata.builder()
.put(new DataStream(dataStreamName, createTimestampField("timestamp"),
.put(new DataStream(dataStreamName, createTimestampField("@timestamp"),
org.elasticsearch.common.collect.List.of(originalIndexMeta.getIndex(),
rolledIndexMeta.getIndex()),
2L))

View File

@ -150,7 +150,7 @@ public class WaitForRolloverReadyStepTests extends AbstractStepTestCase<WaitForR
SetOnce<Boolean> conditionsMet = new SetOnce<>();
Metadata metadata = Metadata.builder().put(indexMetadata, true)
.put(new DataStream(dataStreamName, createTimestampField("timestamp"),
.put(new DataStream(dataStreamName, createTimestampField("@timestamp"),
org.elasticsearch.common.collect.List.of(indexMetadata.getIndex()), 1L))
.build();
step.evaluateCondition(metadata, indexMetadata.getIndex(), new AsyncWaitStep.Listener() {

View File

@ -696,7 +696,7 @@ public class ClassificationIT extends MlNativeDataFrameAnalyticsIntegTestCase {
private static void createIndex(String index, boolean isDatastream) {
String mapping = "{\n" +
" \"properties\": {\n" +
" \"time\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date\"\n" +
" }," +
" \""+ BOOLEAN_FIELD + "\": {\n" +
@ -729,7 +729,7 @@ public class ClassificationIT extends MlNativeDataFrameAnalyticsIntegTestCase {
" }";
if (isDatastream) {
try {
createDataStreamAndTemplate(index, "time", mapping);
createDataStreamAndTemplate(index, "@timestamp", mapping);
} catch (IOException ex) {
throw new ElasticsearchException(ex);
}
@ -745,7 +745,7 @@ public class ClassificationIT extends MlNativeDataFrameAnalyticsIntegTestCase {
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int i = 0; i < numTrainingRows; i++) {
List<Object> source = Arrays.asList(
"time", "2020-12-12",
"@timestamp", "2020-12-12",
BOOLEAN_FIELD, BOOLEAN_FIELD_VALUES.get(i % BOOLEAN_FIELD_VALUES.size()),
NUMERICAL_FIELD, NUMERICAL_FIELD_VALUES.get(i % NUMERICAL_FIELD_VALUES.size()),
DISCRETE_NUMERICAL_FIELD, DISCRETE_NUMERICAL_FIELD_VALUES.get(i % DISCRETE_NUMERICAL_FIELD_VALUES.size()),
@ -777,7 +777,7 @@ public class ClassificationIT extends MlNativeDataFrameAnalyticsIntegTestCase {
if (NESTED_FIELD.equals(dependentVariable) == false) {
source.addAll(Arrays.asList(NESTED_FIELD, KEYWORD_FIELD_VALUES.get(i % KEYWORD_FIELD_VALUES.size())));
}
source.addAll(Arrays.asList("time", "2020-12-12"));
source.addAll(Arrays.asList("@timestamp", "2020-12-12"));
IndexRequest indexRequest = new IndexRequest(sourceIndex).source(source.toArray()).opType(DocWriteRequest.OpType.CREATE);
bulkRequestBuilder.add(indexRequest);
}

View File

@ -108,10 +108,13 @@ public class DatafeedJobsIT extends MlNativeAutodetectIntegTestCase {
" \"properties\": {\n" +
" \"time\": {\n" +
" \"type\": \"date\"\n" +
" }," +
" \"@timestamp\": {\n" +
" \"type\": \"date\"\n" +
" }" +
" }\n" +
" }";
createDataStreamAndTemplate("datafeed_data_stream", "time", mapping);
createDataStreamAndTemplate("datafeed_data_stream", "@timestamp", mapping);
long numDocs = randomIntBetween(32, 2048);
long now = System.currentTimeMillis();
long oneWeekAgo = now - 604800000;
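
Rendered as JSON, the mapping string assembled above for datafeed_data_stream now declares both date fields, with @timestamp acting as the data stream timestamp field; a sketch of the resulting mapping:

--------------------------------------------------
{
  "properties": {
    "time": {
      "type": "date"
    },
    "@timestamp": {
      "type": "date"
    }
  }
}
--------------------------------------------------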

View File

@ -470,7 +470,7 @@ public class RegressionIT extends MlNativeDataFrameAnalyticsIntegTestCase {
static void indexData(String sourceIndex, int numTrainingRows, int numNonTrainingRows, boolean dataStream) {
String mapping = "{\n" +
" \"properties\": {\n" +
" \"time\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date\"\n" +
" }," +
" \""+ NUMERICAL_FEATURE_FIELD + "\": {\n" +
@ -486,7 +486,7 @@ public class RegressionIT extends MlNativeDataFrameAnalyticsIntegTestCase {
" }";
if (dataStream) {
try {
createDataStreamAndTemplate(sourceIndex, "time", mapping);
createDataStreamAndTemplate(sourceIndex, "@timestamp", mapping);
} catch (IOException ex) {
throw new ElasticsearchException(ex);
}
@ -503,7 +503,7 @@ public class RegressionIT extends MlNativeDataFrameAnalyticsIntegTestCase {
NUMERICAL_FEATURE_FIELD, NUMERICAL_FEATURE_VALUES.get(i % NUMERICAL_FEATURE_VALUES.size()),
DISCRETE_NUMERICAL_FEATURE_FIELD, DISCRETE_NUMERICAL_FEATURE_VALUES.get(i % DISCRETE_NUMERICAL_FEATURE_VALUES.size()),
DEPENDENT_VARIABLE_FIELD, DEPENDENT_VARIABLE_VALUES.get(i % DEPENDENT_VARIABLE_VALUES.size()),
"time", Instant.now().toEpochMilli());
"@timestamp", Instant.now().toEpochMilli());
IndexRequest indexRequest = new IndexRequest(sourceIndex).source(source.toArray()).opType(DocWriteRequest.OpType.CREATE);
bulkRequestBuilder.add(indexRequest);
}
@ -511,7 +511,7 @@ public class RegressionIT extends MlNativeDataFrameAnalyticsIntegTestCase {
List<Object> source = Arrays.asList(
NUMERICAL_FEATURE_FIELD, NUMERICAL_FEATURE_VALUES.get(i % NUMERICAL_FEATURE_VALUES.size()),
DISCRETE_NUMERICAL_FEATURE_FIELD, DISCRETE_NUMERICAL_FEATURE_VALUES.get(i % DISCRETE_NUMERICAL_FEATURE_VALUES.size()),
"time", Instant.now().toEpochMilli());
"@timestamp", Instant.now().toEpochMilli());
IndexRequest indexRequest = new IndexRequest(sourceIndex).source(source.toArray()).opType(DocWriteRequest.OpType.CREATE);
bulkRequestBuilder.add(indexRequest);
}

View File

@ -261,7 +261,7 @@ public abstract class BaseMlIntegTestCase extends ESIntegTestCase {
IndexRequest indexRequest = new IndexRequest(index, type);
long timestamp = start + randomIntBetween(0, maxDelta);
assert timestamp >= start && timestamp < end;
indexRequest.source("time", timestamp).opType(DocWriteRequest.OpType.CREATE);
indexRequest.source("time", timestamp, "@timestamp", timestamp).opType(DocWriteRequest.OpType.CREATE);
bulkRequestBuilder.add(indexRequest);
}
BulkResponse bulkResponse = bulkRequestBuilder

View File

@ -87,12 +87,17 @@ public abstract class TransformRestTestCase extends ESRestTestCase {
try (XContentBuilder builder = jsonBuilder()) {
builder.startObject();
{
builder.startObject("mappings").startObject("properties").startObject("timestamp").field("type", dateType);
builder.startObject("mappings").startObject("properties");
builder.startObject("@timestamp").field("type", dateType);
if (dateType.equals("date_nanos")) {
builder.field("format", "strict_date_optional_time_nanos");
}
builder.endObject();
builder.startObject("timestamp").field("type", dateType);
if (dateType.equals("date_nanos")) {
builder.field("format", "strict_date_optional_time_nanos");
}
builder.endObject()
.startObject("user_id")
.field("type", "keyword")
@ -116,7 +121,7 @@ public abstract class TransformRestTestCase extends ESRestTestCase {
"{\n" +
" \"index_patterns\": [ \"" + indexName + "\" ],\n" +
" \"data_stream\": {\n" +
" \"timestamp_field\": \"timestamp\"\n" +
" \"timestamp_field\": \"@timestamp\"\n" +
" },\n" +
" \"template\": \n" + Strings.toString(builder) +
"}"
@ -169,6 +174,8 @@ public abstract class TransformRestTestCase extends ESRestTestCase {
.append(location)
.append("\",\"timestamp\":\"")
.append(date_string)
.append("\",\"@timestamp\":\"")
.append(date_string)
.append("\"}\n");
if (i % 50 == 0) {