This adds a low-precedence mapping for the `@timestamp` field with type `date`. This will aid with the bootstrapping of data streams, as a timestamp mapping can be omitted when nanos precision is not needed.

(cherry picked from commit 4e72f43d62edfe52a934367ce9809b5efbcdb531)

Signed-off-by: Andrei Dan <andrei.dan@elastic.co>
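For orientation, the effect of the change is that, when mappings are collected for a data stream backing index, a default `date` mapping for `@timestamp` is inserted at the lowest precedence, so any `@timestamp` mapping coming from component or composable index templates still overrides it. The following is a minimal, standalone sketch of that ordering idea only; it uses a hypothetical `collect` helper and plain strings rather than the actual Elasticsearch API:

```java
import java.util.LinkedList;
import java.util.List;

public class DefaultTimestampMappingSketch {
    // Mirrors the intent of the DEFAULT_TIMESTAMP_MAPPING constant added by this commit:
    // a plain `date` mapping for `@timestamp`, used only when no template supplies one.
    static final String DEFAULT_TIMESTAMP_MAPPING =
        "{\"_doc\":{\"properties\":{\"@timestamp\":{\"type\":\"date\"}}}}";

    // Hypothetical helper (not the real collectMappings signature): component template
    // mappings first, then the index template's own mappings (highest precedence), and
    // the default `@timestamp` mapping prepended at index 0 (lowest precedence) when the
    // target is a data stream backing index.
    static List<String> collect(List<String> componentMappings,
                                String indexTemplateMapping,
                                boolean isDataStreamBackingIndex) {
        List<String> mappings = new LinkedList<>(componentMappings);
        if (indexTemplateMapping != null) {
            mappings.add(indexTemplateMapping);
        }
        if (isDataStreamBackingIndex) {
            mappings.add(0, DEFAULT_TIMESTAMP_MAPPING); // lowest precedence
        }
        return mappings;
    }

    public static void main(String[] args) {
        List<String> merged = collect(
            List.of("{\"properties\":{\"field1\":{\"type\":\"keyword\"}}}"),
            "{\"properties\":{\"@timestamp\":{\"type\":\"date_nanos\"}}}",
            true);
        // The default mapping prints first; later mappings (here `date_nanos`) win when merged.
        merged.forEach(System.out::println);
    }
}
```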
parent 5580eb61ed
commit 4180333bbc
@@ -99,7 +99,7 @@ PUT /_ilm/policy/logs_policy
Each data stream requires an <<indices-templates,index template>>. The data
stream uses this template to create its backing indices.

Index templates for data streams must contain:
An index template for a data stream must contain:

* A name or wildcard (`*`) pattern for the data stream in the `index_patterns`
property.
@@ -138,16 +138,22 @@ this pattern.
This timestamp field must be included in every document indexed to the data
stream.

* A <<date,`date`>> or <<date_nanos,`date_nanos`>> field mapping for the
timestamp field specified in the `timestamp_field` property.
The template can also contain:

* An optional field mapping for the `@timestamp` field. Both the <<date,`date`>> and
<<date_nanos,`date_nanos`>> field data types are supported. If no mapping is specified,
a <<date,`date`>> field data type with default options is used.
+
IMPORTANT: Carefully consider the timestamp field's mapping, including
<<mapping-params,mapping parameters>> such as <<mapping-date-format,`format`>>.
Once the stream is created, you can only update the timestamp field's mapping by
reindexing the data stream. See
This mapping can include other <<mapping-params,mapping parameters>>, such as
<<mapping-date-format,`format`>>.
+
IMPORTANT: Carefully consider the `@timestamp` field's mapping, including
its <<mapping-params,mapping parameters>>.
Once the stream is created, you can only update the `@timestamp` field's mapping
by reindexing the data stream. See
<<data-streams-use-reindex-to-change-mappings-settings>>.

* If you intend to use {ilm-init}, you must specify the
* If you intend to use {ilm-init}, the
<<configure-a-data-stream-ilm-policy,lifecycle policy>> in the
`index.lifecycle.name` setting.

@@ -174,13 +180,6 @@ PUT /_index_template/logs_data_stream
"timestamp_field": "@timestamp"
},
"template": {
"mappings": {
"properties": {
"@timestamp": {
"type": "date"
}
}
},
"settings": {
"index.lifecycle.name": "logs_policy"
}

@@ -21,15 +21,6 @@ PUT /_index_template/logs_data_stream
"index_patterns": [ "logs*" ],
"data_stream": {
"timestamp_field": "@timestamp"
},
"template": {
"mappings": {
"properties": {
"@timestamp": {
"type": "date"
}
}
}
}
}

@@ -35,8 +35,8 @@ setup:
---
"Resolve index with indices and aliases":
- skip:
version: " - 7.8.99"
reason: "resolve index api only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
indices.resolve_index:
@@ -63,8 +63,8 @@ setup:
---
"Resolve index with hidden and closed indices":
- skip:
version: " - 7.8.99"
reason: change after backporting
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
indices.resolve_index:

@@ -68,6 +68,7 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -87,6 +88,16 @@ import static org.elasticsearch.indices.cluster.IndicesClusterStateService.Alloc
*/
public class MetadataIndexTemplateService {

public static final String DEFAULT_TIMESTAMP_FIELD = "@timestamp";
public static final String DEFAULT_TIMESTAMP_MAPPING = "{\n" +
" \"_doc\": {\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date\"\n" +
" }\n" +
" }\n" +
" }\n" +
" }";
private static final Logger logger = LogManager.getLogger(MetadataIndexTemplateService.class);
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger);

@@ -916,7 +927,7 @@ public class MetadataIndexTemplateService {
*/
public static List<CompressedXContent> collectMappings(final ClusterState state,
final String templateName,
final String indexName) {
final String indexName) throws Exception {
final ComposableIndexTemplate template = state.metadata().templatesV2().get(templateName);
assert template != null : "attempted to resolve mappings for a template [" + templateName +
"] that did not exist in the cluster state";
@@ -931,11 +942,16 @@ public class MetadataIndexTemplateService {
.map(ComponentTemplate::template)
.map(Template::mappings)
.filter(Objects::nonNull)
.collect(Collectors.toList());
.collect(Collectors.toCollection(LinkedList::new));
// Add the actual index template's mappings, since it takes the highest precedence
Optional.ofNullable(template.template())
.map(Template::mappings)
.ifPresent(mappings::add);
if (template.getDataStreamTemplate() != null && indexName.startsWith(DataStream.BACKING_INDEX_PREFIX)) {
// add a default mapping for the `@timestamp` field, at the lowest precedence, to make bootstrapping data streams more
// straightforward as all backing indices are required to have a timestamp field
mappings.add(0, new CompressedXContent(DEFAULT_TIMESTAMP_MAPPING));
}

// Only include _timestamp mapping snippet if creating backing index.
if (indexName.startsWith(DataStream.BACKING_INDEX_PREFIX)) {
@@ -1112,7 +1128,7 @@ public class MetadataIndexTemplateService {
}
}

List<CompressedXContent> mappings = collectMappings(stateWithIndex, templateName, indexName );
List<CompressedXContent> mappings = collectMappings(stateWithIndex, templateName, indexName);
try {
MapperService mapperService = tempIndexService.mapperService();
for (CompressedXContent mapping : mappings) {

@@ -73,6 +73,7 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

import static java.util.Collections.singletonList;
import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD;
import static org.elasticsearch.common.settings.Settings.builder;
import static org.elasticsearch.indices.ShardLimitValidatorTests.createTestShardLimitService;
import static org.hamcrest.CoreMatchers.containsString;
@@ -809,7 +810,6 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
}
})
.collect(Collectors.toList());

assertThat(parsedMappings.get(0),
equalTo(Collections.singletonMap("_doc",
Collections.singletonMap("properties", Collections.singletonMap("field2", Collections.singletonMap("type", "text"))))));
@@ -821,6 +821,249 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
Collections.singletonMap("properties", Collections.singletonMap("field3", Collections.singletonMap("type", "integer"))))));
}

public void testDefinedTimestampMappingIsAddedForDataStreamTemplates() throws Exception {
final MetadataIndexTemplateService service = getMetadataIndexTemplateService();
ClusterState state = ClusterState.EMPTY_STATE;

ComponentTemplate ct1 = new ComponentTemplate(new Template(null,
new CompressedXContent("{\n" +
" \"properties\": {\n" +
" \"field1\": {\n" +
" \"type\": \"keyword\"\n" +
" }\n" +
" }\n" +
" }"), null), null, null);

state = service.addComponentTemplate(state, true, "ct1", ct1);

{
ComposableIndexTemplate it = new ComposableIndexTemplate(org.elasticsearch.common.collect.List.of("logs*"),
new Template(null,
new CompressedXContent("{\n" +
" \"properties\": {\n" +
" \"field2\": {\n" +
" \"type\": \"integer\"\n" +
" }\n" +
" }\n" +
" }"), null),
org.elasticsearch.common.collect.List.of("ct1"), 0L, 1L, null,
new ComposableIndexTemplate.DataStreamTemplate(DEFAULT_TIMESTAMP_FIELD));
state = service.addIndexTemplateV2(state, true, "logs-data-stream-template", it);

List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(state, "logs-data-stream-template",
DataStream.getDefaultBackingIndexName("logs", 1L));

assertNotNull(mappings);
assertThat(mappings.size(), equalTo(4));
List<Map<String, Object>> parsedMappings = mappings.stream()
.map(m -> {
try {
return MapperService.parseMapping(
new NamedXContentRegistry(org.elasticsearch.common.collect.List.of()), m.string());
} catch (Exception e) {
logger.error(e);
fail("failed to parse mappings: " + m.string());
return null;
}
})
.collect(Collectors.toList());

Map<String, Object> firstParsedMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of(DEFAULT_TIMESTAMP_FIELD,
org.elasticsearch.common.collect.Map.of("type", "date"))));
assertThat(parsedMappings.get(0), equalTo(firstParsedMapping));

Map<String, Object> secondMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of("field1",
org.elasticsearch.common.collect.Map.of("type", "keyword"))));
assertThat(parsedMappings.get(1), equalTo(secondMapping));


Map<String, Object> thirdMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of("field2",
org.elasticsearch.common.collect.Map.of("type", "integer"))));
assertThat(parsedMappings.get(2), equalTo(thirdMapping));
}

{
// indices matched by templates without the data stream field defined don't get the default @timestamp mapping
ComposableIndexTemplate it = new ComposableIndexTemplate(org.elasticsearch.common.collect.List.of("timeseries*"),
new Template(null,
new CompressedXContent("{\n" +
" \"properties\": {\n" +
" \"field2\": {\n" +
" \"type\": \"integer\"\n" +
" }\n" +
" }\n" +
" }"), null),
org.elasticsearch.common.collect.List.of("ct1"), 0L, 1L, null, null);
state = service.addIndexTemplateV2(state, true, "timeseries-template", it);

List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(state, "timeseries-template", "timeseries");

assertNotNull(mappings);
assertThat(mappings.size(), equalTo(2));
List<Map<String, Object>> parsedMappings = mappings.stream()
.map(m -> {
try {
return MapperService.parseMapping(
new NamedXContentRegistry(org.elasticsearch.common.collect.List.of()), m.string());
} catch (Exception e) {
logger.error(e);
fail("failed to parse mappings: " + m.string());
return null;
}
})
.collect(Collectors.toList());

Map<String, Object> firstMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of("field1",
org.elasticsearch.common.collect.Map.of("type", "keyword"))));
assertThat(parsedMappings.get(0), equalTo(firstMapping));


Map<String, Object> secondMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of("field2",
org.elasticsearch.common.collect.Map.of("type", "integer"))));
assertThat(parsedMappings.get(1), equalTo(secondMapping));

// a default @timestamp mapping will not be added if the matching template doesn't have the data stream field configured, even
// if the index name matches that of a data stream backing index
mappings = MetadataIndexTemplateService.collectMappings(state, "timeseries-template",
DataStream.getDefaultBackingIndexName("timeseries", 1L));

assertNotNull(mappings);
assertThat(mappings.size(), equalTo(2));
parsedMappings = mappings.stream()
.map(m -> {
try {
return MapperService.parseMapping(
new NamedXContentRegistry(org.elasticsearch.common.collect.List.of()), m.string());
} catch (Exception e) {
logger.error(e);
fail("failed to parse mappings: " + m.string());
return null;
}
})
.collect(Collectors.toList());

firstMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of("field1",
org.elasticsearch.common.collect.Map.of("type", "keyword"))));
assertThat(parsedMappings.get(0), equalTo(firstMapping));


secondMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of("field2",
org.elasticsearch.common.collect.Map.of("type", "integer"))));
assertThat(parsedMappings.get(1), equalTo(secondMapping));
}
}

public void testUserDefinedMappingTakesPrecedenceOverDefault() throws Exception {
final MetadataIndexTemplateService service = getMetadataIndexTemplateService();
ClusterState state = ClusterState.EMPTY_STATE;

{
// user defines a @timestamp mapping as part of a component template
ComponentTemplate ct1 = new ComponentTemplate(new Template(null,
new CompressedXContent("{\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date_nanos\"\n" +
" }\n" +
" }\n" +
" }"), null), null, null);

state = service.addComponentTemplate(state, true, "ct1", ct1);
ComposableIndexTemplate it = new ComposableIndexTemplate(org.elasticsearch.common.collect.List.of("logs*"), null,
org.elasticsearch.common.collect.List.of("ct1"), 0L, 1L, null,
new ComposableIndexTemplate.DataStreamTemplate(DEFAULT_TIMESTAMP_FIELD));
state = service.addIndexTemplateV2(state, true, "logs-template", it);

List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(state, "logs-template",
DataStream.getDefaultBackingIndexName("logs", 1L));

assertNotNull(mappings);
assertThat(mappings.size(), equalTo(3));
List<Map<String, Object>> parsedMappings = mappings.stream()
.map(m -> {
try {
return MapperService.parseMapping(
new NamedXContentRegistry(org.elasticsearch.common.collect.List.of()), m.string());
} catch (Exception e) {
logger.error(e);
fail("failed to parse mappings: " + m.string());
return null;
}
})
.collect(Collectors.toList());

Map<String, Object> firstMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of(DEFAULT_TIMESTAMP_FIELD,
org.elasticsearch.common.collect.Map.of("type", "date"))));
assertThat(parsedMappings.get(0), equalTo(firstMapping));

Map<String, Object> secondMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of(DEFAULT_TIMESTAMP_FIELD,
org.elasticsearch.common.collect.Map.of("type", "date_nanos"))));
assertThat(parsedMappings.get(1), equalTo(secondMapping));
}

{
// user defines a @timestamp mapping as part of a composable index template
Template template = new Template(null, new CompressedXContent("{\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date_nanos\"\n" +
" }\n" +
" }\n" +
" }"), null);
ComposableIndexTemplate it = new ComposableIndexTemplate(
org.elasticsearch.common.collect.List.of("timeseries*"), template, null, 0L, 1L, null,
new ComposableIndexTemplate.DataStreamTemplate(DEFAULT_TIMESTAMP_FIELD));
state = service.addIndexTemplateV2(state, true, "timeseries-template", it);

List<CompressedXContent> mappings = MetadataIndexTemplateService.collectMappings(state, "timeseries-template",
DataStream.getDefaultBackingIndexName("timeseries-template", 1L));

assertNotNull(mappings);
assertThat(mappings.size(), equalTo(3));
List<Map<String, Object>> parsedMappings = mappings.stream()
.map(m -> {
try {
return MapperService.parseMapping(
new NamedXContentRegistry(org.elasticsearch.common.collect.List.of()), m.string());
} catch (Exception e) {
logger.error(e);
fail("failed to parse mappings: " + m.string());
return null;
}
})
.collect(Collectors.toList());
Map<String, Object> firstMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of(DEFAULT_TIMESTAMP_FIELD,
org.elasticsearch.common.collect.Map.of("type", "date"))));
assertThat(parsedMappings.get(0), equalTo(firstMapping));

Map<String, Object> secondMapping = org.elasticsearch.common.collect.Map.of("_doc",
org.elasticsearch.common.collect.Map.of("properties",
org.elasticsearch.common.collect.Map.of(DEFAULT_TIMESTAMP_FIELD,
org.elasticsearch.common.collect.Map.of("type", "date_nanos"))));
assertThat(parsedMappings.get(1), equalTo(secondMapping));
}
}

public void testResolveSettings() throws Exception {
final MetadataIndexTemplateService service = getMetadataIndexTemplateService();
ClusterState state = ClusterState.EMPTY_STATE;
@@ -1108,16 +1351,8 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
.build())
.build();

String mapping = "{\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date\"\n" +
" }\n" +
" }\n" +
" }";
Template mappingTemplate = new Template(null, new CompressedXContent(mapping), null);
ComposableIndexTemplate template = new ComposableIndexTemplate(Collections.singletonList("logs-*-*"),
mappingTemplate, null, 100L, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp"));
null, null, 100L, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp"));

state = service.addIndexTemplateV2(state, false, "logs", template);

@@ -1160,7 +1395,7 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {

// Change the pattern to one that doesn't match the data stream
e = expectThrows(IllegalArgumentException.class, () -> {
ComposableIndexTemplate newTemplate = new ComposableIndexTemplate(Collections.singletonList("logs-postgres-*"), mappingTemplate,
ComposableIndexTemplate newTemplate = new ComposableIndexTemplate(Collections.singletonList("logs-postgres-*"), null,
null, 100L, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp"));
service.addIndexTemplateV2(stateWithDS, false, "logs", newTemplate);
});
@@ -1170,7 +1405,7 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
"cause data streams [unreferenced, logs-mysql-default] to no longer match a data stream template"));

// Add an additional template that matches our data stream at a lower priority
ComposableIndexTemplate mysqlTemplate = new ComposableIndexTemplate(Collections.singletonList("logs-mysql-*"), mappingTemplate,
ComposableIndexTemplate mysqlTemplate = new ComposableIndexTemplate(Collections.singletonList("logs-mysql-*"), null,
null, 50L, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp"));
ClusterState stateWithDSAndTemplate = service.addIndexTemplateV2(stateWithDS, false, "logs-mysql", mysqlTemplate);

@@ -12,11 +12,6 @@
name: my-template1
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -1,6 +1,9 @@
setup:
- skip:
features: allowed_warnings
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
allowed_warnings:
- "index template [my-template1] has index patterns [simple-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template1] will take precedence during new index creation"
@@ -9,10 +12,6 @@ setup:
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
settings:
index.number_of_replicas: 0
data_stream:
@@ -28,15 +27,15 @@ setup:
mappings:
properties:
'@timestamp':
type: date
type: date_nanos
data_stream:
timestamp_field: '@timestamp'

---
"Create data stream":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
indices.create_data_stream:
@@ -65,6 +64,18 @@ setup:
- match: { data_streams.1.indices.0.index_name: '.ds-simple-data-stream2-000001' }
- match: { data_streams.1.template: 'my-template2' }

- do:
indices.get_mapping:
index: .ds-simple-data-stream1-000001
expand_wildcards: hidden
- match: { \.ds-simple-data-stream1-000001.mappings.properties.@timestamp.type: 'date' }

- do:
indices.get_mapping:
index: .ds-simple-data-stream2-000001
expand_wildcards: hidden
- match: { \.ds-simple-data-stream2-000001.mappings.properties.@timestamp.type: 'date_nanos' }

- do:
index:
index: simple-data-stream1
@@ -97,8 +108,8 @@ setup:
---
"Create data stream with invalid name":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
catch: bad_request
@@ -112,7 +123,7 @@ setup:
---
"Get data stream":
- skip:
version: " - 7.8.99"
version: " - 7.9.99"
reason: "change to 7.8.99 after backport"

- do:
@@ -177,8 +188,8 @@ setup:
---
"Delete data stream with backing indices":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
indices.create_data_stream:
@@ -221,8 +232,8 @@ setup:
---
"append-only writes to backing indices prohobited":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"
features: allowed_warnings

- do:
@@ -232,11 +243,6 @@ setup:
name: generic_logs_template
body:
index_patterns: logs-*
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -293,11 +299,6 @@ setup:
name: generic_logs_template
body:
index_patterns: logs-*
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -12,11 +12,6 @@
name: my-template1
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -1,8 +1,8 @@
---
"Test apis that do not supported data streams":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"
features: allowed_warnings

- do:
@@ -12,11 +12,6 @@
name: my-template
body:
index_patterns: [logs-*]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -55,8 +50,8 @@
---
"Prohibit clone on data stream's write index":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"
features: allowed_warnings

- do:
@@ -66,11 +61,6 @@
name: my-template1
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -110,7 +100,7 @@
---
"Prohibit shrink on data stream's write index":
- skip:
version: " - 7.8.99"
version: " - 7.9.99"
reason: "data streams only supported in 7.9+"
features: allowed_warnings

@@ -121,11 +111,6 @@
name: my-template1
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -153,8 +138,8 @@
---
"Close write index for data stream fails":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"
features: allowed_warnings

- do:
@@ -164,11 +149,6 @@
name: my-template1
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -190,8 +170,8 @@
---
"Prohibit split on data stream's write index":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"
features: allowed_warnings

- do:
@@ -201,11 +181,6 @@
name: my-template1
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -1,8 +1,8 @@
---
"Put index template":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"
features: allowed_warnings

- do:
@@ -18,10 +18,6 @@
settings:
number_of_shards: 1
number_of_replicas: 0
mappings:
properties:
'@timestamp':
type: date

- do:
index:

@@ -2,6 +2,9 @@
setup:
- skip:
features: allowed_warnings
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
allowed_warnings:
- "index template [logs_template] has index patterns [logs-foobar] matching patterns from existing older templates [global] with patterns (global => [*]); this template [logs_template] will take precedence during new index creation"
@@ -9,11 +12,6 @@ setup:
name: logs_template
body:
index_patterns: logs-foobar
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -30,8 +28,8 @@ teardown:
---
"Verify get index api":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
indices.get:
@@ -66,10 +64,6 @@ teardown:
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
settings:
number_of_shards: "1"
number_of_replicas: "0"
@@ -121,11 +115,6 @@ teardown:
name: my-template1
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -159,11 +148,6 @@ teardown:
name: my-template1
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -198,11 +182,6 @@ teardown:
name: my-template1
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -1,6 +1,8 @@
setup:
- skip:
features: allowed_warnings
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"
- do:
allowed_warnings:
- "index template [my-template] has index patterns [simple-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation"
@@ -8,19 +10,14 @@ setup:
name: my-template
body:
index_patterns: [simple-*]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

---
"Delete backing index on data stream":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
indices.create_data_stream:
@@ -71,8 +68,8 @@ setup:
---
"Attempt to delete write index on data stream is rejected":
- skip:
version: " - 7.8.99"
reason: "mute bwc until backported"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
indices.create_data_stream:

@@ -1,8 +1,8 @@
---
"Get backing indices for data stream":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"
features: allowed_warnings

- do:
@@ -12,11 +12,6 @@
name: my-template
body:
index_patterns: [data-*]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -1,8 +1,8 @@
---
"Roll over a data stream":
- skip:
version: " - 7.8.99"
reason: "data streams only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"
features: allowed_warnings

- do:
@@ -12,11 +12,6 @@
name: my-template
body:
index_patterns: [data-*]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

@@ -1,8 +1,8 @@
---
setup:
- skip:
version: "7.8.99 - "
reason: "resolve index api only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"
features: allowed_warnings

- do:
@@ -12,13 +12,9 @@ setup:
name: my-template1
body:
index_patterns: [simple-data-stream1]
template:
mappings:
properties:
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp'

- do:
allowed_warnings:
- "index template [my-template2] has index patterns [simple-data-stream2] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template2] will take precedence during new index creation"
@@ -29,10 +25,10 @@ setup:
template:
mappings:
properties:
'@timestamp2':
'@timestamp':
type: date
data_stream:
timestamp_field: '@timestamp2'
timestamp_field: '@timestamp'

- do:
indices.create_data_stream:
@@ -76,8 +72,8 @@ setup:
---
"Resolve index with indices, aliases, and data streams":
- skip:
version: " - 7.8.99"
reason: "resolve index api only supported in 7.9+"
version: " - 7.9.99"
reason: "enable in 7.9+ when backported"

- do:
indices.resolve_index:
@@ -120,7 +116,7 @@ setup:
---
"Resolve index with hidden and closed indices":
- skip:
version: " - 7.8.99"
version: " - 7.9.99"
reason: change after backporting

- do:

@@ -1,8 +1,11 @@
---
setup:
- skip:
version: " - 7.99.99"
reason: "enable in 7.9+ when backported"
features: allowed_warnings
- do:

- do :
allowed_warnings:
- "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation"
indices.put_index_template:
@@ -11,11 +14,6 @@ setup:
index_patterns: logs-*
data_stream:
timestamp_field: '@timestamp'
template:
mappings:
properties:
'@timestamp':
type: date

---
teardown:

@@ -41,9 +41,9 @@ import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.collect.Map;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectPath;
@@ -60,19 +60,19 @@ import org.junit.After;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Locale;
import java.util.Optional;

import static org.elasticsearch.action.DocWriteRequest.OpType.CREATE;
import static org.elasticsearch.cluster.DataStreamTestHelper.generateMapping;
import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItemInArray;
import static org.hamcrest.Matchers.hasSize;
@@ -98,11 +98,11 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testBasicScenario() throws Exception {
putComposableIndexTemplate("id1", "@timestamp", List.of("metrics-foo*"));
putComposableIndexTemplate("id1", List.of("metrics-foo*"));
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request("metrics-foo");
client().admin().indices().createDataStream(createDataStreamRequest).get();

putComposableIndexTemplate("id2", "@timestamp", List.of("metrics-bar*"));
putComposableIndexTemplate("id2", List.of("metrics-bar*"));
createDataStreamRequest = new CreateDataStreamAction.Request("metrics-bar");
client().admin().indices().createDataStream(createDataStreamRequest).get();

@@ -136,9 +136,9 @@ public class DataStreamIT extends ESIntegTestCase {
assertThat(ObjectPath.eval("properties.@timestamp.type", mappings), is("date"));

int numDocsBar = randomIntBetween(2, 16);
indexDocs("metrics-bar", "@timestamp", numDocsBar);
indexDocs("metrics-bar", numDocsBar);
int numDocsFoo = randomIntBetween(2, 16);
indexDocs("metrics-foo", "@timestamp", numDocsFoo);
indexDocs("metrics-foo", numDocsFoo);

verifyDocs("metrics-bar", numDocsBar, 1, 1);
verifyDocs("metrics-foo", numDocsFoo, 1, 1);
@@ -166,9 +166,9 @@ public class DataStreamIT extends ESIntegTestCase {
assertThat(ObjectPath.eval("properties.@timestamp.type", mappings), is("date"));

int numDocsBar2 = randomIntBetween(2, 16);
indexDocs("metrics-bar", "@timestamp", numDocsBar2);
indexDocs("metrics-bar", numDocsBar2);
int numDocsFoo2 = randomIntBetween(2, 16);
indexDocs("metrics-foo", "@timestamp", numDocsFoo2);
indexDocs("metrics-foo", numDocsFoo2);

verifyDocs("metrics-bar", numDocsBar + numDocsBar2, 1, 2);
verifyDocs("metrics-foo", numDocsFoo + numDocsFoo2, 1, 2);
@@ -209,7 +209,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testOtherWriteOps() throws Exception {
putComposableIndexTemplate("id", "@timestamp", List.of("metrics-foobar*"));
putComposableIndexTemplate("id", List.of("metrics-foobar*"));
String dataStreamName = "metrics-foobar";
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(createDataStreamRequest).get();
@@ -300,7 +300,7 @@ public class DataStreamIT extends ESIntegTestCase {
client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet();

int numDocs = randomIntBetween(2, 16);
indexDocs(dataStreamName, "@timestamp", numDocs);
indexDocs(dataStreamName, numDocs);
verifyDocs(dataStreamName, numDocs, 1, 1);

String backingIndex = DataStream.getDefaultBackingIndexName(dataStreamName, 1);
@@ -334,7 +334,7 @@ public class DataStreamIT extends ESIntegTestCase {
);

int numDocs2 = randomIntBetween(2, 16);
indexDocs(dataStreamName, "@timestamp", numDocs2);
indexDocs(dataStreamName, numDocs2);
verifyDocs(dataStreamName, numDocs + numDocs2, 1, 2);

DeleteDataStreamAction.Request deleteDataStreamRequest = new DeleteDataStreamAction.Request(new String[] { dataStreamName });
@@ -358,28 +358,6 @@ public class DataStreamIT extends ESIntegTestCase {
);
}

public void testTimeStampValidationNoFieldMapping() throws Exception {
// Adding a template without a mapping for timestamp field and expect template creation to fail.
PutComposableIndexTemplateAction.Request createTemplateRequest = new PutComposableIndexTemplateAction.Request("logs-foo");
createTemplateRequest.indexTemplate(
new ComposableIndexTemplate(
Collections.singletonList("logs-*"),
new Template(null, new CompressedXContent("{}"), null),
null,
null,
null,
null,
new ComposableIndexTemplate.DataStreamTemplate("@timestamp")
)
);

Exception e = expectThrows(
IllegalArgumentException.class,
() -> client().execute(PutComposableIndexTemplateAction.INSTANCE, createTemplateRequest).actionGet()
);
assertThat(e.getCause().getCause().getMessage(), equalTo("the configured timestamp field [@timestamp] does not exist"));
}

public void testTimeStampValidationInvalidFieldMapping() throws Exception {
// Adding a template with an invalid mapping for timestamp field and expect template creation to fail.
String mapping = "{\n"
@@ -413,7 +391,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testResolvabilityOfDataStreamsInAPIs() throws Exception {
putComposableIndexTemplate("id", "@timestamp", List.of("logs-*"));
putComposableIndexTemplate("id", List.of("logs-*"));
String dataStreamName = "logs-foobar";
CreateDataStreamAction.Request request = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(request).actionGet();
@@ -528,7 +506,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testCannotDeleteComposableTemplateUsedByDataStream() throws Exception {
putComposableIndexTemplate("id", "@timestamp", List.of("metrics-foobar*"));
putComposableIndexTemplate("id", List.of("metrics-foobar*"));
String dataStreamName = "metrics-foobar-baz";
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(createDataStreamRequest).get();
@@ -561,7 +539,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testAliasActionsFailOnDataStreams() throws Exception {
putComposableIndexTemplate("id1", "@timestamp", List.of("metrics-foo*"));
putComposableIndexTemplate("id1", List.of("metrics-foo*"));
String dataStreamName = "metrics-foo";
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(createDataStreamRequest).get();
@@ -576,7 +554,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testAliasActionsFailOnDataStreamBackingIndices() throws Exception {
putComposableIndexTemplate("id1", "@timestamp", List.of("metrics-foo*"));
putComposableIndexTemplate("id1", List.of("metrics-foo*"));
String dataStreamName = "metrics-foo";
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(createDataStreamRequest).get();
@@ -613,7 +591,7 @@ public class DataStreamIT extends ESIntegTestCase {
+ " }\n"
+ " }\n"
+ " }";
putComposableIndexTemplate("id1", "@timestamp", mapping, List.of("logs-foo*"), null);
putComposableIndexTemplate("id1", mapping, List.of("logs-foo*"), null);

CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request("logs-foobar");
client().admin().indices().createDataStream(createDataStreamRequest).get();
@@ -627,7 +605,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testUpdateMappingViaDataStream() throws Exception {
putComposableIndexTemplate("id1", "@timestamp", List.of("logs-*"));
putComposableIndexTemplate("id1", List.of("logs-*"));
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request("logs-foobar");
client().admin().indices().createDataStream(createDataStreamRequest).actionGet();

@@ -669,7 +647,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testUpdateIndexSettingsViaDataStream() throws Exception {
putComposableIndexTemplate("id1", "@timestamp", List.of("logs-*"));
putComposableIndexTemplate("id1", List.of("logs-*"));
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request("logs-foobar");
client().admin().indices().createDataStream(createDataStreamRequest).actionGet();

@@ -698,7 +676,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testIndexDocsWithCustomRoutingTargetingDataStreamIsNotAllowed() throws Exception {
putComposableIndexTemplate("id1", "@timestamp", List.of("logs-foo*"));
putComposableIndexTemplate("id1", List.of("logs-foo*"));

// Index doc that triggers creation of a data stream
String dataStream = "logs-foobar";
@@ -747,7 +725,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testIndexDocsWithCustomRoutingTargetingBackingIndex() throws Exception {
putComposableIndexTemplate("id1", "@timestamp", List.of("logs-foo*"));
putComposableIndexTemplate("id1", List.of("logs-foo*"));

// Index doc that triggers creation of a data stream
IndexRequest indexRequest = new IndexRequest("logs-foobar").source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON)
@@ -770,25 +748,25 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testSearchAllResolvesDataStreams() throws Exception {
putComposableIndexTemplate("id1", "@timestamp", List.of("metrics-foo*"));
putComposableIndexTemplate("id1", List.of("metrics-foo*"));
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request("metrics-foo");
client().admin().indices().createDataStream(createDataStreamRequest).get();

putComposableIndexTemplate("id2", "@timestamp", List.of("metrics-bar*"));
putComposableIndexTemplate("id2", List.of("metrics-bar*"));
createDataStreamRequest = new CreateDataStreamAction.Request("metrics-bar");
client().admin().indices().createDataStream(createDataStreamRequest).get();

int numDocsBar = randomIntBetween(2, 16);
indexDocs("metrics-bar", "@timestamp", numDocsBar);
indexDocs("metrics-bar", numDocsBar);
int numDocsFoo = randomIntBetween(2, 16);
indexDocs("metrics-foo", "@timestamp", numDocsFoo);
indexDocs("metrics-foo", numDocsFoo);

RolloverResponse rolloverResponse = client().admin().indices().rolloverIndex(new RolloverRequest("metrics-foo", null)).get();
assertThat(rolloverResponse.getNewIndex(), equalTo(DataStream.getDefaultBackingIndexName("metrics-foo", 2)));

// ingest some more data in the rolled data stream
int numDocsRolledFoo = randomIntBetween(2, 16);
indexDocs("metrics-foo", "@timestamp", numDocsRolledFoo);
indexDocs("metrics-foo", numDocsRolledFoo);

SearchRequest searchRequest = new SearchRequest("*");
SearchResponse searchResponse = client().search(searchRequest).actionGet();
@@ -797,10 +775,10 @@ public class DataStreamIT extends ESIntegTestCase {

public void testGetDataStream() throws Exception {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, maximumNumberOfReplicas() + 2).build();
putComposableIndexTemplate("template_for_foo", "@timestamp", List.of("metrics-foo*"), settings);
putComposableIndexTemplate("template_for_foo", null, List.of("metrics-foo*"), settings);

int numDocsFoo = randomIntBetween(2, 16);
indexDocs("metrics-foo", "@timestamp", numDocsFoo);
indexDocs("metrics-foo", numDocsFoo);

GetDataStreamAction.Response response = client().admin()
.indices()
@@ -827,7 +805,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testNoTimestampInDocument() throws Exception {
putComposableIndexTemplate("id", "@timestamp", List.of("logs-foobar*"));
putComposableIndexTemplate("id", List.of("logs-foobar*"));
String dataStreamName = "logs-foobar";
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(createDataStreamRequest).get();
@@ -838,7 +816,7 @@ public class DataStreamIT extends ESIntegTestCase {
}

public void testMultipleTimestampValuesInDocument() throws Exception {
putComposableIndexTemplate("id", "@timestamp", List.of("logs-foobar*"));
putComposableIndexTemplate("id", List.of("logs-foobar*"));
String dataStreamName = "logs-foobar";
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName);
client().admin().indices().createDataStream(createDataStreamRequest).get();
@@ -974,13 +952,13 @@ public class DataStreamIT extends ESIntegTestCase {
}
}

private static void indexDocs(String dataStream, String timestampField, int numDocs) {
private static void indexDocs(String dataStream, int numDocs) {
BulkRequest bulkRequest = new BulkRequest();
for (int i = 0; i < numDocs; i++) {
String value = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis());
bulkRequest.add(
new IndexRequest(dataStream).opType(DocWriteRequest.OpType.CREATE)
.source(String.format(Locale.ROOT, "{\"%s\":\"%s\"}", timestampField, value), XContentType.JSON)
.source(String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, value), XContentType.JSON)
);
}
BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet();
@@ -1008,22 +986,13 @@ public class DataStreamIT extends ESIntegTestCase {
Arrays.stream(searchResponse.getHits().getHits()).forEach(hit -> { assertTrue(expectedIndices.contains(hit.getIndex())); });
}

public static void putComposableIndexTemplate(String id, String timestampFieldName, java.util.List<String> patterns)
throws IOException {
String mapping = generateMapping(timestampFieldName);
putComposableIndexTemplate(id, timestampFieldName, mapping, patterns, null);
}

public static void putComposableIndexTemplate(String id, String timestampFieldName, java.util.List<String> patterns, Settings settings)
throws IOException {
String mapping = generateMapping(timestampFieldName);
putComposableIndexTemplate(id, timestampFieldName, mapping, patterns, settings);
public static void putComposableIndexTemplate(String id, java.util.List<String> patterns) throws IOException {
putComposableIndexTemplate(id, null, patterns, null);
}

static void putComposableIndexTemplate(
String id,
String timestampFieldName,
String mapping,
@Nullable String mappings,
java.util.List<String> patterns,
@Nullable Settings settings
) throws IOException {
@@ -1031,12 +1000,12 @@ public class DataStreamIT extends ESIntegTestCase {
request.indexTemplate(
new ComposableIndexTemplate(
patterns,
new Template(settings, new CompressedXContent(mapping), null),
new Template(settings, mappings == null ? null : new CompressedXContent(mappings), null),
null,
null,
null,
null,
new ComposableIndexTemplate.DataStreamTemplate(timestampFieldName)
new ComposableIndexTemplate.DataStreamTemplate("@timestamp")
)
);
client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet();

@@ -66,7 +66,7 @@ public class DataStreamsSnapshotsIT extends AbstractSnapshotIntegTestCase {
Path location = randomRepoPath();
createRepository(REPO, "fs", location);

DataStreamIT.putComposableIndexTemplate("t1", "@timestamp", List.of("ds", "other-ds"));
DataStreamIT.putComposableIndexTemplate("t1", List.of("ds", "other-ds"));

CreateDataStreamAction.Request request = new CreateDataStreamAction.Request("ds");
AcknowledgedResponse response = client.admin().indices().createDataStream(request).get();

@@ -55,7 +55,7 @@ public class ShardClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCase
);

String dataStream = "datastream";
DataStreamIT.putComposableIndexTemplate("dst", "@timestamp", Collections.singletonList(dataStream));
DataStreamIT.putComposableIndexTemplate("dst", Collections.singletonList(dataStream));

logger.info("--> indexing some data");
for (int i = 0; i < 100; i++) {

@@ -11,7 +11,6 @@ import org.elasticsearch.client.Response;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -48,15 +47,6 @@ import static org.hamcrest.Matchers.is;

public class TimeSeriesDataStreamsIT extends ESRestTestCase {

private static final String FAILED_STEP_RETRY_COUNT_FIELD = "failed_step_retry_count";
public static final String TIMESTAMP_MAPPING = "{\n" +
" \"properties\": {\n" +
" \"@timestamp\": {\n" +
" \"type\": \"date\"\n" +
" }\n" +
" }\n" +
" }";

public void testRolloverAction() throws Exception {
String policyName = "logs-policy";
createNewSingletonPolicy(client(), policyName, "hot", new RolloverAction(null, null, 1L));
@@ -233,7 +223,7 @@ public class TimeSeriesDataStreamsIT extends ESRestTestCase {
}

private static Template getTemplate(String policyName) throws IOException {
return new Template(getLifcycleSettings(policyName), new CompressedXContent(TIMESTAMP_MAPPING), null);
return new Template(getLifcycleSettings(policyName), null, null);
}

private static Settings getLifcycleSettings(String policyName) {