_timestamp: enable mapper properties merging
Updates to the _timestamp field were previously silently ignored. _timestamp now goes through the same merge as regular fields, including conflict exceptions when a property cannot be changed; "path" and "default" cannot be changed.

closes #5772
closes #6958
closes #7614
partially fixes #777
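The user-visible effect, sketched below under the assumption of a fresh index named "test" with type "type" (both hypothetical), is that a put-mapping request that tries to change the immutable "path" or "default" properties of _timestamp is now rejected with a MergeMappingException instead of being silently dropped. The snippet reuses only the client calls exercised by the integration tests in this commit.

    // Sketch only: index/type names and mappings are illustrative, not part of this commit.
    String original = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_timestamp").field("enabled", true).field("path", "foo").field("default", "1970-01-01").endObject()
            .endObject().endObject().string();
    client().admin().indices().prepareCreate("test").addMapping("type", original).get();

    String update = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_timestamp").field("enabled", true).field("path", "bar").field("default", "1970-01-02").endObject()
            .endObject().endObject().string();
    try {
        // Previously this update was silently ignored; now the merge reports conflicts.
        client().admin().indices().preparePutMapping("test").setType("type").setSource(update).get();
    } catch (MergeMappingException e) {
        // e.getDetailedMessage() now contains "Cannot update path in _timestamp value ..."
        // and "Cannot update default in _timestamp value ...".
    }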
parent bfedd11ffa
commit ee5221bd22
@@ -249,7 +249,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements InternalMap
         if (includeDefaults || enabledState != Defaults.ENABLED) {
             builder.field("enabled", enabledState.enabled);
         }
-        if (includeDefaults || fieldType.indexed() != Defaults.FIELD_TYPE.indexed()) {
+        if (includeDefaults || (fieldType.indexed() != Defaults.FIELD_TYPE.indexed()) || (fieldType.tokenized() != Defaults.FIELD_TYPE.tokenized())) {
             builder.field("index", indexTokenizeOptionToString(fieldType.indexed(), fieldType.tokenized()));
         }
         if (includeDefaults || fieldType.stored() != Defaults.FIELD_TYPE.stored()) {
@@ -277,10 +277,22 @@ public class TimestampFieldMapper extends DateFieldMapper implements InternalMap
     @Override
     public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
         TimestampFieldMapper timestampFieldMapperMergeWith = (TimestampFieldMapper) mergeWith;
+        super.merge(mergeWith, mergeContext);
         if (!mergeContext.mergeFlags().simulate()) {
             if (timestampFieldMapperMergeWith.enabledState != enabledState && !timestampFieldMapperMergeWith.enabledState.unset()) {
                 this.enabledState = timestampFieldMapperMergeWith.enabledState;
             }
+        } else {
+            if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) {
+                mergeContext.addConflict("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp());
+            }
+            if (this.path != null) {
+                if (path.equals(timestampFieldMapperMergeWith.path()) == false) {
+                    mergeContext.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is " + (timestampFieldMapperMergeWith.path() == null ? "missing" : timestampFieldMapperMergeWith.path()));
+                }
+            } else if (timestampFieldMapperMergeWith.path() != null) {
+                mergeContext.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is missing");
+            }
         }
     }
 }
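Note the split in the new merge() logic: "enabled" is applied only when the merge is actually executed, while the "default" and "path" conflicts are recorded only during the simulate pass. A rough sketch of how that surfaces through DocumentMapper, assuming the createIndex() helper used by the tests and two hypothetical mappings that differ only in the _timestamp path:

    // Illustrative sketch; mappingWithPathFoo / mappingWithPathBar are hypothetical mapping strings.
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    DocumentMapper existing = parser.parse(mappingWithPathFoo);
    DocumentMapper incoming = parser.parse(mappingWithPathBar);

    // simulate(true): immutable-property conflicts are collected but nothing is changed.
    DocumentMapper.MergeResult dryRun = existing.merge(incoming, DocumentMapper.MergeFlags.mergeFlags().simulate(true));
    // dryRun.conflicts() contains "Cannot update path in _timestamp value ..."

    // simulate(false): compatible properties (e.g. "enabled", fielddata settings) are applied in place.
    existing.merge(incoming, DocumentMapper.MergeFlags.mergeFlags().simulate(false));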
@@ -33,15 +33,17 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
 import org.elasticsearch.test.ElasticsearchSingleNodeTest;
 import org.junit.Test;
 
 import java.io.IOException;
-import java.util.Locale;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
 
 import static org.hamcrest.Matchers.*;
@@ -89,7 +91,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
         assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled));
         assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.stored()));
         assertThat(docMapper.timestampFieldMapper().fieldType().indexed(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexed()));
-        assertThat(docMapper.timestampFieldMapper().path(), equalTo(null));
+        assertThat(docMapper.timestampFieldMapper().path(), equalTo(TimestampFieldMapper.Defaults.PATH));
         assertThat(docMapper.timestampFieldMapper().dateTimeFormatter().format(), equalTo(TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT));
     }
 
@@ -390,4 +392,172 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
             assertThat(metaData, is(expected));
         }
     }
+
+    @Test
+    public void testMergingFielddataLoadingWorks() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "lazy").field("format", "doc_values").endObject().field("store", "yes").endObject()
+                .endObject().endObject().string();
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+
+        DocumentMapper docMapper = parser.parse(mapping);
+        assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.LAZY));
+        assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(docMapper.timestampFieldMapper().fieldDataType().getSettings()), equalTo("doc_values"));
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject()
+                .endObject().endObject().string();
+
+        DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
+        assertThat(mergeResult.conflicts().length, equalTo(0));
+        assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.EAGER));
+        assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(docMapper.timestampFieldMapper().fieldDataType().getSettings()), equalTo("array"));
+    }
+
+    @Test
+    public void testParsingNotDefaultTwiceDoesNotChangeMapping() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", true)
+                .field("index", randomBoolean() ? "no" : "analyzed") // default is "not_analyzed" which will be omitted when building the source again
+                .field("store", true)
+                .field("path", "foo")
+                .field("default", "1970-01-01")
+                .startObject("fielddata").field("format", "doc_values").endObject()
+                .endObject()
+                .startObject("properties")
+                .endObject()
+                .endObject().endObject().string();
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+
+        DocumentMapper docMapper = parser.parse(mapping);
+        docMapper.refreshSource();
+        docMapper = parser.parse(docMapper.mappingSource().string());
+        assertThat(docMapper.mappingSource().string(), equalTo(mapping));
+    }
+
+    @Test
+    public void testParsingTwiceDoesNotChangeTokenizeValue() throws Exception {
+        String[] index_options = {"no", "analyzed", "not_analyzed"};
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", true)
+                .field("index", index_options[randomInt(2)])
+                .field("store", true)
+                .field("path", "foo")
+                .field("default", "1970-01-01")
+                .startObject("fielddata").field("format", "doc_values").endObject()
+                .endObject()
+                .startObject("properties")
+                .endObject()
+                .endObject().endObject().string();
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+
+        DocumentMapper docMapper = parser.parse(mapping);
+        boolean tokenized = docMapper.timestampFieldMapper().fieldType().tokenized();
+        docMapper.refreshSource();
+        docMapper = parser.parse(docMapper.mappingSource().string());
+        assertThat(tokenized, equalTo(docMapper.timestampFieldMapper().fieldType().tokenized()));
+    }
+
+    @Test
+    public void testMergingConflicts() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", true)
+                .startObject("fielddata").field("format", "doc_values").endObject()
+                .field("store", "yes")
+                .field("index", "analyzed")
+                .field("path", "foo")
+                .field("default", "1970-01-01")
+                .endObject()
+                .endObject().endObject().string();
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+
+        DocumentMapper docMapper = parser.parse(mapping);
+        assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.LAZY));
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", false)
+                .startObject("fielddata").field("format", "array").endObject()
+                .field("store", "no")
+                .field("index", "no")
+                .field("path", "bar")
+                .field("default", "1970-01-02")
+                .endObject()
+                .endObject().endObject().string();
+
+        DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
+        String[] expectedConflicts = {"mapper [_timestamp] has different index values", "mapper [_timestamp] has different store values", "Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02", "Cannot update path in _timestamp value. Value is foo path in merged mapping is bar", "mapper [_timestamp] has different tokenize values"};
+
+        for (String conflict : mergeResult.conflicts()) {
+            assertThat(conflict, isIn(expectedConflicts));
+        }
+        assertThat(mergeResult.conflicts().length, equalTo(expectedConflicts.length));
+        assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.LAZY));
+        assertTrue(docMapper.timestampFieldMapper().enabled());
+        assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(docMapper.timestampFieldMapper().fieldDataType().getSettings()), equalTo("doc_values"));
+    }
+
+    @Test
+    public void testMergingConflictsForIndexValues() throws Exception {
+        List<String> indexValues = new ArrayList<>();
+        indexValues.add("analyzed");
+        indexValues.add("no");
+        indexValues.add("not_analyzed");
+        String mapping = XContentFactory.jsonBuilder().startObject()
+                .startObject("type")
+                .startObject("_timestamp")
+                .field("index", indexValues.remove(randomInt(2)))
+                .endObject()
+                .endObject().endObject().string();
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+
+        DocumentMapper docMapper = parser.parse(mapping);
+        mapping = XContentFactory.jsonBuilder().startObject()
+                .startObject("type")
+                .startObject("_timestamp")
+                .field("index", indexValues.remove(randomInt(1)))
+                .endObject()
+                .endObject().endObject().string();
+
+        DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
+        String[] expectedConflicts = {"mapper [_timestamp] has different index values", "mapper [_timestamp] has different tokenize values"};
+
+        for (String conflict : mergeResult.conflicts()) {
+            assertThat(conflict, isIn(expectedConflicts));
+        }
+    }
+
+    @Test
+    public void testMergePaths() throws Exception {
+        String[] possiblePathValues = {"some_path", "anotherPath", null};
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+        XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject()
+                .startObject("type")
+                .startObject("_timestamp");
+        String path1 = possiblePathValues[randomInt(2)];
+        if (path1!=null) {
+            mapping1.field("path", path1);
+        }
+        mapping1.endObject()
+                .endObject().endObject();
+        XContentBuilder mapping2 = XContentFactory.jsonBuilder().startObject()
+                .startObject("type")
+                .startObject("_timestamp");
+        String path2 = possiblePathValues[randomInt(2)];
+        if (path2!=null) {
+            mapping2.field("path", path2);
+        }
+        mapping2.endObject()
+                .endObject().endObject();
+
+        testConflict(mapping1.string(), mapping2.string(), parser, (path1 == path2 ? null : "Cannot update path in _timestamp value"));
+    }
+
+    void testConflict(String mapping1, String mapping2, DocumentMapperParser parser, String conflict) throws IOException {
+        DocumentMapper docMapper = parser.parse(mapping1);
+        docMapper.refreshSource();
+        docMapper = parser.parse(docMapper.mappingSource().string());
+        DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping2), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
+        assertThat(mergeResult.conflicts().length, equalTo(conflict == null ? 0:1));
+        if (conflict != null) {
+            assertThat(mergeResult.conflicts()[0], containsString(conflict));
+        }
+    }
 }
@@ -20,13 +20,18 @@
 package org.elasticsearch.index.mapper.update;
 
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
 import org.junit.Test;
 
+import java.io.IOException;
+import java.util.LinkedHashMap;
+
 import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -126,4 +131,62 @@ public class UpdateMappingOnClusterTests extends ElasticsearchIntegrationTest {
             assertThat(previousMapping.getMappings().get(INDEX).get(TYPE).source(), equalTo(currentMapping.getMappings().get(INDEX).get(TYPE).source()));
         }
     }
+
+    @Test
+    public void testUpdateTimestamp() throws IOException {
+        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "lazy").field("format", "doc_values").endObject().field("store", "yes").endObject()
+                .endObject().endObject();
+        client().admin().indices().prepareCreate("test").addMapping("type", mapping).get();
+        GetMappingsResponse appliedMappings = client().admin().indices().prepareGetMappings("test").get();
+        LinkedHashMap timestampMapping = (LinkedHashMap) appliedMappings.getMappings().get("test").get("type").getSourceAsMap().get("_timestamp");
+        assertThat((Boolean) timestampMapping.get("store"), equalTo(true));
+        assertThat((String)((LinkedHashMap) timestampMapping.get("fielddata")).get("loading"), equalTo("lazy"));
+        assertThat((String)((LinkedHashMap) timestampMapping.get("fielddata")).get("format"), equalTo("doc_values"));
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject()
+                .endObject().endObject();
+        PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
+        appliedMappings = client().admin().indices().prepareGetMappings("test").get();
+        timestampMapping = (LinkedHashMap) appliedMappings.getMappings().get("test").get("type").getSourceAsMap().get("_timestamp");
+        assertThat((Boolean) timestampMapping.get("store"), equalTo(true));
+        assertThat((String)((LinkedHashMap) timestampMapping.get("fielddata")).get("loading"), equalTo("eager"));
+        assertThat((String)((LinkedHashMap) timestampMapping.get("fielddata")).get("format"), equalTo("array"));
+    }
+
+    @Test
+    public void testTimestampMergingConflicts() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject(TYPE)
+                .startObject("_timestamp").field("enabled", true)
+                .startObject("fielddata").field("format", "doc_values").endObject()
+                .field("store", "yes")
+                .field("index", "analyzed")
+                .field("path", "foo")
+                .field("default", "1970-01-01")
+                .endObject()
+                .endObject().endObject().string();
+
+        client().admin().indices().prepareCreate(INDEX).addMapping(TYPE, mapping).get();
+
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", false)
+                .startObject("fielddata").field("format", "array").endObject()
+                .field("store", "no")
+                .field("index", "no")
+                .field("path", "bar")
+                .field("default", "1970-01-02")
+                .endObject()
+                .endObject().endObject().string();
+        GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
+        try {
+            client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(mapping).get();
+            fail("This should result in conflicts when merging the mapping");
+        } catch (MergeMappingException e) {
+            String[] expectedConflicts = {"mapper [_timestamp] has different index values", "mapper [_timestamp] has different store values", "Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02", "Cannot update path in _timestamp value. Value is foo path in merged mapping is bar"};
+            for (String conflict : expectedConflicts) {
+                assertThat(e.getDetailedMessage(), containsString(conflict));
+            }
+        }
+        compareMappingOnNodes(mappingsBeforeUpdateResponse);
+    }
 }
@@ -122,7 +122,7 @@ public class SimpleTimestampTests extends ElasticsearchIntegrationTest {
         assertTimestampMappingEnabled(index, type, true);
 
         // update some field in the mapping
-        XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", false).endObject().endObject();
+        XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", false).field("store", true).endObject().endObject();
         PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get();
         assertAcked(putMappingResponse);
 