Remove friction from the mapping changes in 5.0. #16991

This removes friction from upgrading to 5.0 that would otherwise be caused by
mapping changes:
 - old ways of specifying mapping settings (e.g. store: yes instead of
   store: true) still work, but a deprecation warning is logged
 - string mappings that only use the most common options are upgraded
   automatically to text/keyword (see the sketch below)
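
As an illustration (a sketch, not part of this commit; the field name is made
up), a legacy mapping like the following is now accepted on a 5.0 index and
rewritten on the fly, using the same XContentFactory API as the new
StringMappingUpgradeTests below:

    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    // Sketch: a legacy mapping that only uses auto-upgradable parameters.
    // On a 5.0 index this now parses to a [keyword] field and logs
    // deprecation warnings instead of failing.
    XContentBuilder legacyMapping = XContentFactory.jsonBuilder()
            .startObject().startObject("type").startObject("properties")
                .startObject("field")
                    .field("type", "string")        // deprecated, auto-upgraded
                    .field("index", "not_analyzed") // anything but "analyzed" -> keyword
                    .field("store", "yes")          // legacy boolean, warns then maps to true
                .endObject()
            .endObject().endObject().endObject();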
Adrien Grand 2016-03-07 18:23:33 +01:00
parent 32bdb3d617
commit cb2ed50aeb
3 changed files with 256 additions and 39 deletions

StringFieldMapper.java (org.elasticsearch.index.mapper.core)

@@ -26,6 +26,9 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -39,9 +42,12 @@ import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import static org.apache.lucene.index.IndexOptions.NONE;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
@@ -52,6 +58,11 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
     public static final String CONTENT_TYPE = "string";
     private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1;
 
+    private static final Set<String> SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE = new HashSet<>(Arrays.asList(
+            "type",
+            // most common parameters, for which the upgrade is straightforward
+            "index", "store", "doc_values", "omit_norms", "norms", "fields", "copy_to"));
+
     public static class Defaults {
         public static final MappedFieldType FIELD_TYPE = new StringFieldType();
@@ -130,13 +141,33 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
     }
 
     public static class TypeParser implements Mapper.TypeParser {
+
+        private final DeprecationLogger deprecationLogger;
+
+        public TypeParser() {
+            ESLogger logger = Loggers.getLogger(getClass());
+            this.deprecationLogger = new DeprecationLogger(logger);
+        }
+
         @Override
         public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            // TODO: temporarily disabled to give Kibana time to upgrade to text/keyword mappings
-            /*if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) {
+            if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) {
+                // Automatically upgrade simple mappings for ease of upgrade, otherwise fail
+                if (SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE.containsAll(node.keySet())) {
+                    deprecationLogger.deprecated("The [string] field is deprecated, please use [text] or [keyword] instead on [{}]",
+                            fieldName);
+                    final Object index = node.remove("index");
+                    final boolean keyword = index != null && "analyzed".equals(index) == false;
+                    // upgrade the index setting
+                    node.put("index", "no".equals(index) == false);
+                    if (keyword) {
+                        return new KeywordFieldMapper.TypeParser().parse(fieldName, node, parserContext);
+                    } else {
+                        return new TextFieldMapper.TypeParser().parse(fieldName, node, parserContext);
+                    }
+                }
                 throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] "
                         + "or [keyword] field instead for field [" + fieldName + "]");
-            }*/
+            }
             StringFieldMapper.Builder builder = new StringFieldMapper.Builder(fieldName);
             // hack for the fact that string can't just accept true/false for
             // the index property and still accepts no/not_analyzed/analyzed
@@ -241,11 +272,10 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
                 int positionIncrementGap, int ignoreAbove,
                 Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
             super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
-            // TODO: temporarily disabled to give Kibana time to upgrade to text/keyword mappings
-            /*if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0)) {
+            if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0)) {
                 throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] "
                         + "or [keyword] field instead for field [" + fieldType.name() + "]");
-            }*/
+            }
             if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) {
                 throw new MapperParsingException("Field [" + fieldType.name() + "] cannot be analyzed and have doc values");
             }
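
For reference, the auto-upgrade decision in the TypeParser above boils down to
two small rules; this standalone sketch (not code from the commit) restates
them:

    // Sketch (not from this commit): the auto-upgrade decision of
    // StringFieldMapper.TypeParser#parse reduced to two rules.
    public class StringUpgradeRule {

        // Any explicit "index" value other than "analyzed" selects [keyword];
        // an absent "index" setting or "analyzed" keeps full-text behaviour.
        static String upgradedType(Object index) {
            boolean keyword = index != null && "analyzed".equals(index) == false;
            return keyword ? "keyword" : "text";
        }

        // The legacy tri-state collapses to a boolean: only "no" disables indexing.
        static boolean upgradedIndexSetting(Object index) {
            return "no".equals(index) == false;
        }

        public static void main(String[] args) {
            System.out.println(upgradedType("not_analyzed") + " " + upgradedIndexSetting("not_analyzed")); // keyword true
            System.out.println(upgradedType("analyzed") + " " + upgradedIndexSetting("analyzed"));         // text true
            System.out.println(upgradedType("no") + " " + upgradedIndexSetting("no"));                     // keyword false
            System.out.println(upgradedType(null) + " " + upgradedIndexSetting(null));                     // text true
        }
    }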

TypeParsers.java (org.elasticsearch.index.mapper.core)

@@ -25,7 +25,9 @@ import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.elasticsearch.common.joda.Joda;
+import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.loader.SettingsLoader;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -39,11 +41,14 @@ import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.similarity.SimilarityProvider;
 import org.elasticsearch.index.similarity.SimilarityService;
 
+import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Set;
 
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.isArray;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
@@ -63,10 +68,18 @@ public class TypeParsers {
     public static final String INDEX_OPTIONS_POSITIONS = "positions";
     public static final String INDEX_OPTIONS_OFFSETS = "offsets";
 
-    private static boolean nodeBooleanValue(Object node, Mapper.TypeParser.ParserContext parserContext) {
-        if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) {
+    private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(TypeParsers.class));
+    private static final Set<String> BOOLEAN_STRINGS = new HashSet<>(Arrays.asList("true", "false"));
+
+    private static boolean nodeBooleanValue(String name, Object node, Mapper.TypeParser.ParserContext parserContext) {
+        // Hook onto ParseFieldMatcher so that parsing becomes strict when setting index.query.parse.strict
+        if (parserContext.parseFieldMatcher().isStrict()) {
             return XContentMapValues.nodeBooleanValue(node);
         } else {
+            // TODO: remove this leniency in 6.0
+            if (BOOLEAN_STRINGS.contains(node.toString()) == false) {
+                DEPRECATION_LOGGER.deprecated("Expected a boolean for property [{}] but got [{}]", name, node);
+            }
             return XContentMapValues.lenientNodeBooleanValue(node);
         }
     }
@@ -81,13 +94,13 @@ public class TypeParsers {
             builder.precisionStep(nodeIntegerValue(propNode));
             iterator.remove();
         } else if (propName.equals("ignore_malformed")) {
-            builder.ignoreMalformed(nodeBooleanValue(propNode, parserContext));
+            builder.ignoreMalformed(nodeBooleanValue("ignore_malformed", propNode, parserContext));
             iterator.remove();
         } else if (propName.equals("coerce")) {
-            builder.coerce(nodeBooleanValue(propNode, parserContext));
+            builder.coerce(nodeBooleanValue("coerce", propNode, parserContext));
             iterator.remove();
         } else if (propName.equals("omit_norms")) {
-            builder.omitNorms(nodeBooleanValue(propNode, parserContext));
+            builder.omitNorms(nodeBooleanValue("omit_norms", propNode, parserContext));
             iterator.remove();
         } else if (propName.equals("similarity")) {
             SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString());
@@ -112,16 +125,16 @@ public class TypeParsers {
             parseTermVector(name, propNode.toString(), builder);
             iterator.remove();
         } else if (propName.equals("store_term_vectors")) {
-            builder.storeTermVectors(nodeBooleanValue(propNode, parserContext));
+            builder.storeTermVectors(nodeBooleanValue("store_term_vectors", propNode, parserContext));
             iterator.remove();
         } else if (propName.equals("store_term_vector_offsets")) {
-            builder.storeTermVectorOffsets(nodeBooleanValue(propNode, parserContext));
+            builder.storeTermVectorOffsets(nodeBooleanValue("store_term_vector_offsets", propNode, parserContext));
             iterator.remove();
         } else if (propName.equals("store_term_vector_positions")) {
-            builder.storeTermVectorPositions(nodeBooleanValue(propNode, parserContext));
+            builder.storeTermVectorPositions(nodeBooleanValue("store_term_vector_positions", propNode, parserContext));
             iterator.remove();
         } else if (propName.equals("store_term_vector_payloads")) {
-            builder.storeTermVectorPayloads(nodeBooleanValue(propNode, parserContext));
+            builder.storeTermVectorPayloads(nodeBooleanValue("store_term_vector_payloads", propNode, parserContext));
             iterator.remove();
         } else if (propName.equals("analyzer")) {
             NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
@@ -199,13 +212,13 @@ public class TypeParsers {
             builder.index(parseIndex(name, propNode.toString(), parserContext));
             iterator.remove();
         } else if (propName.equals(DOC_VALUES)) {
-            builder.docValues(nodeBooleanValue(propNode, parserContext));
+            builder.docValues(nodeBooleanValue(DOC_VALUES, propNode, parserContext));
             iterator.remove();
         } else if (propName.equals("boost")) {
             builder.boost(nodeFloatValue(propNode));
             iterator.remove();
         } else if (propName.equals("omit_norms")) {
-            builder.omitNorms(nodeBooleanValue(propNode, parserContext));
+            builder.omitNorms(nodeBooleanValue("omit_norms", propNode, parserContext));
             iterator.remove();
         } else if (propName.equals("norms")) {
             final Map<String, Object> properties = nodeMapValue(propNode, "norms");
@@ -227,7 +240,7 @@ public class TypeParsers {
             builder.indexOptions(nodeIndexOptionValue(propNode));
             iterator.remove();
         } else if (propName.equals("include_in_all")) {
-            builder.includeInAll(nodeBooleanValue(propNode, parserContext));
+            builder.includeInAll(nodeBooleanValue("include_in_all", propNode, parserContext));
             iterator.remove();
         } else if (propName.equals("similarity")) {
             SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString());
@@ -353,35 +366,32 @@ public class TypeParsers {
     }
 
     public static boolean parseIndex(String fieldName, String index, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
-        if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) {
-            switch (index) {
-            case "true":
-                return true;
-            case "false":
-                return false;
-            default:
-                throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true] or [false]");
-            }
-        } else {
-            final String normalizedIndex = Strings.toUnderscoreCase(index);
-            switch (normalizedIndex) {
-            case "true":
-            case "not_analyzed":
-            case "analyzed":
-                return true;
-            case "false":
-            case "no":
-                return false;
-            default:
-                throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
-            }
+        switch (index) {
+        case "true":
+            return true;
+        case "false":
+            return false;
+        case "not_analyzed":
+        case "analyzed":
+        case "no":
+            if (parserContext.parseFieldMatcher().isStrict() == false) {
+                DEPRECATION_LOGGER.deprecated("Expected a boolean for property [index] but got [{}]", index);
+                return "no".equals(index) == false;
+            } else {
+                throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true] or [false]");
+            }
+        default:
+            throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true] or [false]");
         }
     }
 
     public static boolean parseStore(String fieldName, String store, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
-        if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) {
+        if (parserContext.parseFieldMatcher().isStrict()) {
             return XContentMapValues.nodeBooleanValue(store);
         } else {
+            if (BOOLEAN_STRINGS.contains(store) == false) {
+                DEPRECATION_LOGGER.deprecated("Expected a boolean for property [store] but got [{}]", store);
+            }
             if ("no".equals(store)) {
                 return false;
             } else if ("yes".equals(store)) {

StringMappingUpgradeTests.java (org.elasticsearch.index.mapper.core, new file)

@@ -0,0 +1,177 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;

public class StringMappingUpgradeTests extends ESSingleNodeTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return pluginList(InternalSettingsPlugin.class);
    }

    public void testUpgradeDefaults() throws IOException {
        IndexService indexService = createIndex("test");
        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").endObject().endObject()
                .endObject().endObject().string();
        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        FieldMapper field = mapper.mappers().getMapper("field");
        assertThat(field, instanceOf(TextFieldMapper.class));
    }

    public void testUpgradeAnalyzedString() throws IOException {
        IndexService indexService = createIndex("test");
        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").field("index", "analyzed").endObject().endObject()
                .endObject().endObject().string();
        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        FieldMapper field = mapper.mappers().getMapper("field");
        assertThat(field, instanceOf(TextFieldMapper.class));
    }

    public void testUpgradeNotAnalyzedString() throws IOException {
        IndexService indexService = createIndex("test");
        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string")
                .field("index", "not_analyzed").endObject().endObject()
                .endObject().endObject().string();
        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        FieldMapper field = mapper.mappers().getMapper("field");
        assertThat(field, instanceOf(KeywordFieldMapper.class));
    }

    public void testUpgradeNotIndexedString() throws IOException {
        IndexService indexService = createIndex("test");
        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").field("index", "no").endObject().endObject()
                .endObject().endObject().string();
        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        FieldMapper field = mapper.mappers().getMapper("field");
        assertThat(field, instanceOf(KeywordFieldMapper.class));
        assertEquals(IndexOptions.NONE, field.fieldType().indexOptions());
    }

    public void testNotSupportedUpgrade() throws IOException {
        IndexService indexService = createIndex("test");
        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").field("analyzer", "keyword").endObject().endObject()
                .endObject().endObject().string();
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> parser.parse("type", new CompressedXContent(mapping)));
        assertThat(e.getMessage(), containsString("The [string] type is removed in 5.0"));
    }

    public void testUpgradeRandomMapping() throws IOException {
        final int iters = 20;
        for (int i = 0; i < iters; ++i) {
            doTestUpgradeRandomMapping(i);
        }
    }

    private void doTestUpgradeRandomMapping(int iter) throws IOException {
        IndexService indexService;
        boolean oldIndex = randomBoolean();
        String indexName = "test" + iter;
        if (oldIndex) {
            Settings settings = Settings.builder()
                    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0)
                    .build();
            indexService = createIndex(indexName, settings);
        } else {
            indexService = createIndex(indexName);
        }
        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string");
        boolean keyword = randomBoolean();
        boolean shouldUpgrade = true;
        if (keyword) {
            mapping.field("index", randomBoolean() ? "not_analyzed" : "no");
        } else if (randomBoolean()) {
            mapping.field("index", "analyzed");
        }
        if (randomBoolean()) {
            mapping.field("store", RandomPicks.randomFrom(random(), Arrays.asList("yes", "no", true, false)));
        }
        if (keyword && randomBoolean()) {
            mapping.field("doc_values", randomBoolean());
        }
        if (randomBoolean()) {
            mapping.field("omit_norms", randomBoolean());
        }
        if (randomBoolean()) {
            mapping.startObject("fields").startObject("raw").field("type", "keyword").endObject().endObject();
        }
        if (randomBoolean()) {
            mapping.field("copy_to", "bar");
        }
        if (randomBoolean()) {
            // this option is not upgraded automatically
            mapping.field("index_options", "docs");
            shouldUpgrade = false;
        }
        mapping.endObject().endObject().endObject().endObject();
        if (oldIndex == false && shouldUpgrade == false) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                    () -> parser.parse("type", new CompressedXContent(mapping.string())));
            assertThat(e.getMessage(), containsString("The [string] type is removed in 5.0"));
        } else {
            DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
            FieldMapper field = mapper.mappers().getMapper("field");
            if (oldIndex) {
                assertThat(field, instanceOf(StringFieldMapper.class));
            } else if (keyword) {
                assertThat(field, instanceOf(KeywordFieldMapper.class));
            } else {
                assertThat(field, instanceOf(TextFieldMapper.class));
            }
        }
    }
}