Only text fields should accept analyzer and term vector settings.

Currently almost all our fields accept the `analyzer` and `term_vector` settings
although they only make sense on text fields. This commit forbids those settings
on all fields but `string` and `_all` for indices created on or after version
2.2.0.
Adrien Grand 2015-12-08 10:21:33 +01:00
parent 473e880a7c
commit 1909c3d8cc
4 changed files with 134 additions and 45 deletions
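
For illustration only (not part of the commit): a minimal sketch of the behavior this change enforces, written against the same test helpers that appear in the diff below (createIndex, XContentFactory.jsonBuilder, DocumentMapperParser). The index and field names here are made up.

    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties")
                .startObject("age")
                    .field("type", "integer")     // not a text field...
                    .field("analyzer", "keyword") // ...so this setting is now rejected
                .endObject()
            .endObject().endObject().endObject().string();

    // On an index created on or after 2.2.0, the mapping no longer parses.
    DocumentMapperParser parser = createIndex("new-index").mapperService().documentMapperParser();
    try {
        parser.parse(mapping);
        fail("analyzer should be rejected on a non-text field");
    } catch (MapperParsingException e) {
        // -> "Mapping definition for [age] has unsupported parameters: [analyzer : keyword]"
    }

    // On an index created before 2.2.0, the same mapping still parses for backcompat.
    Settings oldSettings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0)
            .build();
    createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse(mapping); // no exception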

View File

@@ -46,7 +46,7 @@ import java.util.Map;
 import static org.apache.lucene.index.IndexOptions.NONE;
 import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
+import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;

 public class StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {
@@ -159,7 +159,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         @Override
         public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
             StringFieldMapper.Builder builder = stringField(name);
-            parseField(builder, name, node, parserContext);
+            parseTextField(builder, name, node, parserContext);
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();
                 String propName = Strings.toUnderscoreCase(entry.getKey());

View File

@@ -182,9 +182,72 @@ public class TypeParsers {
         }
     }

-    public static void parseField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
+    private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
         NamedAnalyzer indexAnalyzer = builder.fieldType().indexAnalyzer();
         NamedAnalyzer searchAnalyzer = builder.fieldType().searchAnalyzer();
+        for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
+            Map.Entry<String, Object> entry = iterator.next();
+            final String propName = Strings.toUnderscoreCase(entry.getKey());
+            final Object propNode = entry.getValue();
+            if (propName.equals("term_vector")) {
+                parseTermVector(name, propNode.toString(), builder);
+                iterator.remove();
+            } else if (propName.equals("store_term_vectors")) {
+                builder.storeTermVectors(nodeBooleanValue(propNode));
+                iterator.remove();
+            } else if (propName.equals("store_term_vector_offsets")) {
+                builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
+                iterator.remove();
+            } else if (propName.equals("store_term_vector_positions")) {
+                builder.storeTermVectorPositions(nodeBooleanValue(propNode));
+                iterator.remove();
+            } else if (propName.equals("store_term_vector_payloads")) {
+                builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
+                iterator.remove();
+            } else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0
+                    propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
+                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
+                if (analyzer == null) {
+                    throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
+                }
+                indexAnalyzer = analyzer;
+                iterator.remove();
+            } else if (propName.equals("search_analyzer")) {
+                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
+                if (analyzer == null) {
+                    throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
+                }
+                searchAnalyzer = analyzer;
+                iterator.remove();
+            }
+        }
+
+        if (indexAnalyzer == null) {
+            if (searchAnalyzer != null) {
+                throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set");
+            }
+        } else if (searchAnalyzer == null) {
+            searchAnalyzer = indexAnalyzer;
+        }
+        builder.indexAnalyzer(indexAnalyzer);
+        builder.searchAnalyzer(searchAnalyzer);
+    }
+
+    /**
+     * Parse text field attributes. In addition to {@link #parseField common attributes}
+     * this will parse analysis and term-vectors related settings.
+     */
+    public static void parseTextField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
+        parseField(builder, name, fieldNode, parserContext);
+        parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext);
+    }
+
+    /**
+     * Parse common field attributes such as {@code doc_values} or {@code store}.
+     */
+    public static void parseField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
         Version indexVersionCreated = parserContext.indexVersionCreated();
         for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
             Map.Entry<String, Object> entry = iterator.next();
@@ -202,24 +265,9 @@ public class TypeParsers {
             } else if (propName.equals(DOC_VALUES)) {
                 builder.docValues(nodeBooleanValue(propNode));
                 iterator.remove();
-            } else if (propName.equals("term_vector")) {
-                parseTermVector(name, propNode.toString(), builder);
-                iterator.remove();
             } else if (propName.equals("boost")) {
                 builder.boost(nodeFloatValue(propNode));
                 iterator.remove();
-            } else if (propName.equals("store_term_vectors")) {
-                builder.storeTermVectors(nodeBooleanValue(propNode));
-                iterator.remove();
-            } else if (propName.equals("store_term_vector_offsets")) {
-                builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
-                iterator.remove();
-            } else if (propName.equals("store_term_vector_positions")) {
-                builder.storeTermVectorPositions(nodeBooleanValue(propNode));
-                iterator.remove();
-            } else if (propName.equals("store_term_vector_payloads")) {
-                builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
-                iterator.remove();
             } else if (propName.equals("omit_norms")) {
                 builder.omitNorms(nodeBooleanValue(propNode));
                 iterator.remove();
@@ -250,22 +298,6 @@ public class TypeParsers {
             } else if (propName.equals("index_options")) {
                 builder.indexOptions(nodeIndexOptionValue(propNode));
                 iterator.remove();
-            } else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0
-                    propName.equals("index_analyzer") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
-                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
-                if (analyzer == null) {
-                    throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
-                }
-                indexAnalyzer = analyzer;
-                iterator.remove();
-            } else if (propName.equals("search_analyzer")) {
-                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
-                if (analyzer == null) {
-                    throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
-                }
-                searchAnalyzer = analyzer;
-                iterator.remove();
             } else if (propName.equals("include_in_all")) {
                 builder.includeInAll(nodeBooleanValue(propNode));
                 iterator.remove();
@@ -296,16 +328,11 @@ public class TypeParsers {
                 iterator.remove();
             }
         }
-        if (indexAnalyzer == null) {
-            if (searchAnalyzer != null) {
-                throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set");
-            }
-        } else if (searchAnalyzer == null) {
-            searchAnalyzer = indexAnalyzer;
-        }
-        builder.indexAnalyzer(indexAnalyzer);
-        builder.searchAnalyzer(searchAnalyzer);
+        if (indexVersionCreated.before(Version.V_2_2_0)) {
+            // analyzer, search_analyzer, term_vectors were accepted on all fields
+            // before 2.2, even though it made little sense
+            parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext);
+        }
     }

     public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
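
For context, a hedged sketch (also not part of the commit) of how the split is meant to be used: text-capable mappers call parseTextField, while every other mapper keeps calling parseField, which after this change only falls back to parseAnalyzersAndTermVectors for indices created before 2.2.0. The mapper and builder names below are hypothetical; any analyzer or term_vector setting left behind in the node map is later reported as an unsupported parameter, which is what the numeric tests below assert.

    // Hypothetical TypeParser for a non-text field type.
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            MyPointFieldMapper.Builder builder = new MyPointFieldMapper.Builder(name); // hypothetical builder
            parseField(builder, name, node, parserContext); // common attributes only, no analysis/term vectors
            return builder;
        }
    }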

View File

@@ -49,7 +49,7 @@ import java.util.Map;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
+import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;

 /**
  *
@@ -134,7 +134,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
                 }
             }
-            parseField(builder, builder.name, node, parserContext);
+            parseTextField(builder, builder.name, node, parserContext);
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();
                 String fieldName = Strings.toUnderscoreCase(entry.getKey());

View File

@@ -24,6 +24,8 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
@@ -41,9 +43,11 @@ import org.elasticsearch.index.mapper.string.SimpleStringMappingTests;
 import org.elasticsearch.test.ESSingleNodeTestCase;

 import java.io.IOException;
+import java.util.Arrays;

 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
@@ -510,4 +514,62 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
         assertThat(ts, instanceOf(NumericTokenStream.class));
         assertEquals(expected, ((NumericTokenStream)ts).getPrecisionStep());
     }
+
+    public void testTermVectorsBackCompat() throws Exception {
+        for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) {
+            doTestTermVectorsBackCompat(type);
+        }
+    }
+
+    private void doTestTermVectorsBackCompat(String type) throws Exception {
+        DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser();
+        String mappingWithTV = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                    .startObject("foo")
+                        .field("type", type)
+                        .field("term_vector", "yes")
+                    .endObject()
+                .endObject().endObject().endObject().string();
+        try {
+            parser.parse(mappingWithTV);
+            fail();
+        } catch (MapperParsingException e) {
+            assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [term_vector : yes]"));
+        }
+
+        Settings oldIndexSettings = Settings.builder()
+                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0)
+                .build();
+        parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser();
+        parser.parse(mappingWithTV); // no exception
+    }
+
+    public void testAnalyzerBackCompat() throws Exception {
+        for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) {
+            doTestAnalyzerBackCompat(type);
+        }
+    }
+
+    private void doTestAnalyzerBackCompat(String type) throws Exception {
+        DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser();
+        String mappingWithTV = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                    .startObject("foo")
+                        .field("type", type)
+                        .field("analyzer", "keyword")
+                    .endObject()
+                .endObject().endObject().endObject().string();
+        try {
+            parser.parse(mappingWithTV);
+            fail();
+        } catch (MapperParsingException e) {
+            assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [analyzer : keyword]"));
+        }
+
+        Settings oldIndexSettings = Settings.builder()
+                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0)
+                .build();
+        parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser();
+        parser.parse(mappingWithTV); // no exception
+    }
 }