Mappers: Better validation of mapping JSON

Closes #7205
This commit is contained in:
Colin Goodheart-Smithe 2014-08-20 16:20:36 +01:00
parent bfd1bcd30a
commit 972afe61a0
46 changed files with 485 additions and 142 deletions

View File

@ -28,6 +28,8 @@ import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -37,10 +39,37 @@ import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatService;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatService;
import org.elasticsearch.index.mapper.core.*;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.ByteFieldMapper;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.Murmur3FieldMapper;
import org.elasticsearch.index.mapper.core.ShortFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.internal.*;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.AnalyzerMapper;
import org.elasticsearch.index.mapper.internal.BoostFieldMapper;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.RoutingFieldMapper;
import org.elasticsearch.index.mapper.internal.SizeFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
@ -63,6 +92,7 @@ import static org.elasticsearch.index.mapper.MapperBuilders.doc;
public class DocumentMapperParser extends AbstractIndexComponent {
final AnalysisService analysisService;
private static final ESLogger logger = Loggers.getLogger(DocumentMapperParser.class);
private final PostingsFormatService postingsFormatService;
private final DocValuesFormatService docValuesFormatService;
private final SimilarityLookupService similarityLookupService;
@ -248,13 +278,13 @@ public class DocumentMapperParser extends AbstractIndexComponent {
} else if ("transform".equals(fieldName)) {
iterator.remove();
if (fieldNode instanceof Map) {
parseTransform(docBuilder, (Map<String, Object>) fieldNode);
parseTransform(docBuilder, (Map<String, Object>) fieldNode, parserContext.indexVersionCreated());
} else if (fieldNode instanceof List) {
for (Object transformItem: (List)fieldNode) {
if (!(transformItem instanceof Map)) {
throw new MapperParsingException("Elements of transform list must be objects but one was: " + fieldNode);
}
parseTransform(docBuilder, (Map<String, Object>) transformItem);
parseTransform(docBuilder, (Map<String, Object>) transformItem, parserContext.indexVersionCreated());
}
} else {
throw new MapperParsingException("Transform must be an object or an array but was: " + fieldNode);
@ -263,7 +293,10 @@ public class DocumentMapperParser extends AbstractIndexComponent {
Mapper.TypeParser typeParser = rootTypeParsers.get(fieldName);
if (typeParser != null) {
iterator.remove();
docBuilder.put(typeParser.parse(fieldName, (Map<String, Object>) fieldNode, parserContext));
Map<String, Object> fieldNodeMap = (Map<String, Object>) fieldNode;
docBuilder.put(typeParser.parse(fieldName, fieldNodeMap, parserContext));
fieldNodeMap.remove("type");
checkNoRemainingFields(fieldName, fieldNodeMap, parserContext.indexVersionCreated());
}
}
}
@ -274,9 +307,8 @@ public class DocumentMapperParser extends AbstractIndexComponent {
}
docBuilder.meta(attributes);
if (!mapping.isEmpty()) {
throw new MapperParsingException("Root type mapping not empty after parsing! Remaining fields: " + getRemainingFields(mapping));
}
checkNoRemainingFields(mapping, parserContext.indexVersionCreated(), "Root mapping definition has unsupported parameters: ");
if (!docBuilder.hasIndexAnalyzer()) {
docBuilder.indexAnalyzer(analysisService.defaultIndexAnalyzer());
}
@ -293,16 +325,30 @@ public class DocumentMapperParser extends AbstractIndexComponent {
return documentMapper;
}
private String getRemainingFields(Map<String, ?> map) {
/**
 * Validates that all parameters of the mapping definition for {@code fieldName} were
 * consumed during parsing. Delegates to the three-argument overload with a message
 * that names the offending field.
 *
 * @param fieldName          the mapping field whose definition was parsed
 * @param fieldNodeMap       the (mutated) parameter map; must be empty if parsing consumed everything
 * @param indexVersionCreated version the index was created with; controls throw vs. debug-log
 */
public static void checkNoRemainingFields(String fieldName, Map<String, Object> fieldNodeMap, Version indexVersionCreated) {
    final String message = "Mapping definition for [" + fieldName + "] has unsupported parameters: ";
    checkNoRemainingFields(fieldNodeMap, indexVersionCreated, message);
}
/**
 * Validates that {@code fieldNodeMap} holds no unconsumed mapping parameters.
 * For indices created on or after 2.0.0 an unconsumed parameter is a hard error;
 * for older indices it is only logged at debug level, preserving backwards
 * compatibility with mappings that older versions accepted silently.
 *
 * @param fieldNodeMap       the (mutated) parameter map left over after parsing
 * @param indexVersionCreated version the index was created with
 * @param message            prefix for the error/log message
 * @throws MapperParsingException if parameters remain and the index is 2.0.0+
 */
public static void checkNoRemainingFields(Map<String, Object> fieldNodeMap, Version indexVersionCreated, String message) {
    if (fieldNodeMap.isEmpty()) {
        return; // everything was consumed — nothing to report
    }
    final String remaining = getRemainingFields(fieldNodeMap);
    if (indexVersionCreated.onOrAfter(Version.V_2_0_0)) {
        throw new MapperParsingException(message + remaining);
    }
    logger.debug(message + "{}", remaining);
}
/**
 * Renders the leftover entries of {@code map} as " [key : value]" pairs for use
 * in validation error messages and debug logs.
 *
 * @param map the remaining, unconsumed mapping parameters
 * @return a concatenation of " [key : value]" for every entry (empty string for an empty map)
 */
private static String getRemainingFields(Map<String, ?> map) {
    StringBuilder remainingFields = new StringBuilder();
    for (String key : map.keySet()) {
        // append(Object) stringifies via String.valueOf, so a null parameter value
        // renders as "null" instead of throwing an NPE (unlike value.toString()).
        remainingFields.append(" [").append(key).append(" : ").append(map.get(key)).append("]");
    }
    return remainingFields.toString();
}
@SuppressWarnings("unchecked")
private void parseTransform(DocumentMapper.Builder docBuilder, Map<String, Object> transformConfig) {
private void parseTransform(DocumentMapper.Builder docBuilder, Map<String, Object> transformConfig, Version indexVersionCreated) {
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
scriptParameterParser.parseConfig(transformConfig, true);
@ -319,9 +365,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
Map<String, Object> params = (Map<String, Object>)transformConfig.remove("params");
docBuilder.transform(scriptService, script, scriptType, scriptLang, params);
}
if (!transformConfig.isEmpty()) {
throw new MapperParsingException("Unrecognized parameter in transform config: " + getRemainingFields(transformConfig));
}
checkNoRemainingFields(transformConfig, indexVersionCreated, "Transform config has unsupported parameters: ");
}
private Tuple<String, Map<String, Object>> extractMapping(String type, String source) throws MapperParsingException {

View File

@ -47,6 +47,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -104,11 +105,13 @@ public class BinaryFieldMapper extends AbstractFieldMapper<BytesReference> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
BinaryFieldMapper.Builder builder = binaryField(name);
parseField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("compress") && fieldNode != null) {
builder.compress(nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("compress_threshold") && fieldNode != null) {
if (fieldNode instanceof Number) {
builder.compressThreshold(((Number) fieldNode).longValue());
@ -117,6 +120,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper<BytesReference> {
builder.compressThreshold(ByteSizeValue.parseBytesSizeValue(fieldNode.toString()).bytes());
builder.compress(true);
}
iterator.remove();
}
}
return builder;

View File

@ -40,6 +40,7 @@ import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -107,7 +108,8 @@ public class BooleanFieldMapper extends AbstractFieldMapper<Boolean> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
BooleanFieldMapper.Builder builder = booleanField(name);
parseField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
@ -115,6 +117,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper<Boolean> {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(nodeBooleanValue(propNode));
iterator.remove();
}
}
return builder;

View File

@ -50,6 +50,7 @@ import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -105,7 +106,8 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ByteFieldMapper.Builder builder = byteField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
@ -113,6 +115,7 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(nodeByteValue(propNode));
iterator.remove();
}
}
return builder;

View File

@ -30,13 +30,20 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.NumberType;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.suggest.completion.AnalyzingCompletionLookupProvider;
import org.elasticsearch.search.suggest.completion.CompletionPostingsFormatProvider;
@ -46,7 +53,12 @@ import org.elasticsearch.search.suggest.context.ContextMapping;
import org.elasticsearch.search.suggest.context.ContextMapping.ContextConfig;
import java.io.IOException;
import java.util.*;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import static org.elasticsearch.index.mapper.MapperBuilders.completionField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
@ -147,7 +159,8 @@ public class CompletionFieldMapper extends AbstractFieldMapper<String> {
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
CompletionFieldMapper.Builder builder = completionField(name);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("type")) {
@ -157,24 +170,32 @@ public class CompletionFieldMapper extends AbstractFieldMapper<String> {
NamedAnalyzer analyzer = getNamedAnalyzer(parserContext, fieldNode.toString());
builder.indexAnalyzer(analyzer);
builder.searchAnalyzer(analyzer);
iterator.remove();
} else if (Fields.INDEX_ANALYZER.match(fieldName)) {
builder.indexAnalyzer(getNamedAnalyzer(parserContext, fieldNode.toString()));
iterator.remove();
} else if (Fields.SEARCH_ANALYZER.match(fieldName)) {
builder.searchAnalyzer(getNamedAnalyzer(parserContext, fieldNode.toString()));
iterator.remove();
} else if (fieldName.equals(Fields.PAYLOADS)) {
builder.payloads(Boolean.parseBoolean(fieldNode.toString()));
iterator.remove();
} else if (Fields.PRESERVE_SEPARATORS.match(fieldName)) {
builder.preserveSeparators(Boolean.parseBoolean(fieldNode.toString()));
iterator.remove();
} else if (Fields.PRESERVE_POSITION_INCREMENTS.match(fieldName)) {
builder.preservePositionIncrements(Boolean.parseBoolean(fieldNode.toString()));
iterator.remove();
} else if (Fields.MAX_INPUT_LENGTH.match(fieldName)) {
builder.maxInputLength(Integer.parseInt(fieldNode.toString()));
iterator.remove();
} else if ("fields".equals(fieldName) || "path".equals(fieldName)) {
parseMultiField(builder, name, parserContext, fieldName, fieldNode);
if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
iterator.remove();
}
} else if (fieldName.equals(Fields.CONTEXT)) {
builder.contextMapping(ContextBuilder.loadMappings(fieldNode));
} else {
throw new MapperParsingException("Unknown field [" + fieldName + "]");
builder.contextMapping(ContextBuilder.loadMappings(fieldNode, parserContext.indexVersionCreated()));
iterator.remove();
}
}

View File

@ -56,6 +56,7 @@ import org.elasticsearch.index.similarity.SimilarityProvider;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@ -149,7 +150,8 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
DateFieldMapper.Builder builder = dateField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
@ -157,12 +159,16 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(propNode.toString());
iterator.remove();
} else if (propName.equals("format")) {
builder.dateTimeFormatter(parseDateTimeFormatter(propNode));
iterator.remove();
} else if (propName.equals("numeric_resolution")) {
builder.timeUnit(TimeUnit.valueOf(propNode.toString().toUpperCase(Locale.ROOT)));
iterator.remove();
} else if (propName.equals("locale")) {
builder.locale(LocaleUtils.parse(propNode.toString()));
iterator.remove();
}
}
return builder;

View File

@ -55,6 +55,7 @@ import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -109,7 +110,8 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
DoubleFieldMapper.Builder builder = doubleField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("nullValue") || propName.equals("null_value")) {
@ -117,6 +119,7 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(nodeDoubleValue(propNode));
iterator.remove();
}
}
return builder;

View File

@ -55,6 +55,7 @@ import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -109,7 +110,8 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
FloatFieldMapper.Builder builder = floatField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
@ -117,6 +119,7 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(nodeFloatValue(propNode));
iterator.remove();
}
}
return builder;

View File

@ -51,6 +51,7 @@ import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -105,7 +106,8 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
IntegerFieldMapper.Builder builder = integerField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
@ -113,6 +115,7 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(nodeIntegerValue(propNode));
iterator.remove();
}
}
return builder;

View File

@ -51,6 +51,7 @@ import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -105,7 +106,8 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
LongFieldMapper.Builder builder = longField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
@ -113,6 +115,7 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(nodeLongValue(propNode));
iterator.remove();
}
}
return builder;

View File

@ -58,7 +58,7 @@ public class Murmur3FieldMapper extends LongFieldMapper {
@Override
public Murmur3FieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, ~0L,
Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, null,
ignoreMalformed(context), coerce(context), postingsProvider, docValuesProvider, similarity, normsLoading,
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
@ -72,6 +72,11 @@ public class Murmur3FieldMapper extends LongFieldMapper {
/**
 * Parses the mapping definition for a murmur3 field. Shares all numeric parameters
 * with {@link LongFieldMapper} via {@code parseNumberField}.
 *
 * @param name          the field name
 * @param node          the mapping parameters for this field; consumed entries are removed
 * @param parserContext parse-time services and index metadata
 * @return the configured builder
 */
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
    Builder builder = murmur3Field(name);
    parseNumberField(builder, name, node, parserContext);
    // Because this mapper extends LongFieldMapper, the null_value field is added to the JSON
    // when transferring cluster state between nodes, so we have to remove the entry here so
    // that the unknown-parameter validation doesn't fail on it.
    // TODO should murmur3 support null_value? At the moment a user-supplied null_value is
    // silently ignored, since we can't distinguish the user's original JSON from the
    // serialised cluster state passed between nodes.
    node.remove("null_value");
    return builder;
}
}

View File

@ -52,6 +52,7 @@ import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -107,7 +108,8 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ShortFieldMapper.Builder builder = shortField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
@ -115,6 +117,7 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(nodeShortValue(propNode));
iterator.remove();
}
}
return builder;

View File

@ -37,11 +37,16 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -151,7 +156,8 @@ public class StringFieldMapper extends AbstractFieldMapper<String> implements Al
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
StringFieldMapper.Builder builder = stringField(name);
parseField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
@ -159,12 +165,14 @@ public class StringFieldMapper extends AbstractFieldMapper<String> implements Al
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(propNode.toString());
iterator.remove();
} else if (propName.equals("search_quote_analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
builder.searchQuotedAnalyzer(analyzer);
iterator.remove();
} else if (propName.equals("position_offset_gap")) {
builder.positionOffsetGap(XContentMapValues.nodeIntegerValue(propNode, -1));
// we need to update to actual analyzers if they are not set in this case...
@ -178,10 +186,12 @@ public class StringFieldMapper extends AbstractFieldMapper<String> implements Al
if (builder.searchQuotedAnalyzer == null) {
builder.searchQuotedAnalyzer = parserContext.analysisService().defaultSearchQuoteAnalyzer();
}
iterator.remove();
} else if (propName.equals("ignore_above")) {
builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
} else {
parseMultiField(builder, name, parserContext, propName, propNode);
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
}
}
return builder;

View File

@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -92,20 +93,23 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
@SuppressWarnings("unchecked")
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
TokenCountFieldMapper.Builder builder = tokenCountField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
builder.nullValue(nodeIntegerValue(propNode));
iterator.remove();
} else if (propName.equals("analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
builder.analyzer(analyzer);
iterator.remove();
}
}
parseNumberField(builder, name, node, parserContext);
if (builder.analyzer() == null) {
throw new MapperParsingException("Analyzer must be set for field [" + name + "] but wasn't.");
}

View File

@ -30,16 +30,24 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper.Loading;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.isArray;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;
import static org.elasticsearch.index.mapper.FieldMapper.DOC_VALUES_FORMAT;
/**
@ -57,14 +65,17 @@ public class TypeParsers {
List<AbstractFieldMapper.Builder> fields = null;
String firstType = null;
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("path")) {
pathType = parsePathType(name, fieldNode.toString());
iterator.remove();
} else if (fieldName.equals("fields")) {
Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
for (Map.Entry<String, Object> entry1 : fieldsNode.entrySet()) {
for (Iterator<Map.Entry<String, Object>> fieldsIterator = fieldsNode.entrySet().iterator(); fieldsIterator.hasNext();) {
Map.Entry<String, Object> entry1 = fieldsIterator.next();
String propName = entry1.getKey();
Map<String, Object> propNode = (Map<String, Object>) entry1.getValue();
@ -85,13 +96,18 @@ public class TypeParsers {
}
if (propName.equals(name)) {
mainFieldBuilder = (AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext);
fieldsIterator.remove();
} else {
if (fields == null) {
fields = new ArrayList<>(2);
}
fields.add((AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext));
fieldsIterator.remove();
}
}
fieldsNode.remove("type");
DocumentMapperParser.checkNoRemainingFields(fieldName, fieldsNode, parserContext.indexVersionCreated());
iterator.remove();
}
}
@ -140,64 +156,88 @@ public class TypeParsers {
public static void parseNumberField(NumberFieldMapper.Builder builder, String name, Map<String, Object> numberNode, Mapper.TypeParser.ParserContext parserContext) {
parseField(builder, name, numberNode, parserContext);
for (Map.Entry<String, Object> entry : numberNode.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = numberNode.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("precision_step")) {
builder.precisionStep(nodeIntegerValue(propNode));
iterator.remove();
} else if (propName.equals("ignore_malformed")) {
builder.ignoreMalformed(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("coerce")) {
builder.coerce(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("omit_norms")) {
builder.omitNorms(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("similarity")) {
builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
} else {
parseMultiField(builder, name, parserContext, propName, propNode);
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
}
}
}
public static void parseField(AbstractFieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
for (Map.Entry<String, Object> entry : fieldNode.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
final String propName = Strings.toUnderscoreCase(entry.getKey());
final Object propNode = entry.getValue();
if (propName.equals("index_name")) {
builder.indexName(propNode.toString());
iterator.remove();
} else if (propName.equals("store")) {
builder.store(parseStore(name, propNode.toString()));
iterator.remove();
} else if (propName.equals("index")) {
parseIndex(name, propNode.toString(), builder);
iterator.remove();
} else if (propName.equals("tokenized")) {
builder.tokenized(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals(DOC_VALUES)) {
builder.docValues(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("term_vector")) {
parseTermVector(name, propNode.toString(), builder);
iterator.remove();
} else if (propName.equals("boost")) {
builder.boost(nodeFloatValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vectors")) {
builder.storeTermVectors(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vector_offsets")) {
builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vector_positions")) {
builder.storeTermVectorPositions(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vector_payloads")) {
builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("omit_norms")) {
builder.omitNorms(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("norms")) {
final Map<String, Object> properties = nodeMapValue(propNode, "norms");
for (Map.Entry<String, Object> entry2 : properties.entrySet()) {
for (Iterator<Entry<String, Object>> propsIterator = properties.entrySet().iterator(); propsIterator.hasNext();) {
Entry<String, Object> entry2 = propsIterator.next();
final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
final Object propNode2 = entry2.getValue();
if (propName2.equals("enabled")) {
builder.omitNorms(!nodeBooleanValue(propNode2));
propsIterator.remove();
} else if (propName2.equals(Loading.KEY)) {
builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null));
propsIterator.remove();
}
}
DocumentMapperParser.checkNoRemainingFields(propName, properties, parserContext.indexVersionCreated());
iterator.remove();
} else if (propName.equals("omit_term_freq_and_positions")) {
final IndexOptions op = nodeBooleanValue(propNode) ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_0_0_RC2)) {
@ -205,8 +245,10 @@ public class TypeParsers {
}
// deprecated option for BW compat
builder.indexOptions(op);
iterator.remove();
} else if (propName.equals("index_options")) {
builder.indexOptions(nodeIndexOptionValue(propNode));
iterator.remove();
} else if (propName.equals("analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
@ -214,40 +256,50 @@ public class TypeParsers {
}
builder.indexAnalyzer(analyzer);
builder.searchAnalyzer(analyzer);
iterator.remove();
} else if (propName.equals("index_analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
builder.indexAnalyzer(analyzer);
iterator.remove();
} else if (propName.equals("search_analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
builder.searchAnalyzer(analyzer);
iterator.remove();
} else if (propName.equals("include_in_all")) {
builder.includeInAll(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("postings_format")) {
String postingFormatName = propNode.toString();
builder.postingsFormat(parserContext.postingFormatService().get(postingFormatName));
iterator.remove();
} else if (propName.equals(DOC_VALUES_FORMAT)) {
String docValuesFormatName = propNode.toString();
builder.docValuesFormat(parserContext.docValuesFormatService().get(docValuesFormatName));
iterator.remove();
} else if (propName.equals("similarity")) {
builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
iterator.remove();
} else if (propName.equals("fielddata")) {
final Settings settings = ImmutableSettings.builder().put(SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(propNode, "fielddata"))).build();
builder.fieldDataSettings(settings);
iterator.remove();
} else if (propName.equals("copy_to")) {
parseCopyFields(propNode, builder);
iterator.remove();
}
}
}
public static void parseMultiField(AbstractFieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
public static boolean parseMultiField(AbstractFieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
if (propName.equals("path")) {
builder.multiFieldPathType(parsePathType(name, propNode.toString()));
return true;
} else if (propName.equals("fields")) {
final Map<String, Object> multiFieldsPropNodes;
@ -282,8 +334,12 @@ public class TypeParsers {
throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + multiFieldName + "]");
}
builder.addMultiField(typeParser.parse(multiFieldName, multiFieldNodes, parserContext));
multiFieldNodes.remove("type");
DocumentMapperParser.checkNoRemainingFields(propName, multiFieldNodes, parserContext.indexVersionCreated());
}
return true;
}
return false;
}
private static IndexOptions nodeIndexOptionValue(final Object propNode) {

View File

@ -207,44 +207,57 @@ public class GeoPointFieldMapper extends AbstractFieldMapper<GeoPoint> implement
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = geoPointField(name);
parseField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("path")) {
builder.multiFieldPathType(parsePathType(name, fieldNode.toString()));
iterator.remove();
} else if (fieldName.equals("lat_lon")) {
builder.enableLatLon(XContentMapValues.nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("geohash")) {
builder.enableGeoHash(XContentMapValues.nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("geohash_prefix")) {
builder.geohashPrefix(XContentMapValues.nodeBooleanValue(fieldNode));
if (XContentMapValues.nodeBooleanValue(fieldNode)) {
builder.enableGeoHash(true);
}
iterator.remove();
} else if (fieldName.equals("precision_step")) {
builder.precisionStep(XContentMapValues.nodeIntegerValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("geohash_precision")) {
if (fieldNode instanceof Integer) {
builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(fieldNode));
} else {
builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(fieldNode.toString()));
}
iterator.remove();
} else if (fieldName.equals("validate")) {
builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode);
builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode);
iterator.remove();
} else if (fieldName.equals("validate_lon")) {
builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode);
iterator.remove();
} else if (fieldName.equals("validate_lat")) {
builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode);
iterator.remove();
} else if (fieldName.equals("normalize")) {
builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode);
builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode);
iterator.remove();
} else if (fieldName.equals("normalize_lat")) {
builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode);
iterator.remove();
} else if (fieldName.equals("normalize_lon")) {
builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode);
} else {
parseMultiField(builder, name, parserContext, fieldName, fieldNode);
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
iterator.remove();
}
}
return builder;

View File

@ -45,6 +45,7 @@ import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -172,19 +173,25 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper<String> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = geoShapeField(name);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (Names.TREE.equals(fieldName)) {
builder.tree(fieldNode.toString());
iterator.remove();
} else if (Names.TREE_LEVELS.equals(fieldName)) {
builder.treeLevels(Integer.parseInt(fieldNode.toString()));
iterator.remove();
} else if (Names.TREE_PRESISION.equals(fieldName)) {
builder.treeLevelsByDistance(DistanceUnit.parse(fieldNode.toString(), DistanceUnit.DEFAULT, DistanceUnit.DEFAULT));
iterator.remove();
} else if (Names.DISTANCE_ERROR_PCT.equals(fieldName)) {
builder.distanceErrorPct(Double.parseDouble(fieldNode.toString()));
iterator.remove();
} else if (Names.STRATEGY.equals(fieldName)) {
builder.strategy(fieldNode.toString());
iterator.remove();
}
}
return builder;

View File

@ -46,6 +46,7 @@ import org.elasticsearch.index.similarity.SimilarityLookupService;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -121,13 +122,16 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
AllFieldMapper.Builder builder = all();
parseField(builder, builder.name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("auto_boost")) {
builder.autoBoost = nodeBooleanValue(fieldNode);
iterator.remove();
}
}
return builder;

View File

@ -27,6 +27,7 @@ import org.elasticsearch.index.mapper.*;
import org.elasticsearch.search.highlight.HighlighterContext;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -68,11 +69,13 @@ public class AnalyzerMapper implements Mapper, InternalMapper, RootMapper {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
AnalyzerMapper.Builder builder = analyzer();
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("path")) {
builder.field(fieldNode.toString());
iterator.remove();
}
}
return builder;

View File

@ -48,6 +48,7 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -103,14 +104,16 @@ public class BoostFieldMapper extends NumberFieldMapper<Float> implements Intern
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
String name = node.get("name") == null ? BoostFieldMapper.Defaults.NAME : node.get("name").toString();
String name = node.get("name") == null ? BoostFieldMapper.Defaults.NAME : node.remove("name").toString();
BoostFieldMapper.Builder builder = MapperBuilders.boost(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
builder.nullValue(nodeFloatValue(propNode));
iterator.remove();
}
}
return builder;

View File

@ -50,6 +50,7 @@ import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -110,11 +111,13 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements Intern
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
IdFieldMapper.Builder builder = id();
parseField(builder, builder.name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("path")) {
builder.path(fieldNode.toString());
iterator.remove();
}
}
return builder;

View File

@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -94,12 +95,14 @@ public class IndexFieldMapper extends AbstractFieldMapper<String> implements Int
IndexFieldMapper.Builder builder = MapperBuilders.index();
parseField(builder, builder.name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
EnabledAttributeMapper mapper = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
builder.enabled(mapper);
iterator.remove();
}
}
return builder;

View File

@ -45,6 +45,7 @@ import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -119,14 +120,17 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ParentFieldMapper.Builder builder = parent();
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("type")) {
builder.type(fieldNode.toString());
iterator.remove();
} else if (fieldName.equals("postings_format")) {
String postingFormatName = fieldNode.toString();
builder.postingsFormat(parserContext.postingFormatService().get(postingFormatName));
iterator.remove();
} else if (fieldName.equals("fielddata")) {
// Only take over `loading`, since that is the only option now that is configurable:
Map<String, String> fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata"));
@ -134,6 +138,7 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
Settings settings = settingsBuilder().put(Loading.KEY, fieldDataSettings.get(Loading.KEY)).build();
builder.fieldDataSettings(settings);
}
iterator.remove();
}
}
return builder;

View File

@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -99,13 +100,16 @@ public class RoutingFieldMapper extends AbstractFieldMapper<String> implements I
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
RoutingFieldMapper.Builder builder = routing();
parseField(builder, builder.name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("required")) {
builder.required(nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("path")) {
builder.path(fieldNode.toString());
iterator.remove();
}
}
return builder;

View File

@ -33,6 +33,7 @@ import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -80,13 +81,16 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
SizeFieldMapper.Builder builder = size();
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
iterator.remove();
} else if (fieldName.equals("store")) {
builder.store(parseStore(fieldName, fieldNode.toString()));
iterator.remove();
}
}
return builder;

View File

@ -48,6 +48,7 @@ import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -139,23 +140,32 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
SourceFieldMapper.Builder builder = source();
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("compress") && fieldNode != null) {
builder.compress(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("compress_threshold") && fieldNode != null) {
if (fieldNode instanceof Number) {
builder.compressThreshold(((Number) fieldNode).longValue());
builder.compress(true);
} else {
builder.compressThreshold(ByteSizeValue.parseBytesSizeValue(fieldNode.toString()).bytes());
builder.compress(true);
iterator.remove();
} else if (fieldName.equals("compress")) {
if (fieldNode != null) {
builder.compress(nodeBooleanValue(fieldNode));
}
iterator.remove();
} else if (fieldName.equals("compress_threshold")) {
if (fieldNode != null) {
if (fieldNode instanceof Number) {
builder.compressThreshold(((Number) fieldNode).longValue());
builder.compress(true);
} else {
builder.compressThreshold(ByteSizeValue.parseBytesSizeValue(fieldNode.toString()).bytes());
builder.compress(true);
}
}
iterator.remove();
} else if ("format".equals(fieldName)) {
builder.format(nodeStringValue(fieldNode, null));
iterator.remove();
} else if (fieldName.equals("includes")) {
List<Object> values = (List<Object>) fieldNode;
String[] includes = new String[values.size()];
@ -163,6 +173,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
includes[i] = values.get(i).toString();
}
builder.includes(includes);
iterator.remove();
} else if (fieldName.equals("excludes")) {
List<Object> values = (List<Object>) fieldNode;
String[] excludes = new String[values.size()];
@ -170,6 +181,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
excludes[i] = values.get(i).toString();
}
builder.excludes(excludes);
iterator.remove();
}
}
return builder;

View File

@ -39,6 +39,7 @@ import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -98,17 +99,20 @@ public class TTLFieldMapper extends LongFieldMapper implements InternalMapper, R
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
TTLFieldMapper.Builder builder = ttl();
parseField(builder, builder.name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
builder.enabled(enabledState);
iterator.remove();
} else if (fieldName.equals("default")) {
TimeValue ttlTimeValue = nodeTimeValue(fieldNode, null);
if (ttlTimeValue != null) {
builder.defaultTTL(ttlTimeValue.millis());
}
iterator.remove();
}
}
return builder;

View File

@ -39,6 +39,7 @@ import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@ -137,18 +138,23 @@ public class TimestampFieldMapper extends DateFieldMapper implements InternalMap
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
TimestampFieldMapper.Builder builder = timestamp();
parseField(builder, builder.name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
builder.enabled(enabledState);
iterator.remove();
} else if (fieldName.equals("path")) {
builder.path(fieldNode.toString());
iterator.remove();
} else if (fieldName.equals("format")) {
builder.dateTimeFormatter(parseDateTimeFormatter(fieldNode.toString()));
iterator.remove();
} else if (fieldName.equals("default")) {
builder.defaultTimestamp(fieldNode == null ? null : fieldNode.toString());
iterator.remove();
}
}
return builder;

View File

@ -33,6 +33,7 @@ import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -75,12 +76,14 @@ public class VersionFieldMapper extends AbstractFieldMapper<Long> implements Int
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = version();
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals(DOC_VALUES_FORMAT)) {
String docValuesFormatName = fieldNode.toString();
builder.docValuesFormat(parserContext.docValuesFormatService().get(docValuesFormatName));
iterator.remove();
}
}
return builder;

View File

@ -55,6 +55,7 @@ import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
@ -139,7 +140,8 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
IpFieldMapper.Builder builder = ipField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
@ -147,6 +149,7 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(propNode.toString());
iterator.remove();
}
}
return builder;

View File

@ -36,19 +36,48 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ObjectMapperListener;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.StrictDynamicMappingException;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
import static com.google.common.collect.Lists.newArrayList;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.MapperBuilders.binaryField;
import static org.elasticsearch.index.mapper.MapperBuilders.booleanField;
import static org.elasticsearch.index.mapper.MapperBuilders.dateField;
import static org.elasticsearch.index.mapper.MapperBuilders.doubleField;
import static org.elasticsearch.index.mapper.MapperBuilders.floatField;
import static org.elasticsearch.index.mapper.MapperBuilders.integerField;
import static org.elasticsearch.index.mapper.MapperBuilders.longField;
import static org.elasticsearch.index.mapper.MapperBuilders.object;
import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType;
/**
@ -183,13 +212,15 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ObjectMapper.Builder builder = createBuilder(name);
for (Map.Entry<String, Object> entry : node.entrySet()) {
parseNested(name, node, builder);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder);
parseObjectProperties(name, fieldName, fieldNode, builder);
if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder) || parseObjectProperties(name, fieldName, fieldNode, builder)) {
iterator.remove();
}
}
parseNested(name, node, builder);
return builder;
}
@ -221,10 +252,12 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
return false;
}
protected static void parseObjectProperties(String name, String fieldName, Object fieldNode, ObjectMapper.Builder builder) {
if (fieldName.equals("path")) {
protected static boolean parseObjectProperties(String name, String fieldName, Object fieldNode, ObjectMapper.Builder builder) {
if (fieldName.equals("path")) {
builder.pathType(parsePathType(name, fieldNode.toString()));
return true;
}
return false;
}
protected static void parseNested(String name, Map<String, Object> node, ObjectMapper.Builder builder) {
@ -245,10 +278,12 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
fieldNode = node.get("include_in_parent");
if (fieldNode != null) {
nestedIncludeInParent = nodeBooleanValue(fieldNode);
node.remove("include_in_parent");
}
fieldNode = node.get("include_in_root");
if (fieldNode != null) {
nestedIncludeInRoot = nodeBooleanValue(fieldNode);
node.remove("include_in_root");
}
if (nested) {
builder.nested = Nested.newNested(nestedIncludeInParent, nestedIncludeInRoot);
@ -257,12 +292,15 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
}
protected static void parseProperties(ObjectMapper.Builder objBuilder, Map<String, Object> propsNode, ParserContext parserContext) {
for (Map.Entry<String, Object> entry : propsNode.entrySet()) {
Iterator<Map.Entry<String, Object>> iterator = propsNode.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<String, Object> entry = iterator.next();
String propName = entry.getKey();
//Should accept empty arrays, as a work around for when the user can't provide an empty Map. (PHP for example)
// Should accept empty arrays, as a work around for when the
// user can't provide an empty Map. (PHP for example)
boolean isEmptyList = entry.getValue() instanceof List && ((List<?>) entry.getValue()).isEmpty();
if (entry.getValue() instanceof Map) {
if (entry.getValue() instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, Object> propNode = (Map<String, Object>) entry.getValue();
String type;
@ -274,9 +312,10 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
if (propNode.get("properties") != null) {
type = ObjectMapper.CONTENT_TYPE;
} else if (propNode.size() == 1 && propNode.get("enabled") != null) {
// if there is a single property with the enabled flag on it, make it an object
// (usually, setting enabled to false to not index any type, including core values, which
// non enabled object type supports).
// if there is a single property with the enabled
// flag on it, make it an object
// (usually, setting enabled to false to not index
// any type, including core values, which
type = ObjectMapper.CONTENT_TYPE;
} else {
throw new MapperParsingException("No type specified for property [" + propName + "]");
@ -288,10 +327,20 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + propName + "]");
}
objBuilder.add(typeParser.parse(propName, propNode, parserContext));
} else if (!isEmptyList) {
throw new MapperParsingException("Expected map for property [fields] on field [" + propName + "] but got a " + propName.getClass());
propNode.remove("type");
DocumentMapperParser.checkNoRemainingFields(propName, propNode, parserContext.indexVersionCreated());
iterator.remove();
} else if (isEmptyList) {
iterator.remove();
} else {
throw new MapperParsingException("Expected map for property [fields] on field [" + propName + "] but got a "
+ propName.getClass());
}
}
DocumentMapperParser.checkNoRemainingFields(propsNode, parserContext.indexVersionCreated(),
"DocType mapping definition has unsupported parameters: ");
}
protected Builder createBuilder(String name) {

View File

@ -123,6 +123,7 @@ public class CategoryContextMapping extends ContextMapping {
if (fieldName != null) {
mapping.fieldName(fieldName.toString());
config.remove(FIELD_FIELDNAME);
}
if (defaultValues != null) {
@ -133,6 +134,7 @@ public class CategoryContextMapping extends ContextMapping {
} else {
mapping.addDefaultValue(defaultValues.toString());
}
config.remove(FIELD_MISSING);
}
return mapping.build();

View File

@ -21,6 +21,8 @@ package org.elasticsearch.search.suggest.context;
import com.google.common.collect.Maps;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import java.util.Map;
import java.util.Map.Entry;
@ -91,13 +93,14 @@ public abstract class ContextBuilder<E extends ContextMapping> {
return new CategoryContextMapping.Builder(name, fieldname).addDefaultValues(defaultValues);
}
public static SortedMap<String, ContextMapping> loadMappings(Object configuration) throws ElasticsearchParseException {
public static SortedMap<String, ContextMapping> loadMappings(Object configuration, Version indexVersionCreated)
throws ElasticsearchParseException {
if (configuration instanceof Map) {
Map<String, Object> configurations = (Map<String, Object>)configuration;
SortedMap<String, ContextMapping> mappings = Maps.newTreeMap();
for (Entry<String,Object> config : configurations.entrySet()) {
String name = config.getKey();
mappings.put(name, loadMapping(name, (Map<String, Object>) config.getValue()));
mappings.put(name, loadMapping(name, (Map<String, Object>) config.getValue(), indexVersionCreated));
}
return mappings;
} else if (configuration == null) {
@ -107,7 +110,8 @@ public abstract class ContextBuilder<E extends ContextMapping> {
}
}
protected static ContextMapping loadMapping(String name, Map<String, Object> config) throws ElasticsearchParseException {
protected static ContextMapping loadMapping(String name, Map<String, Object> config, Version indexVersionCreated)
throws ElasticsearchParseException {
final Object argType = config.get(ContextMapping.FIELD_TYPE);
if (argType == null) {
@ -115,13 +119,17 @@ public abstract class ContextBuilder<E extends ContextMapping> {
}
final String type = argType.toString();
ContextMapping contextMapping;
if (GeolocationContextMapping.TYPE.equals(type)) {
return GeolocationContextMapping.load(name, config);
contextMapping = GeolocationContextMapping.load(name, config);
} else if (CategoryContextMapping.TYPE.equals(type)) {
return CategoryContextMapping.load(name, config);
contextMapping = CategoryContextMapping.load(name, config);
} else {
throw new ElasticsearchParseException("unknown context type[" + type + "]");
}
config.remove(ContextMapping.FIELD_TYPE);
DocumentMapperParser.checkNoRemainingFields(name, config, indexVersionCreated);
return contextMapping;
}
}

View File

@ -127,12 +127,16 @@ public class GeolocationContextMapping extends ContextMapping {
// ignore precision
} else if (configPrecision instanceof Integer) {
builder.precision((Integer) configPrecision);
config.remove(FIELD_PRECISION);
} else if (configPrecision instanceof Long) {
builder.precision((Long) configPrecision);
config.remove(FIELD_PRECISION);
} else if (configPrecision instanceof Double) {
builder.precision((Double) configPrecision);
config.remove(FIELD_PRECISION);
} else if (configPrecision instanceof Float) {
builder.precision((Float) configPrecision);
config.remove(FIELD_PRECISION);
} else if (configPrecision instanceof Iterable) {
for (Object precision : (Iterable)configPrecision) {
if (precision instanceof Integer) {
@ -147,13 +151,16 @@ public class GeolocationContextMapping extends ContextMapping {
builder.precision(precision.toString());
}
}
config.remove(FIELD_PRECISION);
} else {
builder.precision(configPrecision.toString());
config.remove(FIELD_PRECISION);
}
final Object configNeighbors = config.get(FIELD_NEIGHBORS);
if (configNeighbors != null) {
builder.neighbors((Boolean) configNeighbors);
config.remove(FIELD_NEIGHBORS);
}
final Object def = config.get(FIELD_MISSING);
@ -176,11 +183,13 @@ public class GeolocationContextMapping extends ContextMapping {
} else {
throw new ElasticsearchParseException("field [" + FIELD_MISSING + "] must be of type string or list");
}
config.remove(FIELD_MISSING);
}
final Object fieldName = config.get(FIELD_FIELDNAME);
if (fieldName != null) {
builder.field(fieldName.toString());
config.remove(FIELD_FIELDNAME);
}
}
return builder.build();

View File

@ -473,7 +473,7 @@ public class GetActionTests extends ElasticsearchIntegrationTest {
.startObject(type)
.startObject("_source")
.array("includes", "included")
.array("exlcudes", "excluded")
.array("excludes", "excluded")
.endObject()
.endObject()
.endObject()
@ -806,13 +806,13 @@ public class GetActionTests extends ElasticsearchIntegrationTest {
assertAcked(prepareCreate("my-index")
.setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1))
.addMapping("my-type2", jsonBuilder().startObject().startObject("my-type2").startObject("properties")
.startObject("field1").field("type", "object")
.startObject("field2").field("type", "object")
.startObject("field3").field("type", "object")
.startObject("field1").field("type", "object").startObject("properties")
.startObject("field2").field("type", "object").startObject("properties")
.startObject("field3").field("type", "object").startObject("properties")
.startObject("field4").field("type", "string").field("store", "yes")
.endObject()
.endObject()
.endObject()
.endObject().endObject()
.endObject().endObject()
.endObject().endObject()
.endObject().endObject().endObject()));
BytesReference source = jsonBuilder().startObject()
@ -977,8 +977,7 @@ public class GetActionTests extends ElasticsearchIntegrationTest {
" \"enabled\": " + randomBoolean() + "\n" +
" },\n" +
" \"_parent\": {\n" +
" \"type\": \"parentdoc\",\n" +
" \"store\": \"" + storedString + "\"\n" +
" \"type\": \"parentdoc\"\n" +
" },\n" +
" \"_ttl\": {\n" +
" \"enabled\": true,\n" +

View File

@ -37,6 +37,7 @@ import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -127,11 +128,14 @@ public class ExternalMapper extends AbstractFieldMapper<Object> {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ExternalMapper.Builder builder = new ExternalMapper.Builder(name, generatedValue, mapperName);
parseField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
parseMultiField(builder, name, parserContext, propName, propNode);
if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
}
}
return builder;

View File

@ -99,12 +99,12 @@ public class ExternalValuesMapperIntegrationTests extends ElasticsearchIntegrati
.startObject("fields")
.startObject("f")
.field("type", "string")
.field("stored", "yes")
.field("store", "yes")
.startObject("fields")
.startObject("raw")
.field("type", "string")
.field("index", "not_analyzed")
.field("stored", "yes")
.field("store", "yes")
.endObject()
.endObject()
.endObject()

View File

@ -85,12 +85,12 @@ public class SimpleExternalMappingTests extends ElasticsearchSingleNodeLuceneTes
.startObject("fields")
.startObject("field")
.field("type", "string")
.field("stored", "yes")
.field("store", "yes")
.startObject("fields")
.startObject("raw")
.field("type", "string")
.field("index", "not_analyzed")
.field("stored", "yes")
.field("store", "yes")
.endObject()
.endObject()
.endObject()

View File

@ -7,17 +7,13 @@
dynamic:false,
enabled:true,
_id:{
name:"_id",
index_name:"_id"
},
_source:{
name:"_source"
},
_type:{
name:"_type"
},
_boost:{
name:"_boost",
null_value:2.0
},
properties:{

View File

@ -53,17 +53,50 @@ import org.elasticsearch.test.junit.annotations.TestLogging;
import org.junit.Test;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Set;
import java.util.TreeSet;
import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder;
import static org.elasticsearch.common.settings.ImmutableSettings.builder;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.*;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder;
import static org.elasticsearch.index.query.FilterBuilders.termFilter;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyArray;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
/**
*
@ -525,7 +558,8 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
@Test
public void percolateWithSizeField() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("_size").field("enabled", true).field("stored", "yes").endObject()
.startObject("_size").field("enabled", true)
.field("store", "yes").endObject()
.startObject("properties").startObject("field1").field("type", "string").endObject().endObject()
.endObject().endObject().string();
@ -1678,7 +1712,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
.startObject("custom")
.field("dynamic", true)
.field("type", "object")
.field("incude_in_all", false)
.field("include_in_all", false)
.endObject()
.endObject()
.startArray("dynamic_templates")
@ -1695,9 +1729,9 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
ensureGreen("idx");
try {
client().prepareIndex("idx", PercolatorService.TYPE_NAME, "1")
.setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryString("color:red")).endObject())
.get();
client().prepareIndex("idx", PercolatorService.TYPE_NAME, "1")
.setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryString("color:red")).endObject())
.get();
fail();
} catch (PercolatorException e) {

View File

@ -43,7 +43,6 @@ public class ParentIdAggTests extends ElasticsearchIntegrationTest {
.startObject("childtype")
.startObject("_parent")
.field("type", "parenttype")
.field("store", true)
.endObject()
.endObject();
assertAcked(prepareCreate("testidx").addMapping("childtype", mapping));

View File

@ -428,13 +428,13 @@ public class SearchFieldsTests extends ElasticsearchIntegrationTest {
client().admin().indices().prepareCreate("my-index")
.setSettings(ImmutableSettings.settingsBuilder().put("index.refresh_interval", -1))
.addMapping("my-type2", jsonBuilder().startObject().startObject("my-type2").startObject("properties")
.startObject("field1").field("type", "object")
.startObject("field2").field("type", "object")
.startObject("field3").field("type", "object")
.startObject("field1").field("type", "object").startObject("properties")
.startObject("field2").field("type", "object").startObject("properties")
.startObject("field3").field("type", "object").startObject("properties")
.startObject("field4").field("type", "string").field("store", "yes")
.endObject()
.endObject()
.endObject()
.endObject().endObject()
.endObject().endObject()
.endObject().endObject()
.endObject().endObject().endObject())
.get();

View File

@ -224,7 +224,6 @@ public class GeoFilterTests extends ElasticsearchIntegrationTest {
.startObject("area")
.field("type", "geo_shape")
.field("tree", "geohash")
.field("store", true)
.endObject()
.endObject()
.endObject()
@ -405,8 +404,6 @@ public class GeoFilterTests extends ElasticsearchIntegrationTest {
.endObject()
.startObject("location")
.field("type", "geo_shape")
.field("lat_lon", true)
.field("store", true)
.endObject()
.endObject()
.endObject()
@ -477,7 +474,7 @@ public class GeoFilterTests extends ElasticsearchIntegrationTest {
ensureYellow();
client().admin().indices().prepareCreate("locations").addMapping("location", "pin", "type=geo_point,geohash_prefix=true,latlon=false").execute().actionGet();
client().admin().indices().prepareCreate("locations").addMapping("location", "pin", "type=geo_point,geohash_prefix=true,lat_lon=false").execute().actionGet();
// Index a pin
client().prepareIndex("locations", "location", "1").setCreate(true).setSource("pin", geohash).execute().actionGet();

View File

@ -1477,7 +1477,7 @@ public class HighlighterSearchTests extends ElasticsearchIntegrationTest {
@Test
public void testFastVectorHighlighterMultipleFields() {
assertAcked(prepareCreate("test")
.addMapping("type1", "field1", "type=string,term_vectors=with_positions_offsets", "field2", "type=string,term_vectors=with_positions_offsets"));
.addMapping("type1", "field1", "type=string,term_vector=with_positions_offsets", "field2", "type=string,term_vector=with_positions_offsets"));
ensureGreen();
index("test", "type1", "1", "field1", "The <b>quick<b> brown fox", "field2", "The <b>slow<b> brown fox");

View File

@ -133,19 +133,16 @@ public class MultiMatchQueryTests extends ElasticsearchIntegrationTest {
.startObject("category")
.field("type", "string")
.field("analyzer", "category")
.field("index_option", "docs")
.endObject()
.startObject("first_name")
.field("type", "string")
.field("omit_norms", "true")
.field("copy_to", "first_name_phrase")
.field("index_option", "docs")
.endObject()
.startObject("last_name")
.field("type", "string")
.field("omit_norms", "true")
.field("copy_to", "last_name_phrase")
.field("index_option", "docs")
.endObject()
.endObject()
.endObject().endObject();

View File

@ -135,8 +135,9 @@ public class GeoLocationContextMappingTest extends ElasticsearchTestCase {
pointAsMap.put("lon", 0d);
config.put("default", pointAsMap);
}
HashMap<String, Object> config2 = new HashMap<>(config);
GeolocationContextMapping mapping = GeolocationContextMapping.load("foo", config);
GeolocationContextMapping mapping2 = GeolocationContextMapping.load("foo", config);
GeolocationContextMapping mapping2 = GeolocationContextMapping.load("foo", config2);
assertEquals(mapping, mapping2);
assertEquals(mapping.hashCode(), mapping2.hashCode());