Fix LineLength Check Suppressions: index.mapper (#35087)

Relates #34884
Authored by Vladimir Dolzhenko on 2018-10-30 18:00:14 +01:00; committed by GitHub
parent 70e939ee38
commit be75b40a29
42 changed files with 760 additions and 530 deletions
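This change removes the LineLength checkstyle suppressions for the org.elasticsearch.index.mapper sources and tests and rewraps the offending lines so each file passes the check on its own. The most common fix, visible throughout the hunks below, is splitting a long concatenated exception message with the + operator leading each continuation line. A minimal illustrative sketch of that convention (MaxInputLengthSetting is a made-up class, not code from this diff; the exact column limit comes from the project's checkstyle configuration and is not shown here):

    // Illustrative only: shows the message-wrapping style used throughout this commit.
    public class MaxInputLengthSetting {

        private final int maxInputLength;

        public MaxInputLengthSetting(int maxInputLength) {
            if (maxInputLength <= 0) {
                // The message is split so each line stays under the configured limit;
                // the '+' starts the continuation line, matching the style in this diff.
                throw new IllegalArgumentException("max_input_length must be > 0 but was ["
                    + maxInputLength + "]");
            }
            this.maxInputLength = maxInputLength;
        }

        public int get() {
            return maxInputLength;
        }
    }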

buildSrc/src/main/resources/checkstyle_suppressions.xml

@ -103,27 +103,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]InternalEngine.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]LiveVersionMap.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]get[/\\]ShardGetService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]CompletionFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentFieldMappers.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentParser.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DynamicTemplate.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]FieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]FieldNamesFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]FieldTypeLookup.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeoShapeFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]IdFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]IndexFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MappedFieldType.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MapperService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]Mapping.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MetadataFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ObjectMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]RootObjectMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]RoutingFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]SourceFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]TypeFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]VersionFieldMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]merge[/\\]MergeStats.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryBuilders.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryValidationException.java" checks="LineLength" />
@ -229,29 +208,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]PreBuiltAnalyzerTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]InternalEngineMergeIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]InternalEngineTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]CompletionFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]CompletionFieldTypeTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]CopyToMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentParserTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DynamicMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ExternalMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ExternalMetadataMapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]FieldNamesFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeoPointFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeoShapeFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]IdFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]IndexFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MapperServiceTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MultiFieldCopyToMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MultiFieldTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]NestedObjectMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]NullValueObjectMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ObjectMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]PathMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]RoutingFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]SourceFieldMapperTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]UpdateMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]BoolQueryBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]BoostingQueryBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]GeoDistanceQueryBuilderTests.java" checks="LineLength" />

server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java

@ -117,7 +117,8 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext)
throws MapperParsingException {
CompletionFieldMapper.Builder builder = new CompletionFieldMapper.Builder(name);
NamedAnalyzer indexAnalyzer = null;
NamedAnalyzer searchAnalyzer = null;
@ -368,7 +369,8 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
*/
public Builder maxInputLength(int maxInputLength) {
if (maxInputLength <= 0) {
throw new IllegalArgumentException(Fields.MAX_INPUT_LENGTH.getPreferredName() + " must be > 0 but was [" + maxInputLength + "]");
throw new IllegalArgumentException(Fields.MAX_INPUT_LENGTH.getPreferredName()
+ " must be > 0 but was [" + maxInputLength + "]");
}
this.maxInputLength = maxInputLength;
return this;
@ -400,13 +402,15 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
completionFieldType.setContextMappings(contextMappings);
completionFieldType.setPreservePositionIncrements(preservePositionIncrements);
completionFieldType.setPreserveSep(preserveSeparators);
return new CompletionFieldMapper(name, this.fieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, maxInputLength);
return new CompletionFieldMapper(name, this.fieldType, context.indexSettings(),
multiFieldsBuilder.build(this, context), copyTo, maxInputLength);
}
}
private int maxInputLength;
public CompletionFieldMapper(String simpleName, MappedFieldType fieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, int maxInputLength) {
public CompletionFieldMapper(String simpleName, MappedFieldType fieldType, Settings indexSettings,
MultiFields multiFields, CopyTo copyTo, int maxInputLength) {
super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo);
this.maxInputLength = maxInputLength;
}
@ -506,7 +510,8 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
* "STRING" - interpreted as the field value (input)
* "OBJECT" - { "input": STRING|ARRAY, "weight": STRING|INT, "contexts": ARRAY|OBJECT }
*/
private void parse(ParseContext parseContext, Token token, XContentParser parser, Map<String, CompletionInputMetaData> inputMap) throws IOException {
private void parse(ParseContext parseContext, Token token,
XContentParser parser, Map<String, CompletionInputMetaData> inputMap) throws IOException {
String currentFieldName = null;
if (token == Token.VALUE_STRING) {
inputMap.put(parser.text(), new CompletionInputMetaData(parser.text(), Collections.emptyMap(), 1));
@ -518,7 +523,8 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
if (token == Token.FIELD_NAME) {
currentFieldName = parser.currentName();
if (!ALLOWED_CONTENT_FIELD_NAMES.contains(currentFieldName)) {
throw new IllegalArgumentException("unknown field name [" + currentFieldName + "], must be one of " + ALLOWED_CONTENT_FIELD_NAMES);
throw new IllegalArgumentException("unknown field name [" + currentFieldName
+ "], must be one of " + ALLOWED_CONTENT_FIELD_NAMES);
}
} else if (currentFieldName != null) {
if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) {
@ -529,7 +535,8 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
if (token == Token.VALUE_STRING) {
inputs.add(parser.text());
} else {
throw new IllegalArgumentException("input array must have string values, but was [" + token.name() + "]");
throw new IllegalArgumentException("input array must have string values, but was ["
+ token.name() + "]");
}
}
} else {
@ -552,8 +559,10 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
} else {
throw new IllegalArgumentException("weight must be a number or string, but was [" + token.name() + "]");
}
if (weightValue.longValue() < 0 || weightValue.longValue() > Integer.MAX_VALUE) { // always parse a long to make sure we don't get overflow
throw new IllegalArgumentException("weight must be in the interval [0..2147483647], but was [" + weightValue.longValue() + "]");
// always parse a long to make sure we don't get overflow
if (weightValue.longValue() < 0 || weightValue.longValue() > Integer.MAX_VALUE) {
throw new IllegalArgumentException("weight must be in the interval [0..2147483647], but was ["
+ weightValue.longValue() + "]");
}
weight = weightValue.intValue();
} else if (Fields.CONTENT_FIELD_NAME_CONTEXTS.equals(currentFieldName)) {
@ -587,7 +596,8 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
}
}
} else {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [" + parser.currentName() + "]: expected text or object, but got " + token.name());
throw new ParsingException(parser.getTokenLocation(), "failed to parse [" + parser.currentName()
+ "]: expected text or object, but got " + token.name());
}
}
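Where a constructor or method signature itself runs too long, as with TypeParser.parse and the CompletionFieldMapper constructor above, the parameter list is split after a parameter and the remainder continues on the next line. A short sketch of the same convention using invented types:

    // ExampleMapper and its parameters are invented; only the signature-wrapping layout mirrors this commit.
    import java.util.Map;

    public class ExampleMapper {

        private final String simpleName;
        private final Map<String, Object> settings;

        public ExampleMapper(String simpleName, Map<String, Object> settings,
                             boolean stored, boolean indexed, int maxInputLength) {
            // The signature is broken after 'settings' so neither line exceeds the limit.
            this.simpleName = simpleName;
            this.settings = settings;
        }
    }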

server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java

@ -72,8 +72,9 @@ public class DocumentMapper implements ToXContentFragment {
this.rootObjectMapper = builder.build(builderContext);
final String type = rootObjectMapper.name();
DocumentMapper existingMapper = mapperService.documentMapper(type);
for (Map.Entry<String, MetadataFieldMapper.TypeParser> entry : mapperService.mapperRegistry.getMetadataMapperParsers().entrySet()) {
final DocumentMapper existingMapper = mapperService.documentMapper(type);
final Map<String, TypeParser> metadataMapperParsers = mapperService.mapperRegistry.getMetadataMapperParsers();
for (Map.Entry<String, MetadataFieldMapper.TypeParser> entry : metadataMapperParsers.entrySet()) {
final String name = entry.getKey();
final MetadataFieldMapper existingMetadataMapper = existingMapper == null
? null
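A second technique used above is extracting a long sub-expression into a named local before the loop, as with metadataMapperParsers here (and searchAnalyzerName in FieldMapper below), rather than wrapping the line. A self-contained sketch of that refactoring with hypothetical types:

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical registry/handler types; only the extract-a-local refactoring mirrors this commit.
    class HandlerRegistry {
        private final Map<String, Runnable> handlers = new HashMap<>();

        Map<String, Runnable> getHandlerParsers() {
            return handlers;
        }
    }

    class HandlerRunner {
        void runAll(HandlerRegistry registry) {
            // Before: iterating registry.getHandlerParsers().entrySet() inline pushed the loop header past the limit.
            final Map<String, Runnable> handlerParsers = registry.getHandlerParsers();
            for (Map.Entry<String, Runnable> entry : handlerParsers.entrySet()) {
                entry.getValue().run();
            }
        }
    }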

server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java

@ -107,7 +107,8 @@ final class DocumentParser {
}
if (Objects.equals(source.type(), docMapper.type()) == false) {
throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + docMapper.type() + "]");
throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type ["
+ docMapper.type() + "]");
}
}
@ -136,7 +137,8 @@ final class DocumentParser {
// empty doc, we can handle it...
return true;
} else if (token != XContentParser.Token.FIELD_NAME) {
throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
throw new MapperParsingException("Malformed content, after first object, either the type field"
+ " or the actual properties should exist");
}
}
return false;
@ -355,7 +357,8 @@ final class DocumentParser {
String currentFieldName = parser.currentName();
if (token.isValue()) {
throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName + "] as object, but found a concrete value");
throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName
+ "] as object, but found a concrete value");
}
ObjectMapper.Nested nested = mapper.nested();
@ -379,7 +382,8 @@ final class DocumentParser {
}
}
private static void innerParseObject(ParseContext context, ObjectMapper mapper, XContentParser parser, String currentFieldName, XContentParser.Token token) throws IOException {
private static void innerParseObject(ParseContext context, ObjectMapper mapper, XContentParser parser,
String currentFieldName, XContentParser.Token token) throws IOException {
while (token != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.START_OBJECT) {
parseObject(context, mapper, currentFieldName);
@ -388,12 +392,14 @@ final class DocumentParser {
} else if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
if (MapperService.isMetadataField(context.path().pathAsText(currentFieldName))) {
throw new MapperParsingException("Field [" + currentFieldName + "] is a metadata field and cannot be added inside a document. Use the index API request parameters.");
throw new MapperParsingException("Field [" + currentFieldName + "] is a metadata field and cannot be added inside"
+ " a document. Use the index API request parameters.");
}
} else if (token == XContentParser.Token.VALUE_NULL) {
parseNullValue(context, mapper, currentFieldName);
} else if (token == null) {
throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?");
throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName
+ "] as object, but got EOF, has a concrete value been provided to it?");
} else if (token.isValue()) {
parseValue(context, mapper, currentFieldName, token);
}
@ -558,7 +564,8 @@ final class DocumentParser {
}
}
private static void parseNonDynamicArray(ParseContext context, ObjectMapper mapper, String lastFieldName, String arrayFieldName) throws IOException {
private static void parseNonDynamicArray(ParseContext context, ObjectMapper mapper,
String lastFieldName, String arrayFieldName) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
@ -571,16 +578,19 @@ final class DocumentParser {
} else if (token == XContentParser.Token.VALUE_NULL) {
parseNullValue(context, mapper, lastFieldName);
} else if (token == null) {
throw new MapperParsingException("object mapping for [" + mapper.name() + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
throw new MapperParsingException("object mapping for [" + mapper.name() + "] with array for [" + arrayFieldName
+ "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
} else {
parseValue(context, mapper, lastFieldName, token);
}
}
}
private static void parseValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
private static void parseValue(final ParseContext context, ObjectMapper parentMapper,
String currentFieldName, XContentParser.Token token) throws IOException {
if (currentFieldName == null) {
throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]");
throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with"
+ " no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
final String[] paths = splitAndValidatePath(currentFieldName);
@ -609,7 +619,8 @@ final class DocumentParser {
}
}
private static Mapper.Builder<?,?> createBuilderFromFieldType(final ParseContext context, MappedFieldType fieldType, String currentFieldName) {
private static Mapper.Builder<?,?> createBuilderFromFieldType(final ParseContext context,
MappedFieldType fieldType, String currentFieldName) {
Mapper.Builder builder = null;
if (fieldType instanceof TextFieldType) {
builder = context.root().findTemplateBuilder(context, currentFieldName, "text", XContentFieldType.STRING);
@ -671,7 +682,9 @@ final class DocumentParser {
return builder;
}
private static Mapper.Builder<?,?> createBuilderFromDynamicValue(final ParseContext context, XContentParser.Token token, String currentFieldName) throws IOException {
private static Mapper.Builder<?,?> createBuilderFromDynamicValue(final ParseContext context,
XContentParser.Token token,
String currentFieldName) throws IOException {
if (token == XContentParser.Token.VALUE_STRING) {
String text = context.parser().text();
@ -771,10 +784,12 @@ final class DocumentParser {
}
}
// TODO how do we identify dynamically that its a binary value?
throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name ["
+ currentFieldName + "]");
}
private static void parseDynamicValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
private static void parseDynamicValue(final ParseContext context, ObjectMapper parentMapper,
String currentFieldName, XContentParser.Token token) throws IOException {
ObjectMapper.Dynamic dynamic = dynamicOrDefault(parentMapper, context);
if (dynamic == ObjectMapper.Dynamic.STRICT) {
throw new StrictDynamicMappingException(parentMapper.fullPath(), currentFieldName);
@ -885,8 +900,8 @@ final class DocumentParser {
context.path());
mapper = (ObjectMapper) builder.build(builderContext);
if (mapper.nested() != ObjectMapper.Nested.NO) {
throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i])
+ "]) through `copy_to` or dots in field names");
throw new MapperParsingException("It is forbidden to create dynamic nested objects (["
+ context.path().pathAsText(paths[i]) + "]) through `copy_to` or dots in field names");
}
context.addDynamicMapper(mapper);
break;

server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java

@ -228,7 +228,8 @@ public class DynamicTemplate implements ToXContentObject {
try {
matchType.matches(regex, "");
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Pattern [" + regex + "] of type [" + matchType + "] is invalid. Cannot create dynamic template [" + name + "].", e);
throw new IllegalArgumentException("Pattern [" + regex + "] of type [" + matchType
+ "] is invalid. Cannot create dynamic template [" + name + "].", e);
}
}
}
@ -320,14 +321,16 @@ public class DynamicTemplate implements ToXContentObject {
private Map<String, Object> processMap(Map<String, Object> map, String name, String dynamicType) {
Map<String, Object> processedMap = new HashMap<>();
for (Map.Entry<String, Object> entry : map.entrySet()) {
String key = entry.getKey().replace("{name}", name).replace("{dynamic_type}", dynamicType).replace("{dynamicType}", dynamicType);
String key = entry.getKey().replace("{name}", name).replace("{dynamic_type}", dynamicType)
.replace("{dynamicType}", dynamicType);
Object value = entry.getValue();
if (value instanceof Map) {
value = processMap((Map<String, Object>) value, name, dynamicType);
} else if (value instanceof List) {
value = processList((List) value, name, dynamicType);
} else if (value instanceof String) {
value = value.toString().replace("{name}", name).replace("{dynamic_type}", dynamicType).replace("{dynamicType}", dynamicType);
value = value.toString().replace("{name}", name).replace("{dynamic_type}", dynamicType)
.replace("{dynamicType}", dynamicType);
}
processedMap.put(key, value);
}
@ -342,7 +345,9 @@ public class DynamicTemplate implements ToXContentObject {
} else if (value instanceof List) {
value = processList((List) value, name, dynamicType);
} else if (value instanceof String) {
value = value.toString().replace("{name}", name).replace("{dynamic_type}", dynamicType).replace("{dynamicType}", dynamicType);
value = value.toString().replace("{name}", name)
.replace("{dynamic_type}", dynamicType)
.replace("{dynamicType}", dynamicType);
}
processedList.add(value);
}
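Long call chains are wrapped with one call per line and the dot leading each continuation, as in the {name}/{dynamic_type} replace chains above. A runnable sketch of that layout:

    // Standalone example; the placeholder names come from the DynamicTemplate hunks above.
    public class PlaceholderExpander {

        static String expand(String template, String name, String dynamicType) {
            // Each .replace call sits on its own line so none of them exceeds the limit.
            return template.replace("{name}", name)
                .replace("{dynamic_type}", dynamicType)
                .replace("{dynamicType}", dynamicType);
        }

        public static void main(String[] args) {
            System.out.println(expand("{name} maps to {dynamic_type}", "title", "text"));
        }
    }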

server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java

@ -91,7 +91,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
// can happen when an existing type on the same index has disabled indexing
// since we inherit the default field type from the first mapper that is
// created on an index
throw new IllegalArgumentException("mapper [" + name + "] has different [index] values from other types of the same index");
throw new IllegalArgumentException("mapper [" + name + "] has different [index] values from other types"
+ " of the same index");
}
fieldType.setIndexOptions(options);
}
@ -227,7 +228,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
protected MultiFields multiFields;
protected CopyTo copyTo;
protected FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
protected FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(simpleName);
assert indexSettings != null;
this.indexCreatedVersion = Version.indexCreated(indexSettings);
@ -325,7 +327,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
if (mergeWith instanceof FieldMapper) {
mergedType = ((FieldMapper) mergeWith).contentType();
}
throw new IllegalArgumentException("mapper [" + fieldType().name() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
throw new IllegalArgumentException("mapper [" + fieldType().name() + "] of different type, current_type [" + contentType()
+ "], merged_type [" + mergedType + "]");
}
FieldMapper fieldMergeWith = (FieldMapper) mergeWith;
multiFields = multiFields.merge(fieldMergeWith.multiFields);
@ -414,12 +417,13 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
}
} else {
boolean hasDefaultIndexAnalyzer = fieldType().indexAnalyzer().name().equals("default");
boolean hasDifferentSearchAnalyzer = fieldType().searchAnalyzer().name().equals(fieldType().indexAnalyzer().name()) == false;
boolean hasDifferentSearchQuoteAnalyzer = fieldType().searchAnalyzer().name().equals(fieldType().searchQuoteAnalyzer().name()) == false;
final String searchAnalyzerName = fieldType().searchAnalyzer().name();
boolean hasDifferentSearchAnalyzer = searchAnalyzerName.equals(fieldType().indexAnalyzer().name()) == false;
boolean hasDifferentSearchQuoteAnalyzer = searchAnalyzerName.equals(fieldType().searchQuoteAnalyzer().name()) == false;
if (includeDefaults || hasDefaultIndexAnalyzer == false || hasDifferentSearchAnalyzer || hasDifferentSearchQuoteAnalyzer) {
builder.field("analyzer", fieldType().indexAnalyzer().name());
if (includeDefaults || hasDifferentSearchAnalyzer || hasDifferentSearchQuoteAnalyzer) {
builder.field("search_analyzer", fieldType().searchAnalyzer().name());
builder.field("search_analyzer", searchAnalyzerName);
if (includeDefaults || hasDifferentSearchQuoteAnalyzer) {
builder.field("search_quote_analyzer", fieldType().searchQuoteAnalyzer().name());
}
@ -521,7 +525,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
}
public void parse(FieldMapper mainField, ParseContext context) throws IOException {
// TODO: multi fields are really just copy fields, we just need to expose "sub fields" or something that can be part of the mappings
// TODO: multi fields are really just copy fields, we just need to expose "sub fields" or something that can be part
// of the mappings
if (mappers.isEmpty()) {
return;
}

server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java

@ -104,7 +104,8 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node,
ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {

server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java

@ -211,7 +211,8 @@ public class GeoShapeFieldMapper extends FieldMapper {
builder.fieldType().setTreeLevels(Integer.parseInt(fieldNode.toString()));
iterator.remove();
} else if (Names.TREE_PRESISION.equals(fieldName)) {
builder.fieldType().setPrecisionInMeters(DistanceUnit.parse(fieldNode.toString(), DistanceUnit.DEFAULT, DistanceUnit.DEFAULT));
builder.fieldType().setPrecisionInMeters(DistanceUnit.parse(fieldNode.toString(),
DistanceUnit.DEFAULT, DistanceUnit.DEFAULT));
iterator.remove();
} else if (Names.DISTANCE_ERROR_PCT.equals(fieldName)) {
builder.fieldType().setDistanceErrorPct(Double.parseDouble(fieldNode.toString()));
@ -229,7 +230,8 @@ public class GeoShapeFieldMapper extends FieldMapper {
builder.coerce(XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.COERCE));
iterator.remove();
} else if (GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName().equals(fieldName)) {
builder.ignoreZValue(XContentMapValues.nodeBooleanValue(fieldNode, name + "." + GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName()));
builder.ignoreZValue(XContentMapValues.nodeBooleanValue(fieldNode,
name + "." + GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName()));
iterator.remove();
} else if (Names.STRATEGY_POINTS_ONLY.equals(fieldName)) {
pointsOnly = XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.STRATEGY_POINTS_ONLY);
@ -314,11 +316,14 @@ public class GeoShapeFieldMapper extends FieldMapper {
// must be by the time freeze is called.
SpatialPrefixTree prefixTree;
if ("geohash".equals(tree)) {
prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true));
prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT,
getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true));
} else if ("legacyquadtree".equals(tree)) {
prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT,
getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
} else if ("quadtree".equals(tree)) {
prefixTree = new PackedQuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
prefixTree = new PackedQuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT,
getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
} else {
throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]");
}
@ -503,8 +508,9 @@ public class GeoShapeFieldMapper extends FieldMapper {
}
return;
} else if (shape instanceof Point == false) {
throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a " +
((shape instanceof JtsGeometry) ? ((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass()) + " was found");
throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a "
+ ((shape instanceof JtsGeometry) ? ((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass())
+ " was found");
}
}
indexShape(context, shape);

server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java

@ -84,7 +84,8 @@ public class IdFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node,
ParserContext parserContext) throws MapperParsingException {
throw new MapperParsingException(NAME + " is not configurable");
}
@ -157,7 +158,8 @@ public class IdFieldMapper extends MetadataFieldMapper {
@Override
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
final IndexFieldData<?> fieldData = fieldDataBuilder.build(indexSettings, fieldType, cache, breakerService, mapperService);
final IndexFieldData<?> fieldData = fieldDataBuilder.build(indexSettings, fieldType, cache,
breakerService, mapperService);
return new IndexFieldData<AtomicFieldData>() {
@Override
@ -182,7 +184,8 @@ public class IdFieldMapper extends MetadataFieldMapper {
@Override
public SortField sortField(Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) {
XFieldComparatorSource source = new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested);
XFieldComparatorSource source = new BytesRefFieldComparatorSource(this, missingValue,
sortMode, nested);
return new SortField(getFieldName(), source, reverse);
}

server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java

@ -79,7 +79,8 @@ public class IndexFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node,
ParserContext parserContext) throws MapperParsingException {
throw new MapperParsingException(NAME + " is not configurable");
}
@ -131,7 +132,8 @@ public class IndexFieldMapper extends MetadataFieldMapper {
if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) {
return Queries.newMatchAllQuery();
} else {
return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName() + " vs. " + value);
return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName()
+ " vs. " + value);
}
}

server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java

@ -138,9 +138,11 @@ public abstract class MappedFieldType extends FieldType {
/** Checks this type is the same type as other. Adds a conflict if they are different. */
private void checkTypeName(MappedFieldType other) {
if (typeName().equals(other.typeName()) == false) {
throw new IllegalArgumentException("mapper [" + name + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]");
throw new IllegalArgumentException("mapper [" + name + "] cannot be changed from type [" + typeName()
+ "] to [" + other.typeName() + "]");
} else if (getClass() != other.getClass()) {
throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and " + other.getClass().getSimpleName());
throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and "
+ other.getClass().getSimpleName());
}
}
@ -338,31 +340,38 @@ public abstract class MappedFieldType extends FieldType {
}
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
throw new IllegalArgumentException("Can only use fuzzy queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]");
throw new IllegalArgumentException("Can only use fuzzy queries on keyword and text fields - not on [" + name
+ "] which is of type [" + typeName() + "]");
}
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) {
throw new QueryShardException(context, "Can only use prefix queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]");
throw new QueryShardException(context, "Can only use prefix queries on keyword and text fields - not on [" + name
+ "] which is of type [" + typeName() + "]");
}
public Query wildcardQuery(String value,
@Nullable MultiTermQuery.RewriteMethod method,
QueryShardContext context) {
throw new QueryShardException(context, "Can only use wildcard queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]");
throw new QueryShardException(context, "Can only use wildcard queries on keyword and text fields - not on [" + name
+ "] which is of type [" + typeName() + "]");
}
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) {
throw new QueryShardException(context, "Can only use regexp queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]");
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method,
QueryShardContext context) {
throw new QueryShardException(context, "Can only use regexp queries on keyword and text fields - not on [" + name
+ "] which is of type [" + typeName() + "]");
}
public abstract Query existsQuery(QueryShardContext context);
public Query phraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException {
throw new IllegalArgumentException("Can only use phrase queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]");
throw new IllegalArgumentException("Can only use phrase queries on text fields - not on [" + name
+ "] which is of type [" + typeName() + "]");
}
public Query multiPhraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException {
throw new IllegalArgumentException("Can only use phrase queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]");
throw new IllegalArgumentException("Can only use phrase queries on text fields - not on [" + name
+ "] which is of type [" + typeName() + "]");
}
/**

server/src/main/java/org/elasticsearch/index/mapper/MapperService.java

@ -195,7 +195,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
* Update mapping by only merging the metadata that is different between received and stored entries
*/
public boolean updateMapping(final IndexMetaData currentIndexMetaData, final IndexMetaData newIndexMetaData) throws IOException {
assert newIndexMetaData.getIndex().equals(index()) : "index mismatch: expected " + index() + " but was " + newIndexMetaData.getIndex();
assert newIndexMetaData.getIndex().equals(index()) : "index mismatch: expected " + index()
+ " but was " + newIndexMetaData.getIndex();
// go over and add the relevant mappings (or update them)
Set<String> existingMappers = new HashSet<>();
if (mapper != null) {
@ -227,15 +228,16 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
} else if (logger.isTraceEnabled()) {
logger.trace("[{}] {} mapping [{}], source [{}]", index(), op, mappingType, incomingMappingSource.string());
} else {
logger.debug("[{}] {} mapping [{}] (source suppressed due to length, use TRACE level if needed)", index(), op, mappingType);
logger.debug("[{}] {} mapping [{}] (source suppressed due to length, use TRACE level if needed)",
index(), op, mappingType);
}
// refresh mapping can happen when the parsing/merging of the mapping from the metadata doesn't result in the same
// mapping, in this case, we send to the master to refresh its own version of the mappings (to conform with the
// merge version of it, which it does when refreshing the mappings), and warn log it.
if (documentMapper(mappingType).mappingSource().equals(incomingMappingSource) == false) {
logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index(), mappingType,
incomingMappingSource, documentMapper(mappingType).mappingSource());
logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}",
index(), mappingType, incomingMappingSource, documentMapper(mappingType).mappingSource());
requireRefresh = true;
}
@ -287,7 +289,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
Map<String, CompressedXContent> mappingSourcesCompressed = new LinkedHashMap<>(mappings.size());
for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) {
try {
mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(Strings.toString(XContentFactory.jsonBuilder().map(entry.getValue()))));
mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(Strings.toString(
XContentFactory.jsonBuilder().map(entry.getValue()))));
} catch (Exception e) {
throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage());
}
@ -304,7 +307,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return internalMerge(Collections.singletonMap(type, mappingSource), reason).get(type);
}
private synchronized Map<String, DocumentMapper> internalMerge(IndexMetaData indexMetaData, MergeReason reason, boolean onlyUpdateIfNeeded) {
private synchronized Map<String, DocumentMapper> internalMerge(IndexMetaData indexMetaData,
MergeReason reason, boolean onlyUpdateIfNeeded) {
Map<String, CompressedXContent> map = new LinkedHashMap<>();
for (ObjectCursor<MappingMetaData> cursor : indexMetaData.getMappings().values()) {
MappingMetaData mappingMetaData = cursor.value;
@ -379,10 +383,12 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
throw new InvalidTypeNameException("mapping type name is empty");
}
if (type.length() > 255) {
throw new InvalidTypeNameException("mapping type name [" + type + "] is too long; limit is length 255 but was [" + type.length() + "]");
throw new InvalidTypeNameException("mapping type name [" + type + "] is too long; limit is length 255 but was ["
+ type.length() + "]");
}
if (type.charAt(0) == '_' && SINGLE_MAPPING_NAME.equals(type) == false) {
throw new InvalidTypeNameException("mapping type name [" + type + "] can't start with '_' unless it is called [" + SINGLE_MAPPING_NAME + "]");
throw new InvalidTypeNameException("mapping type name [" + type + "] can't start with '_' unless it is called ["
+ SINGLE_MAPPING_NAME + "]");
}
if (type.contains("#")) {
throw new InvalidTypeNameException("mapping type name [" + type + "] should not include '#' in it");
@ -395,8 +401,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
}
private synchronized Map<String, DocumentMapper> internalMerge(@Nullable DocumentMapper defaultMapper, @Nullable String defaultMappingSource,
DocumentMapper mapper, MergeReason reason) {
private synchronized Map<String, DocumentMapper> internalMerge(@Nullable DocumentMapper defaultMapper,
@Nullable String defaultMappingSource, DocumentMapper mapper,
MergeReason reason) {
boolean hasNested = this.hasNested;
Map<String, ObjectMapper> fullPathObjectMappers = this.fullPathObjectMappers;
FieldTypeLookup fieldTypes = this.fieldTypes;
@ -418,7 +425,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
{
if (mapper != null && this.mapper != null && Objects.equals(this.mapper.type(), mapper.type()) == false) {
throw new IllegalArgumentException(
"Rejecting mapping update to [" + index().getName() + "] as the final mapping would have more than 1 type: " + Arrays.asList(this.mapper.type(), mapper.type()));
"Rejecting mapping update to [" + index().getName() + "] as the final mapping would have more than 1 type: "
+ Arrays.asList(this.mapper.type(), mapper.type()));
}
}
@ -475,7 +483,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
// deserializing cluster state that was sent by the master node,
// this check will be skipped.
// Also, don't take metadata mappers into account for the field limit check
checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size() - metadataMappers.length + fieldAliasMappers.size() );
checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size() - metadataMappers.length
+ fieldAliasMappers.size() );
}
results.put(newMapper.type(), newMapper);
@ -562,14 +571,16 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
}
if (actualNestedFields > allowedNestedFields) {
throw new IllegalArgumentException("Limit of nested fields [" + allowedNestedFields + "] in index [" + index().getName() + "] has been exceeded");
throw new IllegalArgumentException("Limit of nested fields [" + allowedNestedFields + "] in index [" + index().getName()
+ "] has been exceeded");
}
}
private void checkTotalFieldsLimit(long totalMappers) {
long allowedTotalFields = indexSettings.getValue(INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING);
if (allowedTotalFields < totalMappers) {
throw new IllegalArgumentException("Limit of total fields [" + allowedTotalFields + "] in index [" + index().getName() + "] has been exceeded");
throw new IllegalArgumentException("Limit of total fields [" + allowedTotalFields + "] in index [" + index().getName()
+ "] has been exceeded");
}
}

server/src/main/java/org/elasticsearch/index/mapper/Mapping.java

@ -48,7 +48,8 @@ public final class Mapping implements ToXContentFragment {
final Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap;
final Map<String, Object> meta;
public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper, MetadataFieldMapper[] metadataMappers, Map<String, Object> meta) {
public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper,
MetadataFieldMapper[] metadataMappers, Map<String, Object> meta) {
this.indexCreated = indexCreated;
this.metadataMappers = metadataMappers;
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap = new HashMap<>();

server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java

@ -33,7 +33,8 @@ public abstract class MetadataFieldMapper extends FieldMapper {
public interface TypeParser extends Mapper.TypeParser {
@Override
MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException;
MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node,
ParserContext parserContext) throws MapperParsingException;
/**
* Get the default {@link MetadataFieldMapper} to use, if nothing had to be parsed.

server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java

@ -173,7 +173,8 @@ public class ObjectMapper extends Mapper implements Cloneable {
return builder;
}
protected static boolean parseObjectOrDocumentTypeProperties(String fieldName, Object fieldNode, ParserContext parserContext, ObjectMapper.Builder builder) {
protected static boolean parseObjectOrDocumentTypeProperties(String fieldName, Object fieldNode, ParserContext parserContext,
ObjectMapper.Builder builder) {
if (fieldName.equals("dynamic")) {
String value = fieldNode.toString();
if (value.equalsIgnoreCase("strict")) {
@ -215,7 +216,8 @@ public class ObjectMapper extends Mapper implements Cloneable {
} else if (type.equals(NESTED_CONTENT_TYPE)) {
nested = true;
} else {
throw new MapperParsingException("Trying to parse an object but has a different type [" + type + "] for [" + name + "]");
throw new MapperParsingException("Trying to parse an object but has a different type [" + type
+ "] for [" + name + "]");
}
}
fieldNode = node.get("include_in_parent");
@ -433,7 +435,8 @@ public class ObjectMapper extends Mapper implements Cloneable {
@Override
public ObjectMapper merge(Mapper mergeWith) {
if (!(mergeWith instanceof ObjectMapper)) {
throw new IllegalArgumentException("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]");
throw new IllegalArgumentException("Can't merge a non object mapping [" + mergeWith.name()
+ "] with an object mapping [" + name() + "]");
}
ObjectMapper mergeWithObject = (ObjectMapper) mergeWith;
ObjectMapper merged = clone();
@ -522,7 +525,8 @@ public class ObjectMapper extends Mapper implements Cloneable {
if (nested.isIncludeInRoot()) {
builder.field("include_in_root", true);
}
} else if (mappers.isEmpty() && custom == null) { // only write the object content type if there are no properties, otherwise, it is automatically detected
} else if (mappers.isEmpty() && custom == null) {
// only write the object content type if there are no properties, otherwise, it is automatically detected
builder.field("type", CONTENT_TYPE);
}
if (dynamic != null) {

server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java

@ -55,7 +55,8 @@ public class RootObjectMapper extends ObjectMapper {
public static class Builder extends ObjectMapper.Builder<Builder, RootObjectMapper> {
protected Explicit<DynamicTemplate[]> dynamicTemplates = new Explicit<>(new DynamicTemplate[0], false);
protected Explicit<FormatDateTimeFormatter[]> dynamicDateTimeFormatters = new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false);
protected Explicit<FormatDateTimeFormatter[]> dynamicDateTimeFormatters =
new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false);
protected Explicit<Boolean> dateDetection = new Explicit<>(Defaults.DATE_DETECTION, false);
protected Explicit<Boolean> numericDetection = new Explicit<>(Defaults.NUMERIC_DETECTION, false);

server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java

@ -82,7 +82,8 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node,
ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();

server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java

@ -107,7 +107,8 @@ public class SourceFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node,
ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder();
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {

server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java

@ -79,7 +79,8 @@ public class TypeFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node,
ParserContext parserContext) throws MapperParsingException {
throw new MapperParsingException(NAME + " is not configurable");
}
@ -161,7 +162,8 @@ public class TypeFieldMapper extends MetadataFieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
deprecationLogger.deprecatedAndMaybeLog("range_single_type",
"Running [range] query on [_type] field for an index with a single type. As types are deprecated, this functionality will be removed in future releases.");
"Running [range] query on [_type] field for an index with a single type."
+ " As types are deprecated, this functionality will be removed in future releases.");
Query result = new MatchAllDocsQuery();
String type = context.getMapperService().documentMapper().type();
if (type != null) {

server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java

@ -58,7 +58,8 @@ public class VersionFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public MetadataFieldMapper.Builder<?, ?> parse(String name, Map<String, Object> node,
ParserContext parserContext) throws MapperParsingException {
throw new MapperParsingException(NAME + " is not configurable");
}

server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java

@ -69,7 +69,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
@ -101,7 +102,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
@ -135,7 +137,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
@ -159,11 +162,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("completion", "suggestion")
.endObject()),
@ -179,11 +183,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("completion", 1.0)
.endObject()),
@ -212,10 +217,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.array("keywordfield", "key1", "key2", "key3")
.endObject()),
@ -266,10 +272,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("suggest")
.array("input","timmy","starbucks")
@ -321,10 +328,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.array("suggest", "timmy","starbucks")
.array("cat","cafe","food")
@ -357,10 +365,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("geofield", "drm3btev3e86")//"41.12,-71.34"
.endObject()),
@ -387,10 +396,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("suggest", "suggestion")
.endObject()),
@ -418,10 +428,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("completion")
.array("input","New York", "NY")
@ -455,10 +466,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("completion")
.array("input","New York", "NY")
@ -494,10 +506,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
);
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("completion", "suggestion")
.endObject()),
@ -520,11 +533,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.array("completion", "suggestion1", "suggestion2")
.endObject()),
@ -543,11 +557,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("completion")
.field("input", "suggestion")
@ -568,11 +583,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("completion")
.array("input", "suggestion1", "suggestion2", "suggestion3")
@ -635,11 +651,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("completion")
.startObject()
@ -672,11 +689,12 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("completion")
.startObject()
@ -712,10 +730,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
try {
defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field1")
.field("input", "suggestion1")
@ -739,13 +758,14 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
CharsRefBuilder charsRefBuilder = new CharsRefBuilder();
charsRefBuilder.append("sugg");
charsRefBuilder.setCharAt(2, '\u001F');
try {
defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("completion", charsRefBuilder.get().toString())
.endObject()),
@ -759,8 +779,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
charsRefBuilder.setCharAt(2, '\u0000');
try {
defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("completion", charsRefBuilder.get().toString())
.endObject()),
@ -774,8 +794,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
charsRefBuilder.setCharAt(2, '\u001E');
try {
defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("completion", charsRefBuilder.get().toString())
.endObject()),
@ -788,8 +808,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
}
// empty inputs are ignored
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.array("completion", " ", "")
.endObject()),
@ -801,8 +821,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
assertThat(ignoredFields.stringValue(), equalTo("completion"));
// null inputs are ignored
ParsedDocument nullDoc = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument nullDoc = defaultMapper.parse(SourceToParse.source("test", "type1", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("completion")
.endObject()),
@ -819,7 +839,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
Query prefixQuery = completionFieldMapper.fieldType().prefixQuery(new BytesRef("co"));
@ -833,7 +854,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
Query prefixQuery = completionFieldMapper.fieldType().fuzzyQuery("co",
@ -850,7 +872,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
Query prefixQuery = completionFieldMapper.fieldType()

View File
@ -18,8 +18,6 @@
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.suggest.completion.context.ContextBuilder;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
import org.junit.Before;
@ -52,7 +50,8 @@ public class CompletionFieldTypeTests extends FieldTypeTestCase {
@Override
public void modify(MappedFieldType ft) {
CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
ContextMappings contextMappings = new ContextMappings(Arrays.asList(ContextBuilder.category("foo").build(), ContextBuilder.geo("geo").build()));
ContextMappings contextMappings = new ContextMappings(Arrays.asList(ContextBuilder.category("foo").build(),
ContextBuilder.geo("geo").build()));
cft.setContextMappings(contextMappings);
}
});

View File
@ -145,7 +145,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
@ -172,7 +173,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
@ -209,7 +211,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
@ -239,7 +242,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
@ -273,7 +277,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
@ -283,7 +288,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc();
fail();
} catch (MapperParsingException ex) {
assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [field] within [very.far] is not allowed"));
assertThat(ex.getMessage(),
startsWith("mapping set to strict, dynamic introduction of [field] within [very.far] is not allowed"));
}
}
@ -307,12 +313,14 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), MapperService.MergeReason.MAPPING_UPDATE);
DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore),
MapperService.MergeReason.MAPPING_UPDATE);
FieldMapper fieldMapperBefore = (FieldMapper) docMapperBefore.mappers().getMapper("copy_test");
assertEquals(Arrays.asList("foo", "bar"), fieldMapperBefore.copyTo().copyToFields());
DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter), MapperService.MergeReason.MAPPING_UPDATE);
DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter),
MapperService.MergeReason.MAPPING_UPDATE);
FieldMapper fieldMapperAfter = (FieldMapper) docMapperAfter.mappers().getMapper("copy_test");
assertEquals(Arrays.asList("baz", "bar"), fieldMapperAfter.copyTo().copyToFields());
@ -385,7 +393,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endArray()
.endObject();
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(jsonDoc), XContentType.JSON));
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(jsonDoc), XContentType.JSON));
assertEquals(6, doc.docs().size());
Document nested = doc.docs().get(0);
@ -544,7 +553,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")

View File

@ -135,19 +135,22 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
{
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("foo", true).endObject());
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("foo", true)
.endObject());
MapperException exception = expectThrows(MapperException.class,
() -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
assertThat(exception.getMessage(), containsString("failed to parse field [foo] of type [long]"));
}
{
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("bar", "bar").endObject());
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("bar", "bar")
.endObject());
MapperException exception = expectThrows(MapperException.class,
() -> mapper.parse(SourceToParse.source("test", "type", "2", bytes, XContentType.JSON)));
assertThat(exception.getMessage(), containsString("failed to parse field [bar] of type [boolean]"));
}
{
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("geo", 123).endObject());
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("geo", 123)
.endObject());
MapperException exception = expectThrows(MapperException.class,
() -> mapper.parse(SourceToParse.source("test", "type", "2", bytes, XContentType.JSON)));
assertThat(exception.getMessage(), containsString("failed to parse field [geo] of type [geo_shape]"));
@ -222,7 +225,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
doc.endObject();
// Verify in the case where only a single type is allowed that the _id field is added to nested documents:
ParsedDocument result = mapper.parse(SourceToParse.source("index2", "type", "1", BytesReference.bytes(doc), XContentType.JSON));
ParsedDocument result = mapper.parse(SourceToParse.source("index2", "type", "1",
BytesReference.bytes(doc), XContentType.JSON));
assertEquals(2, result.docs().size());
// Nested document:
assertNotNull(result.docs().get(0).getField(IdFieldMapper.NAME));
@ -463,7 +467,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicFalseLongArray() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -478,7 +483,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicStrictLongArray() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -495,7 +501,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testMappedGeoPointArray() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("foo").field("type", "geo_point").field("doc_values", false)
.startObject("properties").startObject("foo").field("type", "geo_point")
.field("doc_values", false)
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -544,7 +551,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicFalseObject() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -558,7 +566,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicStrictObject() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -573,7 +582,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicFalseValue() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -587,7 +597,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicStrictValue() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -602,7 +613,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicFalseNull() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -616,7 +628,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicStrictNull() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -741,7 +754,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicFalseDottedFieldNameLongArray() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -756,7 +770,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicStrictDottedFieldNameLongArray() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -860,7 +875,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicFalseDottedFieldNameLong() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -873,7 +889,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicStrictDottedFieldNameLong() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -942,12 +959,12 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicDottedFieldNameObjectWithExistingParent() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("foo")
.field("type", "object").endObject().endObject().endObject().endObject());
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").field("type", "object").endObject().endObject().endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
BytesReference bytes = BytesReference
.bytes(XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject());
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz")
.field("a", 0).endObject().endObject());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length);
Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@ -972,8 +989,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
BytesReference bytes = BytesReference
.bytes(XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject());
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz")
.field("a", 0).endObject().endObject());
MapperParsingException exception = expectThrows(MapperParsingException.class,
() -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
@ -983,7 +1000,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicFalseDottedFieldNameObject() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -997,7 +1015,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testDynamicStrictDottedFieldNameObject() throws Exception {
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
.endObject().endObject());
DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
@ -1020,7 +1039,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
assertTrue(e.getMessage(), e.getMessage().contains("cannot be added inside a document"));
BytesReference bytes2 = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("foo._ttl", 0).endObject());
BytesReference bytes2 = BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("foo._ttl", 0).endObject());
mapper.parse(SourceToParse.source("test", "type", "1", bytes2, XContentType.JSON)); // parses without error
}
@ -1028,7 +1048,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test");
DocumentMapper docMapper = new DocumentMapper.Builder(
new RootObjectMapper.Builder("person")
.add(new ObjectMapper.Builder("name").add(new TextFieldMapper.Builder("first").store(true).index(false))),
.add(new ObjectMapper.Builder("name")
.add(new TextFieldMapper.Builder("first").store(true).index(false))),
indexService.mapperService()).build(indexService.mapperService());
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
@ -1053,7 +1074,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testSimpleParser() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("person", new CompressedXContent(mapping));
assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
@ -1065,7 +1087,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testSimpleParserNoTypeNoId() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("person", new CompressedXContent(mapping));
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json"));
Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1")));
@ -1088,7 +1111,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test");
DocumentMapper docMapper = new DocumentMapper.Builder(
new RootObjectMapper.Builder("person")
.add(new ObjectMapper.Builder("name").add(new TextFieldMapper.Builder("first").store(true).index(false))),
.add(new ObjectMapper.Builder("name")
.add(new TextFieldMapper.Builder("first").store(true).index(false))),
indexService.mapperService()).build(indexService.mapperService());
BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
@ -1103,7 +1127,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testNoLevel() throws Exception {
String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@ -1122,7 +1147,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testTypeLevel() throws Exception {
String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@ -1141,7 +1167,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testNoLevelWithFieldTypeAsValue() throws Exception {
String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@ -1162,7 +1189,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testTypeLevelWithFieldTypeAsValue() throws Exception {
String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@ -1183,7 +1211,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testNoLevelWithFieldTypeAsObject() throws Exception {
String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@ -1204,7 +1233,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testTypeLevelWithFieldTypeAsObject() throws Exception {
String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@ -1225,7 +1255,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception {
String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@ -1246,7 +1277,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception {
String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@ -1267,7 +1299,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception {
String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@ -1289,7 +1322,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception {
String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(defaultMapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()

View File
@ -67,7 +67,8 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(jsonBuilder()
@ -89,7 +90,8 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(jsonBuilder()
@ -112,10 +114,12 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(jsonBuilder()
StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class,
() -> defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(jsonBuilder()
.startObject()
.field("field1", "value1")
.field("field2", "value2")
@ -123,8 +127,9 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
XContentType.JSON)));
assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [type] is not allowed"));
e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
e = expectThrows(StrictDynamicMappingException.class,
() -> defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field1", "value1")
.field("field2", (String) null)
@ -143,10 +148,11 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(jsonBuilder()
.startObject().startObject("obj1")
.field("field1", "value1")
.field("field2", "value2")
@ -168,11 +174,12 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () ->
defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(jsonBuilder()
defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(jsonBuilder()
.startObject().startObject("obj1")
.field("field1", "value1")
.field("field2", "value2")
@ -200,7 +207,8 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
.numberOfShards(1).numberOfReplicas(0).build();
IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
SourceToParse source = SourceToParse.source("test", mapper.type(), "some_id", BytesReference.bytes(builder), builder.contentType());
SourceToParse source = SourceToParse.source("test", mapper.type(), "some_id",
BytesReference.bytes(builder), builder.contentType());
try (XContentParser xContentParser = createParser(JsonXContent.jsonXContent, source.source())) {
ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext(settings, parser, mapper, source, xContentParser);
assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
@ -264,7 +272,8 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject());
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar")
.field("bar", "baz").endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
@ -292,7 +301,8 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject());
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar")
.field("bar", "baz").endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
@ -327,14 +337,17 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject());
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar")
.field("baz", "foo").endObject().endObject().endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "text")
.startObject("fields").startObject("keyword").field("type", "keyword").field("ignore_above", 256).endObject()
.startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz")
.field("type", "text")
.startObject("fields").startObject("keyword").field("type", "keyword")
.field("ignore_above", 256).endObject()
.endObject().endObject().endObject().endObject().endObject().endObject()
.endObject().endObject().endObject()), serialize(update));
}
@ -349,7 +362,8 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo").value("bar").value("baz").endArray().endObject());
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject()
.startArray("foo").value("bar").value("baz").endArray().endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
@ -377,13 +391,15 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject());
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo")
.startObject("bar").field("baz", "foo").endObject().endObject().endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "text").startObject("fields")
.startObject("foo").startObject("properties").startObject("bar").startObject("properties")
.startObject("baz").field("type", "text").startObject("fields")
.startObject("keyword").field("type", "keyword").field("ignore_above", 256).endObject()
.endObject().endObject().endObject().endObject().endObject().endObject()
.endObject().endObject().endObject()), serialize(update));
@ -580,7 +596,8 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.field("baz", (double) 3.2f) // double that can be accurately represented as a float
.field("quux", "3.2") // float detected through numeric detection
.endObject());
ParsedDocument parsedDocument = mapper.parse(SourceToParse.source("index", "type", "id", source, builder.contentType()));
ParsedDocument parsedDocument = mapper.parse(SourceToParse.source("index", "type", "id",
source, builder.contentType()));
Mapping update = parsedDocument.dynamicMappingsUpdate();
assertNotNull(update);
assertThat(((FieldMapper) update.root().getMapper("foo")).fieldType().typeName(), equalTo("float"));

View File
@ -92,8 +92,8 @@ public class ExternalMapper extends FieldMapper {
setupFieldType(context);
return new ExternalMapper(name, fieldType, generatedValue, mapperName, binMapper, boolMapper, pointMapper, shapeMapper, stringMapper,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
return new ExternalMapper(name, fieldType, generatedValue, mapperName, binMapper, boolMapper, pointMapper,
shapeMapper, stringMapper, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
}
@ -156,7 +156,8 @@ public class ExternalMapper extends FieldMapper {
public ExternalMapper(String simpleName, MappedFieldType fieldType,
String generatedValue, String mapperName,
BinaryFieldMapper binMapper, BooleanFieldMapper boolMapper, GeoPointFieldMapper pointMapper,
GeoShapeFieldMapper shapeMapper, FieldMapper stringMapper, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
GeoShapeFieldMapper shapeMapper, FieldMapper stringMapper, Settings indexSettings,
MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, new ExternalFieldType(), indexSettings, multiFields, copyTo);
this.generatedValue = generatedValue;
this.mapperName = mapperName;

View File
@ -24,13 +24,6 @@ import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.BooleanFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Mapper.TypeParser.ParserContext;
import java.io.IOException;
import java.util.Collections;
@ -99,7 +92,8 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public MetadataFieldMapper.Builder<?, ?> parse(String name, Map<String, Object> node,
ParserContext parserContext) throws MapperParsingException {
return new Builder();
}

View File
@ -67,7 +67,8 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
.startObject("_field_names").endObject()
.endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
assertFalse(fieldNamesMapper.fieldType().hasDocValues());
assertEquals(IndexOptions.DOCS, fieldNamesMapper.fieldType().indexOptions());
@ -78,10 +79,11 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
public void testInjectIntoDocDuringParsing() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("a", "100")
.startObject("b")
@ -96,14 +98,16 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
public void testExplicitEnabled() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_field_names").field("enabled", true).endObject()
.startObject("properties").startObject("field").field("type", "keyword").field("doc_values", false).endObject().endObject()
.endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
.startObject("properties")
.startObject("field").field("type", "keyword").field("doc_values", false).endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
assertTrue(fieldNamesMapper.fieldType().isEnabled());
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject()),
@ -116,12 +120,13 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_field_names").field("enabled", false).endObject()
.endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
assertFalse(fieldNamesMapper.fieldType().isEnabled());
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject()),
@ -139,8 +144,10 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE);
DocumentMapper mapperDisabled = mapperService.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE);
DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping),
MapperService.MergeReason.MAPPING_UPDATE);
DocumentMapper mapperDisabled = mapperService.merge("type", new CompressedXContent(disabledMapping),
MapperService.MergeReason.MAPPING_UPDATE);
assertFalse(mapperDisabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());
mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE);


@ -62,10 +62,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("point", stringEncode(1.3, 1.2))
.endObject()),
@ -78,10 +79,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
.endObject()),
@ -94,10 +96,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false);
String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("point")
.startObject().field("lat", 1.2).field("lon", 1.3).endObject()
@ -115,11 +118,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("point", "1.2,1.3")
.endObject()),
@ -133,11 +136,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point")
.field(IGNORE_Z_VALUE.getPreferredName(), true);
String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("point", "1.2,1.3,10.0")
.endObject()),
@ -151,11 +154,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point")
.field(IGNORE_Z_VALUE.getPreferredName(), false);
String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
SourceToParse source = SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
SourceToParse source = SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("point", "1.2,1.3,10.0")
.endObject()),
@ -169,11 +172,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("point", "1.2,1.3")
.endObject()),
@ -185,11 +188,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false);
String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("point")
.value("1.2,1.3")
@ -207,10 +210,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("point").value(1.3).value(1.2).endArray()
.endObject()),
@ -224,10 +228,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startArray("dynamic_templates").startObject().startObject("point").field("match", "point*")
.startObject("mapping").field("type", "geo_point");
String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endArray().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("point").value(1.3).value(1.2).endArray()
.endObject()),
@ -240,10 +245,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("point").value(1.3).value(1.2).endArray()
.endObject()),
@ -256,12 +262,14 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testLonLatArrayArrayStored() throws Exception {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
String mapping = Strings.toString(xContentBuilder.field("store", true).field("doc_values", false).endObject().endObject()
String mapping = Strings.toString(xContentBuilder.field("store", true)
.field("doc_values", false).endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("point")
.startArray().value(1.3).value(1.2).endArray()
@ -311,7 +319,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testMultiField() throws Exception {
int numDocs = randomIntBetween(10, 100);
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("pin")
.startObject("properties").startObject("location")
.field("type", "geo_point")
.startObject("fields")
.startObject("geohash").field("type", "keyword").endObject() // test geohash as keyword
@ -326,13 +335,15 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
for (int i=0; i<numDocs; ++i) {
final GeoPoint pt = RandomGeoGenerator.randomPoint(random());
client().prepareIndex("test", "pin").setSource(jsonBuilder().startObject().startObject("location").field("lat", pt.lat())
client().prepareIndex("test", "pin").setSource(jsonBuilder().startObject()
.startObject("location").field("lat", pt.lat())
.field("lon", pt.lon()).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get();
}
// TODO these tests are bogus and need to be fixed
// query by geohash subfield
SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet();
SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash")
.setQuery(matchAllQuery()).execute().actionGet();
assertEquals(numDocs, searchResponse.getHits().getTotalHits());
// query by latlon subfield
@ -370,8 +381,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Object nullValue = ((GeoPointFieldMapper) fieldMapper).fieldType().nullValue();
assertThat(nullValue, equalTo(new GeoPoint(1, 2)));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("location")
.endObject()),
@ -380,8 +391,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("location"), notNullValue());
BytesRef defaultValue = doc.rootDoc().getField("location").binaryValue();
doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("location", "1, 2")
.endObject()),
@ -389,8 +400,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
// Shouldn't matter if we specify the value explicitly or use null value
assertThat(defaultValue, equalTo(doc.rootDoc().getField("location").binaryValue()));
doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("location", "3, 4")
.endObject()),
@ -411,8 +422,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("location", "1234.333")
.endObject()),
@ -434,8 +445,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.parse("type", new CompressedXContent(mapping));
MapperParsingException ex = expectThrows(MapperParsingException.class,
() -> defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
() -> defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("location", "1234.333")
.endObject()),


@ -58,7 +58,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -82,7 +83,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -99,7 +101,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -120,7 +123,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -135,7 +139,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -171,7 +176,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -190,7 +196,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -205,7 +212,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -224,7 +232,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -247,7 +256,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -394,7 +404,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@ -455,14 +466,19 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
public void testGeoShapeMapperMerge() throws Exception {
String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("shape").field("type", "geo_shape").field("tree", "geohash").field("strategy", "recursive")
.field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01).field("orientation", "ccw")
.startObject("shape").field("type", "geo_shape").field("tree", "geohash")
.field("strategy", "recursive")
.field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01)
.field("orientation", "ccw")
.endObject().endObject().endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE);
DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping),
MapperService.MergeReason.MAPPING_UPDATE);
String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree")
.field("strategy", "term").field("precision", "1km").field("tree_levels", 26).field("distance_error_pct", 26)
.startObject("properties").startObject("shape").field("type", "geo_shape")
.field("tree", "quadtree")
.field("strategy", "term").field("precision", "1km")
.field("tree_levels", 26).field("distance_error_pct", 26)
.field("orientation", "cw").endObject().endObject().endObject().endObject());
try {
mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
@ -490,7 +506,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
// correct mapping
stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m")
.field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject());
.field("tree_levels", 8).field("distance_error_pct", 0.001)
.field("orientation", "cw").endObject().endObject().endObject().endObject());
docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
fieldMapper = docMapper.mappers().getMapper("shape");
@ -599,7 +616,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));


@ -46,7 +46,8 @@ public class IdFieldMapperTests extends ESSingleNodeTestCase {
public void testIncludeInObjectNotAllowed() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
try {
docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
@ -61,7 +62,8 @@ public class IdFieldMapperTests extends ESSingleNodeTestCase {
Settings indexSettings = Settings.EMPTY;
MapperService mapperService = createIndex("test", indexSettings).mapperService();
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
ParsedDocument document = mapper.parse(SourceToParse.source("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
ParsedDocument document = mapper.parse(SourceToParse.source("index", "type", "id",
new BytesArray("{}"), XContentType.JSON));
IndexableField[] fields = document.rootDoc().getFields(IdFieldMapper.NAME);
assertEquals(1, fields.length);
assertEquals(IndexOptions.DOCS, fields[0].fieldType().indexOptions());


@ -44,10 +44,11 @@ public class IndexFieldMapperTests extends ESSingleNodeTestCase {
public void testDefaultDisabledIndexMapper() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject()),


@ -35,7 +35,6 @@ import org.elasticsearch.indices.InvalidTypeNameException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Collection;
@ -77,7 +76,8 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
.addMapping(type, field, "type=text")
.execute().actionGet();
});
assertTrue(e.getMessage(), e.getMessage().contains("mapping type name [" + type + "] is too long; limit is length 255 but was [256]"));
assertTrue(e.getMessage(), e.getMessage().contains("mapping type name [" + type
+ "] is too long; limit is length 255 but was [256]"));
}
public void testTypeValidation() {
@ -92,9 +92,9 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
public void testIndexIntoDefaultMapping() throws Throwable {
// 1. test implicit index creation
ExecutionException e = expectThrows(ExecutionException.class, () -> {
client().prepareIndex("index1", MapperService.DEFAULT_MAPPING, "1").setSource("{}", XContentType.JSON).execute().get();
});
ExecutionException e = expectThrows(ExecutionException.class,
() -> client().prepareIndex("index1", MapperService.DEFAULT_MAPPING, "1")
.setSource("{}", XContentType.JSON).execute().get());
Throwable throwable = ExceptionsHelper.unwrapCause(e.getCause());
if (throwable instanceof IllegalArgumentException) {
assertEquals("It is forbidden to index into the default mapping [_default_]", throwable.getMessage());
@ -122,14 +122,15 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
*/
public void testTotalFieldsLimit() throws Throwable {
int totalFieldsLimit = randomIntBetween(1, 10);
Settings settings = Settings.builder().put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), totalFieldsLimit).build();
Settings settings = Settings.builder().put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), totalFieldsLimit)
.build();
createIndex("test1", settings).mapperService().merge("type", createMappingSpecifyingNumberOfFields(totalFieldsLimit),
MergeReason.MAPPING_UPDATE);
// adding one more field should trigger exception
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
createIndex("test2", settings).mapperService().merge("type", createMappingSpecifyingNumberOfFields(totalFieldsLimit + 1),
MergeReason.MAPPING_UPDATE);
createIndex("test2", settings).mapperService().merge("type",
createMappingSpecifyingNumberOfFields(totalFieldsLimit + 1), MergeReason.MAPPING_UPDATE);
});
assertTrue(e.getMessage(),
e.getMessage().contains("Limit of total fields [" + totalFieldsLimit + "] in index [test2] has been exceeded"));
@ -148,7 +149,8 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
}
public void testMappingDepthExceedsLimit() throws Throwable {
IndexService indexService1 = createIndex("test1", Settings.builder().put(MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING.getKey(), 1).build());
IndexService indexService1 = createIndex("test1",
Settings.builder().put(MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING.getKey(), 1).build());
// no exception
indexService1.mapperService().merge("type", createMappingSpecifyingNumberOfFields(1), MergeReason.MAPPING_UPDATE);
@ -310,7 +312,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> mapperService.merge("type2", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), Matchers.startsWith("Rejecting mapping update to [test] as the final mapping would have more than 1 type: "));
assertThat(e.getMessage(), startsWith("Rejecting mapping update to [test] as the final mapping would have more than 1 type: "));
}
/**
@ -319,15 +321,17 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
*/
public void testForbidMultipleTypesWithConflictingMappings() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field1").field("type", "integer_range").endObject().endObject().endObject().endObject());
.startObject("properties").startObject("field1").field("type", "integer_range")
.endObject().endObject().endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type2")
.startObject("properties").startObject("field1").field("type", "integer").endObject().endObject().endObject().endObject());
.startObject("properties").startObject("field1").field("type", "integer")
.endObject().endObject().endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> mapperService.merge("type2", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), Matchers.startsWith("Rejecting mapping update to [test] as the final mapping would have more than 1 type: "));
assertThat(e.getMessage(), startsWith("Rejecting mapping update to [test] as the final mapping would have more than 1 type: "));
}
public void testDefaultMappingIsRejectedOn7() throws IOException {
@ -335,8 +339,8 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
MapperService mapperService = createIndex("test").mapperService();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> mapperService.merge("_default_", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE));
assertEquals("The [default] mapping cannot be updated on index [test]: defaults mappings are not useful anymore now that indices " +
"can have at most one type.", e.getMessage());
assertEquals("The [default] mapping cannot be updated on index [test]: defaults mappings are not useful anymore now"
+ " that indices can have at most one type.", e.getMessage());
}
}


@ -43,7 +43,8 @@ public class MultiFieldCopyToMapperTests extends ESTestCase {
mapperService.parse("type", new CompressedXContent(Strings.toString(mapping)), true);
fail("Parsing should throw an exception because the mapping contains a copy_to in a multi field");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), equalTo("copy_to in multi fields is not allowed. Found the copy_to in field [c] which is within a multi field."));
assertThat(e.getMessage(), equalTo("copy_to in multi fields is not allowed. Found the copy_to in field [c]"
+ " which is within a multi field."));
}
}


@ -135,7 +135,8 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
String builtMapping = builderDocMapper.mappingSource().string();
// reparse it
DocumentMapper docMapper = indexService.mapperService().documentMapperParser().parse("person", new CompressedXContent(builtMapping));
DocumentMapper docMapper = indexService.mapperService().documentMapperParser()
.parse("person", new CompressedXContent(builtMapping));
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
@ -177,13 +178,15 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
}
builder = builder.endObject().endObject().endObject().endObject().endObject();
String mapping = Strings.toString(builder);
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
Arrays.sort(multiFieldNames);
Map<String, Object> sourceAsMap =
XContentHelper.convertToMap(docMapper.mappingSource().compressedReference(), true, builder.contentType()).v2();
@SuppressWarnings("unchecked")
Map<String, Object> multiFields = (Map<String, Object>) XContentMapValues.extractValue("type.properties.my_field.fields", sourceAsMap);
Map<String, Object> multiFields =
(Map<String, Object>) XContentMapValues.extractValue("type.properties.my_field.fields", sourceAsMap);
assertThat(multiFields.size(), equalTo(multiFieldNames.length));
int i = 0;
@ -195,7 +198,8 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
public void testObjectFieldNotAllowed() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field")
.field("type", "text").startObject("fields").startObject("multi").field("type", "object").endObject().endObject()
.field("type", "text").startObject("fields").startObject("multi").field("type", "object")
.endObject().endObject()
.endObject().endObject().endObject().endObject());
final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
try {
@ -208,7 +212,8 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
public void testNestedFieldNotAllowed() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field")
.field("type", "text").startObject("fields").startObject("multi").field("type", "nested").endObject().endObject()
.field("type", "text").startObject("fields").startObject("multi").field("type", "nested")
.endObject().endObject()
.endObject().endObject().endObject().endObject());
final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
try {


@ -60,7 +60,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
.startObject("nested1").field("type", "nested").endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@ -88,7 +89,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
.startObject("nested1").field("type", "nested").endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
@ -139,7 +141,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
@ -151,13 +154,21 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(false));
assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.startArray("nested1")
.startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject()
.startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject()
.startObject().field("field1", "1").startArray("nested2")
.startObject().field("field2", "2").endObject()
.startObject().field("field2", "3").endObject()
.endArray()
.endObject()
.startObject().field("field1", "4")
.startArray("nested2")
.startObject().field("field2", "5").endObject()
.startObject().field("field2", "6").endObject()
.endArray().endObject()
.endArray()
.endObject()),
XContentType.JSON));
@ -191,7 +202,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
@ -203,13 +215,21 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(true));
assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.startArray("nested1")
.startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject()
.startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject()
.startObject().field("field1", "1")
.startArray("nested2")
.startObject().field("field2", "2").endObject()
.startObject().field("field2", "3").endObject()
.endArray().endObject()
.startObject().field("field1", "4")
.startArray("nested2")
.startObject().field("field2", "5").endObject()
.startObject().field("field2", "6").endObject()
.endArray().endObject()
.endArray()
.endObject()),
XContentType.JSON));
@ -237,13 +257,16 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
}
public void testMultiObjectAndNested2() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("nested1").field("type", "nested").field("include_in_parent", true).startObject("properties")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("nested1").field("type", "nested").field("include_in_parent", true)
.startObject("properties")
.startObject("nested2").field("type", "nested").field("include_in_parent", true)
.endObject().endObject().endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
@ -255,13 +278,21 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(true));
assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.startArray("nested1")
.startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject()
.startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject()
.startObject().field("field1", "1")
.startArray("nested2")
.startObject().field("field2", "2").endObject()
.startObject().field("field2", "3").endObject()
.endArray().endObject()
.startObject().field("field1", "4")
.startArray("nested2")
.startObject().field("field2", "5").endObject()
.startObject().field("field2", "6").endObject()
.endArray().endObject()
.endArray()
.endObject()),
XContentType.JSON));
@ -295,7 +326,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
@ -307,13 +339,21 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(false));
assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(true));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.startArray("nested1")
.startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject()
.startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject()
.startObject().field("field1", "1")
.startArray("nested2")
.startObject().field("field2", "2").endObject()
.startObject().field("field2", "3").endObject()
.endArray().endObject()
.startObject().field("field1", "4")
.startArray("nested2")
.startObject().field("field2", "5").endObject()
.startObject().field("field2", "6").endObject()
.endArray().endObject()
.endArray()
.endObject()),
XContentType.JSON));
@ -348,15 +388,18 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
public void testMultipleLevelsIncludeRoot1() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject().startObject("type").startObject("properties")
.startObject("nested1").field("type", "nested").field("include_in_root", true).field("include_in_parent", true).startObject("properties")
.startObject("nested2").field("type", "nested").field("include_in_root", true).field("include_in_parent", true)
.startObject("nested1").field("type", "nested").field("include_in_root", true)
.field("include_in_parent", true).startObject("properties")
.startObject("nested2").field("type", "nested").field("include_in_root", true)
.field("include_in_parent", true)
.endObject().endObject().endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().startArray("nested1")
.startObject().startArray("nested2").startObject().field("foo", "bar")
.endObject().endArray().endObject().endArray()
@ -386,10 +429,11 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().endObject().endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().startArray("nested1")
.startObject().startArray("nested2")
.startObject().startArray("nested3").startObject().field("foo", "bar")
@ -408,15 +452,16 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
assertThat(nested1Mapper.nested().isNested(), equalTo(true));
assertThat(nested1Mapper.dynamic(), equalTo(Dynamic.STRICT));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.startArray("nested1")
@ -448,22 +493,26 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
};
// default limit allows at least two nested fields
createIndex("test1").mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE);
createIndex("test1").mapperService().merge("type", new CompressedXContent(mapping.apply("type")),
MergeReason.MAPPING_UPDATE);
// explicitly setting limit to 0 prevents nested fields
Exception e = expectThrows(IllegalArgumentException.class, () ->
createIndex("test2", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())
createIndex("test2", Settings.builder()
.put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("Limit of nested fields [0] in index [test2] has been exceeded"));
// setting limit to 1 with 2 nested fields fails
e = expectThrows(IllegalArgumentException.class, () ->
createIndex("test3", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 1).build())
createIndex("test3", Settings.builder()
.put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 1).build())
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("Limit of nested fields [1] in index [test3] has been exceeded"));
// do not check nested fields limit if mapping is not updated
createIndex("test4", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())
createIndex("test4", Settings.builder()
.put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_RECOVERY);
}
@@ -519,7 +568,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
docBuilder.endArray();
}
docBuilder.endObject();
SourceToParse source1 = SourceToParse.source("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
SourceToParse source1 = SourceToParse.source("test1", "type", "1",
BytesReference.bytes(docBuilder), XContentType.JSON);
MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source1));
assertEquals(
"The number of nested documents has exceeded the allowed limit of [" + defaultMaxNoNestedDocs
@@ -551,7 +601,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
docBuilder.endArray();
}
docBuilder.endObject();
SourceToParse source1 = SourceToParse.source("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
SourceToParse source1 = SourceToParse.source("test1", "type", "1",
BytesReference.bytes(docBuilder), XContentType.JSON);
ParsedDocument doc = docMapper.parse(source1);
assertThat(doc.docs().size(), equalTo(3));
@@ -568,7 +619,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
docBuilder2.endArray();
}
docBuilder2.endObject();
SourceToParse source2 = SourceToParse.source("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON);
SourceToParse source2 = SourceToParse.source("test1", "type", "2",
BytesReference.bytes(docBuilder2), XContentType.JSON);
MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2));
assertEquals(
"The number of nested documents has exceeded the allowed limit of [" + maxNoNestedDocs
@@ -605,7 +657,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
docBuilder.endArray();
}
docBuilder.endObject();
SourceToParse source1 = SourceToParse.source("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
SourceToParse source1 = SourceToParse.source("test1", "type", "1",
BytesReference.bytes(docBuilder), XContentType.JSON);
ParsedDocument doc = docMapper.parse(source1);
assertThat(doc.docs().size(), equalTo(3));
@@ -627,7 +680,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
}
docBuilder2.endObject();
SourceToParse source2 = SourceToParse.source("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON);
SourceToParse source2 = SourceToParse.source("test1", "type", "2",
BytesReference.bytes(docBuilder2), XContentType.JSON);
MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2));
assertEquals(
"The number of nested documents has exceeded the allowed limit of [" + maxNoNestedDocs
@@ -660,8 +714,8 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
assertThat(nested1Mapper.nested().isNested(), equalTo(true));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.startArray("nested1")


@@ -36,10 +36,11 @@ public class NullValueObjectMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("obj1").field("type", "object").endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("obj1").endObject()
.field("value1", "test1")
@@ -48,8 +49,8 @@ public class NullValueObjectMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().get("value1"), equalTo("test1"));
doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("obj1")
.field("value1", "test1")
@@ -58,8 +59,8 @@ public class NullValueObjectMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().get("value1"), equalTo("test1"));
doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("obj1").field("field", "value").endObject()
.field("value1", "test1")


@@ -40,7 +40,8 @@ public class ObjectMapperTests extends ESSingleNodeTestCase {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
defaultMapper.parse(SourceToParse.source("test", "type", "1", new BytesArray(" {\n" +
" \"object\": {\n" +
@@ -68,8 +69,8 @@ public class ObjectMapperTests extends ESSingleNodeTestCase {
}
public void testEmptyFieldsArrayMultiFields() throws Exception {
String mapping = Strings
.toString(XContentFactory.jsonBuilder()
String mapping =
Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("tweet")
.startObject("properties")
@@ -85,8 +86,7 @@ public class ObjectMapperTests extends ESSingleNodeTestCase {
}
public void testFieldsArrayMultiFieldsShouldThrowException() throws Exception {
String mapping = Strings
.toString(XContentFactory.jsonBuilder()
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("tweet")
.startObject("properties")
@@ -110,8 +110,7 @@ public class ObjectMapperTests extends ESSingleNodeTestCase {
}
public void testEmptyFieldsArray() throws Exception {
String mapping = Strings
.toString(XContentFactory.jsonBuilder()
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("tweet")
.startObject("properties")
@@ -124,8 +123,7 @@ public class ObjectMapperTests extends ESSingleNodeTestCase {
}
public void testFieldsWithFilledArrayShouldThrowException() throws Exception {
String mapping = Strings
.toString(XContentFactory.jsonBuilder()
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("tweet")
.startObject("properties")
@@ -145,8 +143,7 @@ public class ObjectMapperTests extends ESSingleNodeTestCase {
}
public void testFieldPropertiesArray() throws Exception {
String mapping = Strings
.toString(XContentFactory.jsonBuilder()
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("tweet")
.startObject("properties")
@@ -185,7 +182,8 @@ public class ObjectMapperTests extends ESSingleNodeTestCase {
}
public void testEmptyName() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("")
.startObject("properties")
.startObject("name")


@@ -31,7 +31,8 @@ import static org.hamcrest.Matchers.nullValue;
public class PathMapperTests extends ESSingleNodeTestCase {
public void testPathMapping() throws IOException {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/path/test-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("person", new CompressedXContent(mapping));
// test full name
assertThat(docMapper.mappers().getMapper("first1"), nullValue());


@@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESSingleNodeTestCase;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class RoutingFieldMapperTests extends ESSingleNodeTestCase {
@@ -33,7 +34,8 @@ public class RoutingFieldMapperTests extends ESSingleNodeTestCase {
public void testRoutingMapper() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
@@ -48,14 +50,16 @@ public class RoutingFieldMapperTests extends ESSingleNodeTestCase {
public void testIncludeInObjectNotAllowed() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
try {
docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("_routing", "foo").endObject()),XContentType.JSON));
fail("Expected failure to parse metadata field");
} catch (MapperParsingException e) {
assertTrue(e.getMessage(), e.getMessage().contains("Field [_routing] is a metadata field and cannot be added inside a document"));
assertThat(e.getMessage(), e.getMessage(),
containsString("Field [_routing] is a metadata field and cannot be added inside a document"));
}
}
}
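(Editorial sketch, not part of this commit: the try/catch above asserts that a `_routing` value placed inside the document body is rejected at parse time. The same check, written with the expectThrows helper and matcher imports already visible elsewhere in this change, assuming the same docMapper and test-class context:)

    MapperParsingException err = expectThrows(MapperParsingException.class, () ->
        docMapper.parse(SourceToParse.source("test", "type", "1",
            BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_routing", "foo").endObject()),
            XContentType.JSON)));
    assertThat(err.getMessage(),
        containsString("Field [_routing] is a metadata field and cannot be added inside a document"));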


@@ -54,7 +54,8 @@ public class SourceFieldMapperTests extends ESSingleNodeTestCase {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("field", "value")
.endObject()),
XContentType.JSON));
@@ -62,7 +63,8 @@ public class SourceFieldMapperTests extends ESSingleNodeTestCase {
assertThat(XContentFactory.xContentType(doc.source().toBytesRef().bytes), equalTo(XContentType.JSON));
documentMapper = parser.parse("type", new CompressedXContent(mapping));
doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.smileBuilder().startObject()
doc = documentMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.smileBuilder().startObject()
.field("field", "value")
.endObject()),
XContentType.SMILE));
@@ -75,9 +77,11 @@ public class SourceFieldMapperTests extends ESSingleNodeTestCase {
.startObject("_source").array("includes", new String[]{"path1*"}).endObject()
.endObject().endObject());
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("path1").field("field1", "value1").endObject()
.startObject("path2").field("field2", "value2").endObject()
.endObject()),
@@ -97,9 +101,11 @@ public class SourceFieldMapperTests extends ESSingleNodeTestCase {
.startObject("_source").array("excludes", new String[]{"path1*"}).endObject()
.endObject().endObject());
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("path1").field("field1", "value1").endObject()
.startObject("path2").field("field2", "value2").endObject()
.endObject()),
@@ -206,10 +212,12 @@ public class SourceFieldMapperTests extends ESSingleNodeTestCase {
public void testSourceObjectContainsExtraTokens() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
try {
documentMapper.parse(SourceToParse.source("test", "type", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object (invalid JSON)
documentMapper.parse(SourceToParse.source("test", "type", "1",
new BytesArray("{}}"), XContentType.JSON)); // extra end object (invalid JSON)
fail("Expected parse exception");
} catch (MapperParsingException e) {
assertNotNull(e.getRootCause());


@@ -80,7 +80,8 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource();
// simulate like in MetaDataMappingService#putMapping
try {
indexService.mapperService().merge("type", new CompressedXContent(BytesReference.bytes(mappingUpdate)), MapperService.MergeReason.MAPPING_UPDATE);
indexService.mapperService().merge("type", new CompressedXContent(BytesReference.bytes(mappingUpdate)),
MapperService.MergeReason.MAPPING_UPDATE);
fail();
} catch (IllegalArgumentException e) {
// expected