Move mapper validation to the mappers themselves (#60072) (#60649)

Currently, validation of mappers (checking that cross-references are correct, enforcing limits on
field name lengths and object depths, detecting duplicate definitions, etc.) is performed by the
MapperService. This means that any mapper-specific validation, for example that done on the
CompletionFieldMapper, needs to be called explicitly from core server code, and so we cannot add
validation to mappers that live in plugins.

This commit reworks the validation framework so that mapper-specific validation is
done on the Mapper itself. Mapper gets a new `validate(MappingLookup)` method
(already present on `MetadataFieldMapper` and now pulled up to the parent interface),
which is called from a new `DocumentMapper.validate()` method. All the validation code
currently living on `MapperService` moves to one of three places: individual mapper
implementations (`FieldAliasMapper`, `CompletionFieldMapper`); `MappingLookup`, a
reworked `DocumentFieldMappers` that now knows about object fields and can check for
duplicate definitions; or `DocumentMapper`, which handles the soft-limit checks.
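
To make the new control flow concrete, here is a small standalone sketch of the pattern in
plain Java. It is not the Elasticsearch code itself: only the shape of `validate(MappingLookup)`
and the duplicate/alias checks mirror this commit, while the class names (`AliasMapperStub`,
`ValidationDemo`) and the field names used in `main` are made up for illustration.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Standalone sketch of the new validation flow; simplified stand-ins, not the real classes.
interface Mapper {
    String name();
    // New in this commit: each mapper validates its own cross-field references.
    void validate(MappingLookup lookup);
}

// Stand-in for MappingLookup: a flat view of every mapper, keyed by full name.
final class MappingLookup {
    private final Map<String, Mapper> mappers = new HashMap<>();

    MappingLookup(List<Mapper> allMappers) {
        for (Mapper mapper : allMappers) {
            if (mappers.put(mapper.name(), mapper) != null) {
                // Duplicate-definition checks now live in the lookup rather than in MapperService.
                throw new IllegalArgumentException("Field [" + mapper.name() + "] is defined more than once");
            }
        }
    }

    Mapper getMapper(String field) {
        return mappers.get(field);
    }
}

// Analogue of FieldAliasMapper: its validate() checks that the alias target exists
// and is not itself an alias, mirroring the checks moved out of MapperMergeValidator.
final class AliasMapperStub implements Mapper {
    private final String name;
    private final String path;

    AliasMapperStub(String name, String path) {
        this.name = name;
        this.path = path;
    }

    @Override
    public String name() {
        return name;
    }

    @Override
    public void validate(MappingLookup lookup) {
        if (name.equals(path)) {
            throw new IllegalArgumentException("alias [" + name + "] cannot refer to itself");
        }
        Mapper target = lookup.getMapper(path);
        if (target == null) {
            throw new IllegalArgumentException("alias [" + name + "] refers to a missing field [" + path + "]");
        }
        if (target instanceof AliasMapperStub) {
            throw new IllegalArgumentException("alias [" + name + "] cannot refer to another alias");
        }
    }
}

// Analogue of DocumentMapper.validate(): build the lookup once, then let every mapper
// (including mappers contributed by plugins) run its own checks against it.
final class ValidationDemo {
    public static void main(String[] args) {
        List<Mapper> mappers = List.of(new AliasMapperStub("headline", "title"));
        MappingLookup lookup = new MappingLookup(mappers);
        for (Mapper mapper : mappers) {
            mapper.validate(lookup); // throws: the alias target "title" is not mapped
        }
    }
}

In the real change, `FieldMapper.validate(MappingLookup)` is final and performs the shared
copy_to and multi-field checks before delegating to a `doValidate` hook, so mappers such as
`CompletionFieldMapper` only implement the part that is specific to them.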
Alan Woodward 2020-08-04 14:39:20 +01:00 committed by GitHub
parent 212ce22d15
commit b3ae5d26bd
33 changed files with 535 additions and 713 deletions

View File

@ -34,7 +34,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
@ -91,7 +91,7 @@ public final class ParentJoinFieldMapper extends FieldMapper {
}
DocumentMapper mapper = service.documentMapper();
String joinField = fieldType.getJoinField();
DocumentFieldMappers fieldMappers = mapper.mappers();
MappingLookup fieldMappers = mapper.mappers();
return (ParentJoinFieldMapper) fieldMappers.getMapper(joinField);
}

View File

@ -39,7 +39,7 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
@ -281,8 +281,8 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase {
MetaJoinFieldMapper.MetaJoinFieldType metaJoinFieldType = mock(MetaJoinFieldMapper.MetaJoinFieldType.class);
when(metaJoinFieldType.getJoinField()).thenReturn("join_field");
when(mapperService.fieldType("_parent_join")).thenReturn(metaJoinFieldType);
DocumentFieldMappers fieldMappers = new DocumentFieldMappers(Collections.singleton(joinFieldMapper),
Collections.emptyList(), null);
MappingLookup fieldMappers = new MappingLookup(Collections.singleton(joinFieldMapper),
Collections.emptyList(), Collections.emptyList(), 0, null);
DocumentMapper mockMapper = mock(DocumentMapper.class);
when(mockMapper.mappers()).thenReturn(fieldMappers);
when(mapperService.documentMapper()).thenReturn(mockMapper);

View File

@ -40,7 +40,7 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
@ -232,8 +232,8 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
MetaJoinFieldMapper.MetaJoinFieldType metaJoinFieldType = mock(MetaJoinFieldMapper.MetaJoinFieldType.class);
when(metaJoinFieldType.getJoinField()).thenReturn("join_field");
when(mapperService.fieldType("_parent_join")).thenReturn(metaJoinFieldType);
DocumentFieldMappers fieldMappers = new DocumentFieldMappers(Collections.singleton(joinFieldMapper),
Collections.emptyList(), null);
MappingLookup fieldMappers = new MappingLookup(Collections.singleton(joinFieldMapper),
Collections.emptyList(), Collections.emptyList(), 0, null);
DocumentMapper mockMapper = mock(DocumentMapper.class);
when(mockMapper.mappers()).thenReturn(fieldMappers);
when(mapperService.documentMapper()).thenReturn(mockMapper);

View File

@ -400,9 +400,9 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject());
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type",
MapperParsingException exc = expectThrows(MapperParsingException.class, () -> indexService.mapperService().merge("type",
new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice."));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined more than once"));
}
{
@ -426,9 +426,9 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject());
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type",
MapperParsingException exc = expectThrows(MapperParsingException.class, () -> indexService.mapperService().merge("type",
new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice."));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined more than once"));
}
}

View File

@ -157,7 +157,7 @@ public class DynamicMappingIT extends ESIntegTestCase {
try {
assertThat(
expectThrows(IllegalArgumentException.class, () -> indexRequestBuilder.get(TimeValue.timeValueSeconds(10))).getMessage(),
Matchers.containsString("Limit of total fields [2] in index [index] has been exceeded"));
Matchers.containsString("Limit of total fields [2] has been exceeded"));
} finally {
indexingCompletedLatch.countDown();
}

View File

@ -38,7 +38,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.shard.ShardId;
@ -174,7 +174,7 @@ public class TransportGetFieldMappingsIndexAction
DocumentMapper documentMapper,
GetFieldMappingsIndexRequest request) {
Map<String, FieldMappingMetadata> fieldMappings = new HashMap<>();
final DocumentFieldMappers allFieldMappers = documentMapper.mappers();
final MappingLookup allFieldMappers = documentMapper.mappers();
for (String field : request.fields()) {
if (Regex.isMatchAllPattern(field)) {
for (Mapper fieldMapper : allFieldMappers) {

View File

@ -91,7 +91,7 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
@Override
public ParametrizedFieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName(), defaultAnalyzer).init(this);
return new Builder(simpleName(), defaultAnalyzer, indexVersionCreated).init(this);
}
public static class Defaults {
@ -148,15 +148,17 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
private final Parameter<Map<String, String>> meta = Parameter.metaParam();
private final NamedAnalyzer defaultAnalyzer;
private final Version indexVersionCreated;
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(Builder.class));
/**
* @param name of the completion field to build
*/
public Builder(String name, NamedAnalyzer defaultAnalyzer) {
public Builder(String name, NamedAnalyzer defaultAnalyzer, Version indexVersionCreated) {
super(name);
this.defaultAnalyzer = defaultAnalyzer;
this.indexVersionCreated = indexVersionCreated;
this.analyzer = Parameter.analyzerParam("analyzer", false, m -> toType(m).analyzer, () -> defaultAnalyzer);
this.searchAnalyzer
= Parameter.analyzerParam("search_analyzer", true, m -> toType(m).searchAnalyzer, analyzer::getValue);
@ -202,7 +204,7 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
ft.setPreserveSep(preserveSeparators.getValue());
ft.setIndexAnalyzer(analyzer.getValue());
return new CompletionFieldMapper(name, ft, defaultAnalyzer,
multiFieldsBuilder.build(this, context), copyTo.build(), this);
multiFieldsBuilder.build(this, context), copyTo.build(), indexVersionCreated, this);
}
private void checkCompletionContextsLimit(BuilderContext context) {
@ -220,7 +222,8 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
public static final Set<String> ALLOWED_CONTENT_FIELD_NAMES = Sets.newHashSet(Fields.CONTENT_FIELD_NAME_INPUT,
Fields.CONTENT_FIELD_NAME_WEIGHT, Fields.CONTENT_FIELD_NAME_CONTEXTS);
public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, c.getIndexAnalyzers().get("simple")));
public static final TypeParser PARSER
= new TypeParser((n, c) -> new Builder(n, c.getIndexAnalyzers().get("simple"), c.indexVersionCreated()));
public static final class CompletionFieldType extends TermBasedFieldType {
@ -329,9 +332,10 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
private final NamedAnalyzer analyzer;
private final NamedAnalyzer searchAnalyzer;
private final ContextMappings contexts;
private final Version indexVersionCreated;
public CompletionFieldMapper(String simpleName, MappedFieldType mappedFieldType, NamedAnalyzer defaultAnalyzer,
MultiFields multiFields, CopyTo copyTo, Builder builder) {
MultiFields multiFields, CopyTo copyTo, Version indexVersionCreated, Builder builder) {
super(simpleName, mappedFieldType, multiFields, copyTo);
this.defaultAnalyzer = defaultAnalyzer;
this.maxInputLength = builder.maxInputLength.getValue();
@ -340,6 +344,7 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
this.analyzer = builder.analyzer.getValue();
this.searchAnalyzer = builder.searchAnalyzer.getValue();
this.contexts = builder.contexts.getValue();
this.indexVersionCreated = indexVersionCreated;
}
@Override
@ -568,6 +573,12 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
return CONTENT_TYPE;
}
@Override
public void doValidate(MappingLookup mappers) {
if (fieldType().hasContextMappings()) {
for (ContextMapping<?> contextMapping : fieldType().getContextMappings()) {
contextMapping.validateReferences(indexVersionCreated, s -> mappers.fieldTypes().get(s));
}
}
}
}

View File

@ -1,86 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
public final class DocumentFieldMappers implements Iterable<Mapper> {
/** Full field name to mapper */
private final Map<String, Mapper> fieldMappers;
private final FieldNameAnalyzer indexAnalyzer;
private static void put(Map<String, Analyzer> analyzers, String key, Analyzer value, Analyzer defaultValue) {
if (value == null) {
value = defaultValue;
}
analyzers.put(key, value);
}
public DocumentFieldMappers(Collection<FieldMapper> mappers,
Collection<FieldAliasMapper> aliasMappers,
Analyzer defaultIndex) {
Map<String, Mapper> fieldMappers = new HashMap<>();
Map<String, Analyzer> indexAnalyzers = new HashMap<>();
for (FieldMapper mapper : mappers) {
fieldMappers.put(mapper.name(), mapper);
MappedFieldType fieldType = mapper.fieldType();
put(indexAnalyzers, fieldType.name(), fieldType.indexAnalyzer(), defaultIndex);
}
for (FieldAliasMapper aliasMapper : aliasMappers) {
fieldMappers.put(aliasMapper.name(), aliasMapper);
}
this.fieldMappers = Collections.unmodifiableMap(fieldMappers);
this.indexAnalyzer = new FieldNameAnalyzer(indexAnalyzers);
}
/**
* Returns the leaf mapper associated with this field name. Note that the returned mapper
* could be either a concrete {@link FieldMapper}, or a {@link FieldAliasMapper}.
*
* To access a field's type information, {@link MapperService#fieldType} should be used instead.
*/
public Mapper getMapper(String field) {
return fieldMappers.get(field);
}
/**
* A smart analyzer used for indexing that takes into account specific analyzers configured
* per {@link FieldMapper}.
*/
public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
@Override
public Iterator<Mapper> iterator() {
return fieldMappers.values().iterator();
}
}

View File

@ -44,13 +44,9 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Stream;
@ -60,7 +56,7 @@ public class DocumentMapper implements ToXContentFragment {
public static class Builder {
private Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>();
private final Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>();
private final RootObjectMapper rootObjectMapper;
@ -111,7 +107,7 @@ public class DocumentMapper implements ToXContentFragment {
Mapping mapping = new Mapping(
mapperService.getIndexSettings().getIndexVersionCreated(),
rootObjectMapper,
metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]),
metadataMappers.values().toArray(new MetadataFieldMapper[0]),
meta);
return new DocumentMapper(mapperService, mapping);
}
@ -128,11 +124,8 @@ public class DocumentMapper implements ToXContentFragment {
private final DocumentParser documentParser;
private final DocumentFieldMappers fieldMappers;
private final MappingLookup fieldMappers;
private final Map<String, ObjectMapper> objectMappers;
private final boolean hasNestedObjects;
private final MetadataFieldMapper[] deleteTombstoneMetadataFieldMappers;
private final MetadataFieldMapper[] noopTombstoneMetadataFieldMappers;
@ -144,39 +137,8 @@ public class DocumentMapper implements ToXContentFragment {
this.mapping = mapping;
this.documentParser = new DocumentParser(indexSettings, mapperService.documentMapperParser(), this);
// collect all the mappers for this type
List<ObjectMapper> newObjectMappers = new ArrayList<>();
List<FieldMapper> newFieldMappers = new ArrayList<>();
List<FieldAliasMapper> newFieldAliasMappers = new ArrayList<>();
for (MetadataFieldMapper metadataMapper : this.mapping.metadataMappers) {
if (metadataMapper instanceof FieldMapper) {
newFieldMappers.add(metadataMapper);
}
}
MapperUtils.collect(this.mapping.root,
newObjectMappers, newFieldMappers, newFieldAliasMappers);
final IndexAnalyzers indexAnalyzers = mapperService.getIndexAnalyzers();
this.fieldMappers = new DocumentFieldMappers(newFieldMappers,
newFieldAliasMappers,
indexAnalyzers.getDefaultIndexAnalyzer());
Map<String, ObjectMapper> builder = new HashMap<>();
for (ObjectMapper objectMapper : newObjectMappers) {
ObjectMapper previous = builder.put(objectMapper.fullPath(), objectMapper);
if (previous != null) {
throw new IllegalStateException("duplicate key " + objectMapper.fullPath() + " encountered");
}
}
boolean hasNestedObjects = false;
this.objectMappers = Collections.unmodifiableMap(builder);
for (ObjectMapper objectMapper : newObjectMappers) {
if (objectMapper.nested().isNested()) {
hasNestedObjects = true;
}
}
this.hasNestedObjects = hasNestedObjects;
this.fieldMappers = MappingLookup.fromMapping(this.mapping, indexAnalyzers.getDefaultIndexAnalyzer());
try {
mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS);
@ -251,15 +213,19 @@ public class DocumentMapper implements ToXContentFragment {
}
public boolean hasNestedObjects() {
return hasNestedObjects;
return mappers().hasNested();
}
public DocumentFieldMappers mappers() {
public MappingLookup mappers() {
return this.fieldMappers;
}
public FieldTypeLookup fieldTypes() {
return mappers().fieldTypes();
}
public Map<String, ObjectMapper> objectMappers() {
return this.objectMappers;
return mappers().objectMappers();
}
public ParsedDocument parse(SourceToParse source) throws MapperParsingException {
@ -296,7 +262,7 @@ public class DocumentMapper implements ToXContentFragment {
continue;
}
// We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and
// therefor is guaranteed to be a live doc.
// therefore is guaranteed to be a live doc.
final Weight nestedWeight = filter.createWeight(sc.searcher(), ScoreMode.COMPLETE_NO_SCORES, 1f);
Scorer scorer = nestedWeight.scorer(context);
if (scorer == null) {
@ -321,6 +287,22 @@ public class DocumentMapper implements ToXContentFragment {
return new DocumentMapper(mapperService, merged);
}
public void validate(IndexSettings settings, boolean checkLimits) {
this.mapping.validate(this.fieldMappers);
if (settings.getIndexMetadata().isRoutingPartitionedIndex()) {
if (routingFieldMapper().required() == false) {
throw new IllegalArgumentException("mapping type [" + type() + "] must have routing "
+ "required for partitioned index [" + settings.getIndex().getName() + "]");
}
}
if (settings.getIndexSortConfig().hasIndexSort() && hasNestedObjects()) {
throw new IllegalArgumentException("cannot have nested fields when index sort is activated");
}
if (checkLimits) {
this.fieldMappers.checkLimits(settings);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return mapping.toXContent(builder, params);
@ -336,8 +318,8 @@ public class DocumentMapper implements ToXContentFragment {
", mapping=" + mapping +
", documentParser=" + documentParser +
", fieldMappers=" + fieldMappers +
", objectMappers=" + objectMappers +
", hasNestedObjects=" + hasNestedObjects +
", objectMappers=" + objectMappers() +
", hasNestedObjects=" + hasNestedObjects() +
", deleteTombstoneMetadataFieldMappers=" + Arrays.toString(deleteTombstoneMetadataFieldMappers) +
", noopTombstoneMetadataFieldMappers=" + Arrays.toString(noopTombstoneMetadataFieldMappers) +
'}';

View File

@ -26,6 +26,7 @@ import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
/**
* A mapper for field aliases.
@ -87,6 +88,37 @@ public final class FieldAliasMapper extends Mapper {
.endObject();
}
@Override
public void validate(MappingLookup mappers) {
if (Objects.equals(this.path(), this.name())) {
throw new MapperParsingException("Invalid [path] value [" + path + "] for field alias [" +
name() + "]: an alias cannot refer to itself.");
}
if (mappers.fieldTypes().get(path) == null) {
throw new MapperParsingException("Invalid [path] value [" + path + "] for field alias [" +
name() + "]: an alias must refer to an existing field in the mappings.");
}
if (mappers.getMapper(path) instanceof FieldAliasMapper) {
throw new MapperParsingException("Invalid [path] value [" + path + "] for field alias [" +
name() + "]: an alias cannot refer to another alias.");
}
String aliasScope = mappers.getNestedScope(name);
String pathScope = mappers.getNestedScope(path);
if (!Objects.equals(aliasScope, pathScope)) {
StringBuilder message = new StringBuilder("Invalid [path] value [" + path + "] for field alias [" +
name + "]: an alias must have the same nested scope as its target. ");
message.append(aliasScope == null
? "The alias is not nested"
: "The alias's nested scope is [" + aliasScope + "]");
message.append(", but ");
message.append(pathScope == null
? "the target is not nested."
: "the target's nested scope is [" + pathScope + "].");
throw new IllegalArgumentException(message.toString());
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext)

View File

@ -224,6 +224,10 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
return copyTo;
}
public MultiFields multiFields() {
return multiFields;
}
/**
* A value to use in place of a {@code null} value in the document source.
*/
@ -349,6 +353,48 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
}
}
@Override
public final void validate(MappingLookup mappers) {
if (this.copyTo() != null && this.copyTo().copyToFields().isEmpty() == false) {
if (mappers.isMultiField(this.name())) {
throw new IllegalArgumentException("[copy_to] may not be used to copy from a multi-field: [" + this.name() + "]");
}
final String sourceScope = mappers.getNestedScope(this.name());
for (String copyTo : this.copyTo().copyToFields()) {
if (mappers.isMultiField(copyTo)) {
throw new IllegalArgumentException("[copy_to] may not be used to copy to a multi-field: [" + copyTo + "]");
}
if (mappers.isObjectField(copyTo)) {
throw new IllegalArgumentException("Cannot copy to field [" + copyTo + "] since it is mapped as an object");
}
final String targetScope = mappers.getNestedScope(copyTo);
checkNestedScopeCompatibility(sourceScope, targetScope);
}
}
for (Mapper multiField : multiFields()) {
multiField.validate(mappers);
}
doValidate(mappers);
}
protected void doValidate(MappingLookup mappers) { }
private static void checkNestedScopeCompatibility(String source, String target) {
boolean targetIsParentOfSource;
if (source == null || target == null) {
targetIsParentOfSource = target == null;
} else {
targetIsParentOfSource = source.equals(target) || source.startsWith(target + ".");
}
if (targetIsParentOfSource == false) {
throw new IllegalArgumentException(
"Illegal combination of [copy_to] and [nested] mappings: [copy_to] may only copy data to the current nested " +
"document or any of its parents, however one [copy_to] directive is trying to copy data from nested object [" +
source + "] to [" + target + "]");
}
}
@Override
public FieldMapper merge(Mapper mergeWith) {

View File

@ -190,4 +190,10 @@ public abstract class Mapper implements ToXContentFragment, Iterable<Mapper> {
* Both {@code this} and {@code mergeWith} will be left unmodified. */
public abstract Mapper merge(Mapper mergeWith);
/**
* Validate any cross-field references made by this mapper
* @param mappers a {@link MappingLookup} that can produce references to other mappers
*/
public abstract void validate(MappingLookup mappers);
}

View File

@ -1,215 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* A utility class that helps validate certain aspects of a mappings update.
*/
class MapperMergeValidator {
/**
* Validates the new mapping addition, checking whether duplicate entries are present and if the
* provided fields are compatible with the mappings that are already defined.
*
* @param objectMappers The newly added object mappers.
* @param fieldMappers The newly added field mappers.
* @param fieldAliasMappers The newly added field alias mappers.
*/
public static void validateNewMappers(Collection<ObjectMapper> objectMappers,
Collection<FieldMapper> fieldMappers,
Collection<FieldAliasMapper> fieldAliasMappers) {
Set<String> objectFullNames = new HashSet<>();
for (ObjectMapper objectMapper : objectMappers) {
String fullPath = objectMapper.fullPath();
if (objectFullNames.add(fullPath) == false) {
throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice.");
}
}
Set<String> fieldNames = new HashSet<>();
for (FieldMapper fieldMapper : fieldMappers) {
String name = fieldMapper.name();
if (objectFullNames.contains(name)) {
throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field.");
} else if (fieldNames.add(name) == false) {
throw new IllegalArgumentException("Field [" + name + "] is defined twice.");
}
}
Set<String> fieldAliasNames = new HashSet<>();
for (FieldAliasMapper fieldAliasMapper : fieldAliasMappers) {
String name = fieldAliasMapper.name();
if (objectFullNames.contains(name)) {
throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field.");
} else if (fieldNames.contains(name)) {
throw new IllegalArgumentException("Field [" + name + "] is defined both as an alias and a concrete field.");
} else if (fieldAliasNames.add(name) == false) {
throw new IllegalArgumentException("Field [" + name + "] is defined twice.");
}
validateFieldAliasMapper(name, fieldAliasMapper.path(), fieldNames, fieldAliasNames);
}
}
/**
* Checks that the new field alias is valid.
*
* Note that this method assumes that new concrete fields have already been processed, so that it
* can verify that an alias refers to an existing concrete field.
*/
private static void validateFieldAliasMapper(String aliasName,
String path,
Set<String> fieldMappers,
Set<String> fieldAliasMappers) {
if (path.equals(aliasName)) {
throw new IllegalArgumentException("Invalid [path] value [" + path + "] for field alias [" +
aliasName + "]: an alias cannot refer to itself.");
}
if (fieldAliasMappers.contains(path)) {
throw new IllegalArgumentException("Invalid [path] value [" + path + "] for field alias [" +
aliasName + "]: an alias cannot refer to another alias.");
}
if (fieldMappers.contains(path) == false) {
throw new IllegalArgumentException("Invalid [path] value [" + path + "] for field alias [" +
aliasName + "]: an alias must refer to an existing field in the mappings.");
}
}
/**
* Verifies that each field reference, e.g. the value of copy_to or the target
* of a field alias, corresponds to a valid part of the mapping.
*
* @param fieldMappers The newly added field mappers.
* @param fieldAliasMappers The newly added field alias mappers.
* @param fullPathObjectMappers All object mappers, indexed by their full path.
* @param fieldTypes All field and field alias mappers, collected into a lookup structure.
* @param metadataMappers the new metadata field mappers
* @param newMapper The newly created {@link DocumentMapper}
*/
public static void validateFieldReferences(List<FieldMapper> fieldMappers,
List<FieldAliasMapper> fieldAliasMappers,
Map<String, ObjectMapper> fullPathObjectMappers,
FieldTypeLookup fieldTypes,
MetadataFieldMapper[] metadataMappers,
DocumentMapper newMapper) {
validateCopyTo(fieldMappers, fullPathObjectMappers, fieldTypes);
validateFieldAliasTargets(fieldAliasMappers, fullPathObjectMappers);
validateMetadataFieldMappers(metadataMappers, newMapper);
}
private static void validateCopyTo(List<FieldMapper> fieldMappers,
Map<String, ObjectMapper> fullPathObjectMappers,
FieldTypeLookup fieldTypes) {
for (FieldMapper mapper : fieldMappers) {
if (mapper.copyTo() != null && mapper.copyTo().copyToFields().isEmpty() == false) {
String sourceParent = parentObject(mapper.name());
if (sourceParent != null && fieldTypes.get(sourceParent) != null) {
throw new IllegalArgumentException("[copy_to] may not be used to copy from a multi-field: [" + mapper.name() + "]");
}
final String sourceScope = getNestedScope(mapper.name(), fullPathObjectMappers);
for (String copyTo : mapper.copyTo().copyToFields()) {
String copyToParent = parentObject(copyTo);
if (copyToParent != null && fieldTypes.get(copyToParent) != null) {
throw new IllegalArgumentException("[copy_to] may not be used to copy to a multi-field: [" + copyTo + "]");
}
if (fullPathObjectMappers.containsKey(copyTo)) {
throw new IllegalArgumentException("Cannot copy to field [" + copyTo + "] since it is mapped as an object");
}
final String targetScope = getNestedScope(copyTo, fullPathObjectMappers);
checkNestedScopeCompatibility(sourceScope, targetScope);
}
}
}
}
private static void validateFieldAliasTargets(List<FieldAliasMapper> fieldAliasMappers,
Map<String, ObjectMapper> fullPathObjectMappers) {
for (FieldAliasMapper mapper : fieldAliasMappers) {
String aliasName = mapper.name();
String path = mapper.path();
String aliasScope = getNestedScope(aliasName, fullPathObjectMappers);
String pathScope = getNestedScope(path, fullPathObjectMappers);
if (!Objects.equals(aliasScope, pathScope)) {
StringBuilder message = new StringBuilder("Invalid [path] value [" + path + "] for field alias [" +
aliasName + "]: an alias must have the same nested scope as its target. ");
message.append(aliasScope == null
? "The alias is not nested"
: "The alias's nested scope is [" + aliasScope + "]");
message.append(", but ");
message.append(pathScope == null
? "the target is not nested."
: "the target's nested scope is [" + pathScope + "].");
throw new IllegalArgumentException(message.toString());
}
}
}
private static void validateMetadataFieldMappers(MetadataFieldMapper[] metadataMappers, DocumentMapper newMapper) {
for (MetadataFieldMapper metadataFieldMapper : metadataMappers) {
metadataFieldMapper.validate(newMapper.mappers());
}
}
private static String getNestedScope(String path, Map<String, ObjectMapper> fullPathObjectMappers) {
for (String parentPath = parentObject(path); parentPath != null; parentPath = parentObject(parentPath)) {
ObjectMapper objectMapper = fullPathObjectMappers.get(parentPath);
if (objectMapper != null && objectMapper.nested().isNested()) {
return parentPath;
}
}
return null;
}
private static void checkNestedScopeCompatibility(String source, String target) {
boolean targetIsParentOfSource;
if (source == null || target == null) {
targetIsParentOfSource = target == null;
} else {
targetIsParentOfSource = source.equals(target) || source.startsWith(target + ".");
}
if (targetIsParentOfSource == false) {
throw new IllegalArgumentException(
"Illegal combination of [copy_to] and [nested] mappings: [copy_to] may only copy data to the current nested " +
"document or any of its parents, however one [copy_to] directive is trying to copy data from nested object [" +
source + "] to [" + target + "]");
}
}
private static String parentObject(String field) {
int lastDot = field.lastIndexOf('.');
if (lastDot == -1) {
return null;
}
return field.substring(0, lastDot);
}
}

View File

@ -44,7 +44,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexSortConfig;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.IndexAnalyzers;
@ -57,13 +56,11 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.InvalidTypeNameException;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@ -74,7 +71,6 @@ import java.util.Set;
import java.util.function.BooleanSupplier;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
@ -141,10 +137,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
private volatile DocumentMapper mapper;
private volatile DocumentMapper defaultMapper;
private volatile FieldTypeLookup fieldTypes;
private volatile Map<String, ObjectMapper> fullPathObjectMappers = emptyMap();
private boolean hasNested = false; // updated dynamically to true when a nested object is added
private final DocumentMapperParser documentParser;
private final Version indexVersionCreated;
@ -164,7 +156,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
super(indexSettings);
this.indexVersionCreated = indexSettings.getIndexVersionCreated();
this.indexAnalyzers = indexAnalyzers;
this.fieldTypes = new FieldTypeLookup();
this.documentParser = new DocumentMapperParser(indexSettings, this, xContentRegistry, similarityService, mapperRegistry,
queryShardContextSupplier);
this.indexAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultIndexAnalyzer(), MappedFieldType::indexAnalyzer);
@ -188,7 +179,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
public boolean hasNested() {
return this.hasNested;
return this.mapper != null && this.mapper.hasNestedObjects();
}
public IndexAnalyzers getIndexAnalyzers() {
@ -453,10 +444,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
private synchronized Map<String, DocumentMapper> internalMerge(@Nullable DocumentMapper defaultMapper,
@Nullable String defaultMappingSource, DocumentMapper mapper,
MergeReason reason) {
boolean hasNested = this.hasNested;
Map<String, ObjectMapper> fullPathObjectMappers = this.fullPathObjectMappers;
@Nullable String defaultMappingSource, DocumentMapper mapper, MergeReason reason) {
Map<String, DocumentMapper> results = new LinkedHashMap<>(2);
@ -471,7 +459,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
DocumentMapper newMapper = null;
FieldTypeLookup newFieldTypes = null;
if (mapper != null) {
// check naming
validateTypeName(mapper.type());
@ -485,55 +472,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
newMapper.root().fixRedundantIncludes();
// check basic sanity of the new mapping
List<ObjectMapper> objectMappers = new ArrayList<>();
List<FieldMapper> fieldMappers = new ArrayList<>();
List<FieldAliasMapper> fieldAliasMappers = new ArrayList<>();
MetadataFieldMapper[] metadataMappers = newMapper.mapping().metadataMappers;
Collections.addAll(fieldMappers, metadataMappers);
MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers, fieldAliasMappers);
MapperMergeValidator.validateNewMappers(objectMappers, fieldMappers, fieldAliasMappers);
checkPartitionedIndexConstraints(newMapper);
// update lookup data-structures
newFieldTypes = new FieldTypeLookup(fieldMappers, fieldAliasMappers);
for (ObjectMapper objectMapper : objectMappers) {
if (fullPathObjectMappers == this.fullPathObjectMappers) {
// first time through the loops
fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers);
}
fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper);
if (objectMapper.nested().isNested()) {
hasNested = true;
}
}
MapperMergeValidator.validateFieldReferences(fieldMappers, fieldAliasMappers,
fullPathObjectMappers, newFieldTypes, metadataMappers, newMapper);
ContextMapping.validateContextPaths(indexSettings.getIndexVersionCreated(), fieldMappers, newFieldTypes::get);
if (reason != MergeReason.MAPPING_RECOVERY) {
// this check will only be performed on the master node when there is
// a call to the update mapping API. For all other cases like
// the master node restoring mappings from disk or data nodes
// deserializing cluster state that was sent by the master node,
// this check will be skipped.
// Also, don't take metadata mappers into account for the field limit check
checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size() - metadataMappers.length
+ fieldAliasMappers.size());
checkFieldNameSoftLimit(objectMappers, fieldMappers, fieldAliasMappers);
checkNestedFieldsLimit(fullPathObjectMappers);
checkDepthLimit(fullPathObjectMappers.keySet());
}
newMapper.validate(indexSettings, reason != MergeReason.MAPPING_RECOVERY);
results.put(newMapper.type(), newMapper);
}
checkIndexSortCompatibility(indexSettings.getIndexSortConfig(), hasNested);
// make structures immutable
results = Collections.unmodifiableMap(results);
@ -542,12 +483,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return results;
}
// only need to immutably rewrap these if the previous reference was changed.
// if not then they are already implicitly immutable.
if (fullPathObjectMappers != this.fullPathObjectMappers) {
fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers);
}
// commit the change
if (defaultMappingSource != null) {
this.defaultMappingSource = defaultMappingSource;
@ -555,13 +490,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
if (newMapper != null) {
this.mapper = newMapper;
this.fieldTypes = newFieldTypes;
}
this.hasNested = hasNested;
this.fullPathObjectMappers = fullPathObjectMappers;
assert results.values().stream().allMatch(this::assertSerialization);
return results;
}
@ -578,82 +509,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return true;
}
private void checkNestedFieldsLimit(Map<String, ObjectMapper> fullPathObjectMappers) {
long allowedNestedFields = indexSettings.getMappingNestedFieldsLimit();
long actualNestedFields = 0;
for (ObjectMapper objectMapper : fullPathObjectMappers.values()) {
if (objectMapper.nested().isNested()) {
actualNestedFields++;
}
}
if (actualNestedFields > allowedNestedFields) {
throw new IllegalArgumentException("Limit of nested fields [" + allowedNestedFields + "] in index [" + index().getName()
+ "] has been exceeded");
}
}
private void checkTotalFieldsLimit(long totalMappers) {
long allowedTotalFields = indexSettings.getMappingTotalFieldsLimit();
if (allowedTotalFields < totalMappers) {
throw new IllegalArgumentException("Limit of total fields [" + allowedTotalFields + "] in index [" + index().getName()
+ "] has been exceeded");
}
}
private void checkDepthLimit(Collection<String> objectPaths) {
final long maxDepth = indexSettings.getMappingDepthLimit();
for (String objectPath : objectPaths) {
checkDepthLimit(objectPath, maxDepth);
}
}
private void checkDepthLimit(String objectPath, long maxDepth) {
int numDots = 0;
for (int i = 0; i < objectPath.length(); ++i) {
if (objectPath.charAt(i) == '.') {
numDots += 1;
}
}
final int depth = numDots + 2;
if (depth > maxDepth) {
throw new IllegalArgumentException("Limit of mapping depth [" + maxDepth + "] in index [" + index().getName()
+ "] has been exceeded due to object field [" + objectPath + "]");
}
}
private void checkFieldNameSoftLimit(Collection<ObjectMapper> objectMappers,
Collection<FieldMapper> fieldMappers,
Collection<FieldAliasMapper> fieldAliasMappers) {
final long maxFieldNameLength = indexSettings.getMappingFieldNameLengthLimit();
Stream.of(objectMappers.stream(), fieldMappers.stream(), fieldAliasMappers.stream())
.reduce(Stream::concat)
.orElseGet(Stream::empty)
.forEach(mapper -> {
String name = mapper.simpleName();
if (name.length() > maxFieldNameLength) {
throw new IllegalArgumentException("Field name [" + name + "] in index [" + index().getName() +
"] is too long. The limit is set to [" + maxFieldNameLength + "] characters but was ["
+ name.length() + "] characters");
}
});
}
private void checkPartitionedIndexConstraints(DocumentMapper newMapper) {
if (indexSettings.getIndexMetadata().isRoutingPartitionedIndex()) {
if (!newMapper.routingFieldMapper().required()) {
throw new IllegalArgumentException("mapping type [" + newMapper.type() + "] must have routing "
+ "required for partitioned index [" + indexSettings.getIndex().getName() + "]");
}
}
}
private static void checkIndexSortCompatibility(IndexSortConfig sortConfig, boolean hasNested) {
if (sortConfig.hasIndexSort() && hasNested) {
throw new IllegalArgumentException("cannot have nested fields when index sort is activated");
}
}
public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException {
return documentParser.parse(mappingType, mappingSource, applyDefault ? defaultMappingSource : null);
}
@ -737,7 +592,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
* Given the full name of a field, returns its {@link MappedFieldType}.
*/
public MappedFieldType fieldType(String fullName) {
return fieldTypes.get(fullName);
return this.mapper == null ? null : this.mapper.fieldTypes().get(fullName);
}
/**
@ -749,7 +604,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
// no wildcards
return Collections.singleton(pattern);
}
return fieldTypes.simpleMatchToFullName(pattern);
return this.mapper == null ? Collections.emptySet() : this.mapper.fieldTypes().simpleMatchToFullName(pattern);
}
/**
@ -757,18 +612,18 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
* the 'source path' for a multi-field is the path to its parent field.
*/
public Set<String> sourcePath(String fullName) {
return fieldTypes.sourcePaths(fullName);
return this.mapper == null ? Collections.emptySet() : this.mapper.fieldTypes().sourcePaths(fullName);
}
/**
* Returns all mapped field types.
*/
public Iterable<MappedFieldType> fieldTypes() {
return fieldTypes;
return this.mapper == null ? Collections.emptySet() : this.mapper.fieldTypes();
}
public ObjectMapper getObjectMapper(String name) {
return fullPathObjectMappers.get(name);
return this.mapper == null ? null : this.mapper.objectMappers().get(name);
}
/**

View File

@ -1,51 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import java.util.Collection;
enum MapperUtils {
;
/**
* Splits the provided mapper and its descendants into object, field, and field alias mappers.
*/
public static void collect(Mapper mapper, Collection<ObjectMapper> objectMappers,
Collection<FieldMapper> fieldMappers,
Collection<FieldAliasMapper> fieldAliasMappers) {
if (mapper instanceof RootObjectMapper) {
// root mapper isn't really an object mapper
} else if (mapper instanceof ObjectMapper) {
objectMappers.add((ObjectMapper)mapper);
} else if (mapper instanceof FieldMapper) {
fieldMappers.add((FieldMapper)mapper);
} else if (mapper instanceof FieldAliasMapper) {
fieldAliasMappers.add((FieldAliasMapper) mapper);
} else {
throw new IllegalStateException("Unrecognized mapper type [" +
mapper.getClass().getSimpleName() + "].");
}
for (Mapper child : mapper) {
collect(child, objectMappers, fieldMappers, fieldAliasMappers);
}
}
}

View File

@ -75,6 +75,13 @@ public final class Mapping implements ToXContentFragment {
return root;
}
public void validate(MappingLookup mappers) {
for (MetadataFieldMapper metadataFieldMapper : metadataMappers) {
metadataFieldMapper.validate(mappers);
}
root.validate(mappers);
}
/**
* Generate a mapping update for the given root object mapper.
*/

View File

@ -0,0 +1,248 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
public final class MappingLookup implements Iterable<Mapper> {
/** Full field name to mapper */
private final Map<String, Mapper> fieldMappers;
private final Map<String, ObjectMapper> objectMappers;
private final boolean hasNested;
private final FieldTypeLookup fieldTypeLookup;
private final int metadataFieldCount;
private final FieldNameAnalyzer indexAnalyzer;
private static void put(Map<String, Analyzer> analyzers, String key, Analyzer value, Analyzer defaultValue) {
if (value == null) {
value = defaultValue;
}
analyzers.put(key, value);
}
public static MappingLookup fromMapping(Mapping mapping, Analyzer defaultIndex) {
List<ObjectMapper> newObjectMappers = new ArrayList<>();
List<FieldMapper> newFieldMappers = new ArrayList<>();
List<FieldAliasMapper> newFieldAliasMappers = new ArrayList<>();
for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
if (metadataMapper != null) {
newFieldMappers.add(metadataMapper);
}
}
collect(mapping.root, newObjectMappers, newFieldMappers, newFieldAliasMappers);
return new MappingLookup(newFieldMappers, newObjectMappers, newFieldAliasMappers, mapping.metadataMappers.length, defaultIndex);
}
private static void collect(Mapper mapper, Collection<ObjectMapper> objectMappers,
Collection<FieldMapper> fieldMappers,
Collection<FieldAliasMapper> fieldAliasMappers) {
if (mapper instanceof RootObjectMapper) {
// root mapper isn't really an object mapper
} else if (mapper instanceof ObjectMapper) {
objectMappers.add((ObjectMapper)mapper);
} else if (mapper instanceof FieldMapper) {
fieldMappers.add((FieldMapper)mapper);
} else if (mapper instanceof FieldAliasMapper) {
fieldAliasMappers.add((FieldAliasMapper) mapper);
} else {
throw new IllegalStateException("Unrecognized mapper type [" +
mapper.getClass().getSimpleName() + "].");
}
for (Mapper child : mapper) {
collect(child, objectMappers, fieldMappers, fieldAliasMappers);
}
}
public MappingLookup(Collection<FieldMapper> mappers,
Collection<ObjectMapper> objectMappers,
Collection<FieldAliasMapper> aliasMappers,
int metadataFieldCount,
Analyzer defaultIndex) {
Map<String, Mapper> fieldMappers = new HashMap<>();
Map<String, Analyzer> indexAnalyzers = new HashMap<>();
Map<String, ObjectMapper> objects = new HashMap<>();
boolean hasNested = false;
for (ObjectMapper mapper : objectMappers) {
if (objects.put(mapper.fullPath(), mapper) != null) {
throw new MapperParsingException("Object mapper [" + mapper.fullPath() + "] is defined more than once");
}
if (mapper.nested().isNested()) {
hasNested = true;
}
}
this.hasNested = hasNested;
for (FieldMapper mapper : mappers) {
if (objects.containsKey(mapper.name())) {
throw new MapperParsingException("Field [" + mapper.name() + "] is defined both as an object and a field");
}
if (fieldMappers.put(mapper.name(), mapper) != null) {
throw new MapperParsingException("Field [" + mapper.name() + "] is defined more than once");
}
MappedFieldType fieldType = mapper.fieldType();
put(indexAnalyzers, fieldType.name(), fieldType.indexAnalyzer(), defaultIndex);
}
this.metadataFieldCount = metadataFieldCount;
for (FieldAliasMapper aliasMapper : aliasMappers) {
if (objects.containsKey(aliasMapper.name())) {
throw new MapperParsingException("Alias [" + aliasMapper.name() + "] is defined both as an object and an alias");
}
if (fieldMappers.put(aliasMapper.name(), aliasMapper) != null) {
throw new MapperParsingException("Alias [" + aliasMapper.name() + "] is defined both as an alias and a concrete field");
}
}
this.fieldTypeLookup = new FieldTypeLookup(mappers, aliasMappers);
this.fieldMappers = Collections.unmodifiableMap(fieldMappers);
this.indexAnalyzer = new FieldNameAnalyzer(indexAnalyzers);
this.objectMappers = Collections.unmodifiableMap(objects);
}
/**
* Returns the leaf mapper associated with this field name. Note that the returned mapper
* could be either a concrete {@link FieldMapper}, or a {@link FieldAliasMapper}.
*
* To access a field's type information, {@link MapperService#fieldType} should be used instead.
*/
public Mapper getMapper(String field) {
return fieldMappers.get(field);
}
public FieldTypeLookup fieldTypes() {
return fieldTypeLookup;
}
/**
* A smart analyzer used for indexing that takes into account specific analyzers configured
* per {@link FieldMapper}.
*/
public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
@Override
public Iterator<Mapper> iterator() {
return fieldMappers.values().iterator();
}
public void checkLimits(IndexSettings settings) {
checkFieldLimit(settings.getMappingTotalFieldsLimit());
checkObjectDepthLimit(settings.getMappingDepthLimit());
checkFieldNameLengthLimit(settings.getMappingFieldNameLengthLimit());
checkNestedLimit(settings.getMappingNestedFieldsLimit());
}
private void checkFieldLimit(long limit) {
if (fieldMappers.size() + objectMappers.size() - metadataFieldCount > limit) {
throw new IllegalArgumentException("Limit of total fields [" + limit + "] has been exceeded");
}
}
private void checkObjectDepthLimit(long limit) {
for (String objectPath : objectMappers.keySet()) {
int numDots = 0;
for (int i = 0; i < objectPath.length(); ++i) {
if (objectPath.charAt(i) == '.') {
numDots += 1;
}
}
final int depth = numDots + 2;
if (depth > limit) {
throw new IllegalArgumentException("Limit of mapping depth [" + limit +
"] has been exceeded due to object field [" + objectPath + "]");
}
}
}
private void checkFieldNameLengthLimit(long limit) {
Stream.of(objectMappers.values().stream(), fieldMappers.values().stream())
.reduce(Stream::concat)
.orElseGet(Stream::empty)
.forEach(mapper -> {
String name = mapper.simpleName();
if (name.length() > limit) {
throw new IllegalArgumentException("Field name [" + name + "] is longer than the limit of [" + limit + "] characters");
}
});
}
private void checkNestedLimit(long limit) {
long actualNestedFields = 0;
for (ObjectMapper objectMapper : objectMappers.values()) {
if (objectMapper.nested().isNested()) {
actualNestedFields++;
}
}
if (actualNestedFields > limit) {
throw new IllegalArgumentException("Limit of nested fields [" + limit + "] has been exceeded");
}
}
public boolean hasNested() {
return hasNested;
}
public Map<String, ObjectMapper> objectMappers() {
return objectMappers;
}
public boolean isMultiField(String field) {
String sourceParent = parentObject(field);
return sourceParent != null && fieldMappers.containsKey(sourceParent);
}
public boolean isObjectField(String field) {
return objectMappers.containsKey(field);
}
public String getNestedScope(String path) {
for (String parentPath = parentObject(path); parentPath != null; parentPath = parentObject(parentPath)) {
ObjectMapper objectMapper = objectMappers.get(parentPath);
if (objectMapper != null && objectMapper.nested().isNested()) {
return parentPath;
}
}
return null;
}
private static String parentObject(String field) {
int lastDot = field.lastIndexOf('.');
if (lastDot == -1) {
return null;
}
return field.substring(0, lastDot);
}
}

View File

@ -69,14 +69,6 @@ public abstract class MetadataFieldMapper extends FieldMapper {
super(mappedFieldType.name(), fieldType, mappedFieldType, MultiFields.empty(), CopyTo.empty());
}
/**
* Called when mapping gets merged. Provides the opportunity to validate other fields a metadata field mapper
* is supposed to work with before a mapping update is completed.
*/
public void validate(DocumentFieldMappers lookup) {
// noop by default
}
/**
* Called before {@link FieldMapper#parse(ParseContext)} on the {@link RootObjectMapper}.
*/

View File

@ -480,6 +480,13 @@ public class ObjectMapper extends Mapper implements Cloneable {
return merge(mergeWith, MergeReason.MAPPING_UPDATE);
}
@Override
public void validate(MappingLookup mappers) {
for (Mapper mapper : this.mappers.values()) {
mapper.validate(mappers);
}
}
public ObjectMapper merge(Mapper mergeWith, MergeReason reason) {
if (!(mergeWith instanceof ObjectMapper)) {
throw new IllegalArgumentException("can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping");

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.lookup.SourceLookup;
@ -38,12 +38,12 @@ import java.util.Set;
* Then given a specific document, it can retrieve the corresponding fields from the document's source.
*/
public class FieldValueRetriever {
private final DocumentFieldMappers fieldMappers;
private final MappingLookup fieldMappers;
private final List<FieldContext> fieldContexts;
public static FieldValueRetriever create(MapperService mapperService,
Collection<FieldAndFormat> fieldAndFormats) {
DocumentFieldMappers fieldMappers = mapperService.documentMapper().mappers();
MappingLookup fieldMappers = mapperService.documentMapper().mappers();
List<FieldContext> fields = new ArrayList<>();
for (FieldAndFormat fieldAndFormat : fieldAndFormats) {
@ -63,7 +63,7 @@ public class FieldValueRetriever {
}
private FieldValueRetriever(DocumentFieldMappers fieldMappers,
private FieldValueRetriever(MappingLookup fieldMappers,
List<FieldContext> fieldContexts) {
this.fieldMappers = fieldMappers;
this.fieldContexts = fieldContexts;

View File

@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.ParseContext;
@ -140,27 +139,10 @@ public abstract class ContextMapping<T extends ToXContent> implements ToXContent
* Checks if the current context is consistent with the rest of the fields. For example, the GeoContext
* should check that the field that it points to has the correct type.
*/
protected void validateReferences(Version indexVersionCreated, Function<String, MappedFieldType> fieldResolver) {
public void validateReferences(Version indexVersionCreated, Function<String, MappedFieldType> fieldResolver) {
// No validation is required by default
}
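// Hedged sketch (added for illustration, not part of this diff): with validateReferences now
// public, a hypothetical ContextMapping subclass can check its own target field directly, much as
// GeoContextMapping does in the next hunk. The subclass's "fieldName" member and the exception
// type used here are invented.
@Override
public void validateReferences(Version indexVersionCreated, Function<String, MappedFieldType> fieldResolver) {
    MappedFieldType target = fieldResolver.apply(fieldName);   // fieldName: the path this context was configured with
    if (target == null) {
        throw new ElasticsearchParseException("field [" + fieldName + "] referenced in context ["
            + name() + "] is not defined in the mapping");
    } else if ("keyword".equals(target.typeName()) == false) {
        throw new ElasticsearchParseException("context [" + name() + "] must point to a field of type [keyword]");
    }
}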
/**
* Verifies that all field paths specified in contexts point to the fields with correct mappings
*/
public static void validateContextPaths(Version indexVersionCreated, List<FieldMapper> fieldMappers,
Function<String, MappedFieldType> fieldResolver) {
for (FieldMapper fieldMapper : fieldMappers) {
if (CompletionFieldMapper.CONTENT_TYPE.equals(fieldMapper.typeName())) {
CompletionFieldMapper.CompletionFieldType fieldType = ((CompletionFieldMapper) fieldMapper).fieldType();
if (fieldType.hasContextMappings()) {
for (ContextMapping context : fieldType.getContextMappings()) {
context.validateReferences(indexVersionCreated, fieldResolver);
}
}
}
}
}
@Override
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(FIELD_NAME, name);

View File

@ -291,7 +291,7 @@ public class GeoContextMapping extends ContextMapping<GeoQueryContext> {
}
@Override
protected void validateReferences(Version indexVersionCreated, Function<String, MappedFieldType> fieldResolver) {
public void validateReferences(Version indexVersionCreated, Function<String, MappedFieldType> fieldResolver) {
if (fieldName != null) {
MappedFieldType mappedFieldType = fieldResolver.apply(fieldName);
if (mappedFieldType == null) {

View File

@ -57,11 +57,12 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.shard.IndexEventListener;
@ -558,9 +559,10 @@ public class MetadataRolloverServiceTests extends ESTestCase {
MappedFieldType mockedTimestampFieldType = mock(MappedFieldType.class);
when(mockedTimestampFieldType.name()).thenReturn("_data_stream_timestamp");
when(mockedTimestampField.fieldType()).thenReturn(mockedTimestampFieldType);
DocumentFieldMappers documentFieldMappers =
new DocumentFieldMappers(Arrays.asList(mockedTimestampField, dateFieldMapper),
Collections.emptyList(), new StandardAnalyzer());
when(mockedTimestampField.copyTo()).thenReturn(FieldMapper.CopyTo.empty());
when(mockedTimestampField.multiFields()).thenReturn(FieldMapper.MultiFields.empty());
MappingLookup mappingLookup = new MappingLookup(Arrays.asList(mockedTimestampField, dateFieldMapper),
Collections.emptyList(), Collections.emptyList(), 0, new StandardAnalyzer());
ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
Environment env = mock(Environment.class);
@ -568,7 +570,7 @@ public class MetadataRolloverServiceTests extends ESTestCase {
AllocationService allocationService = mock(AllocationService.class);
when(allocationService.reroute(any(ClusterState.class), any(String.class))).then(i -> i.getArguments()[0]);
DocumentMapper documentMapper = mock(DocumentMapper.class);
when(documentMapper.mappers()).thenReturn(documentFieldMappers);
when(documentMapper.mappers()).thenReturn(mappingLookup);
when(documentMapper.type()).thenReturn("_doc");
CompressedXContent mapping =
new CompressedXContent("{\"_doc\":" + generateMapping(dataStream.getTimeStampField().getName(), "date") + "}");

View File

@ -944,7 +944,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer());
CompletionFieldMapper mapper = new CompletionFieldMapper.Builder("completion", defaultAnalyzer).build(context);
CompletionFieldMapper mapper = new CompletionFieldMapper.Builder("completion", defaultAnalyzer, Version.CURRENT).build(context);
assertEquals(org.elasticsearch.common.collect.List.of("value"), mapper.parseSourceValue("value", null));

View File

@ -129,14 +129,15 @@ public class DocumentFieldMapperTests extends LuceneTestCase {
Analyzer defaultIndex = new FakeAnalyzer("default_index");
DocumentFieldMappers documentFieldMappers = new DocumentFieldMappers(
MappingLookup mappingLookup = new MappingLookup(
Arrays.asList(fieldMapper1, fieldMapper2),
Collections.emptyList(),
defaultIndex);
Collections.emptyList(),
0, defaultIndex);
assertAnalyzes(documentFieldMappers.indexAnalyzer(), "field1", "index");
assertAnalyzes(mappingLookup.indexAnalyzer(), "field1", "index");
assertAnalyzes(documentFieldMappers.indexAnalyzer(), "field2", "default_index");
assertAnalyzes(mappingLookup.indexAnalyzer(), "field2", "default_index");
}
private void assertAnalyzes(Analyzer analyzer, String field, String output) throws IOException {

View File

@ -162,7 +162,7 @@ public class DocumentMapperTests extends ESSingleNodeTestCase {
mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), reason);
final DocumentMapper documentMapper = mapperService.documentMapper("test");
DocumentFieldMappers dfm = documentMapper.mappers();
MappingLookup dfm = documentMapper.mappers();
try {
assertNotNull(dfm.indexAnalyzer().tokenStream("non_existing_field", "foo"));
fail();

View File

@ -130,7 +130,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
.startObject("field")
.field("type", ExternalMapperPlugin.EXTERNAL)
.startObject("fields")
.startObject("field")
.startObject("text")
.field("type", "text")
.field("store", true)
.startObject("fields")
@ -162,16 +162,16 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
IndexableField shape = doc.rootDoc().getField("field.shape");
assertThat(shape, notNullValue());
IndexableField field = doc.rootDoc().getField("field.field");
IndexableField field = doc.rootDoc().getField("field.text");
assertThat(field, notNullValue());
assertThat(field.stringValue(), is("foo"));
IndexableField raw = doc.rootDoc().getField("field.field.raw");
IndexableField raw = doc.rootDoc().getField("field.text.raw");
assertThat(raw, notNullValue());
assertThat(raw.binaryValue(), is(new BytesRef("foo")));
assertWarnings("At least one multi-field, [field], was " +
assertWarnings("At least one multi-field, [text], was " +
"encountered that itself contains a multi-field. Defining multi-fields within a multi-field is deprecated and will " +
"no longer be supported in 8.0. To resolve the issue, all instances of [fields] that occur within a [fields] block " +
"should be removed from the mappings, either by flattening the chained [fields] blocks into a single level, or " +
@ -198,7 +198,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
.startObject("field")
.field("type", ExternalMapperPlugin.EXTERNAL)
.startObject("fields")
.startObject("field")
.startObject("text")
.field("type", "text")
.startObject("fields")
.startObject("generated")
@ -230,19 +230,19 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("field.shape"), notNullValue());
assertThat(doc.rootDoc().getField("field.field"), notNullValue());
assertThat(doc.rootDoc().getField("field.field").stringValue(), is("foo"));
assertThat(doc.rootDoc().getField("field.text"), notNullValue());
assertThat(doc.rootDoc().getField("field.text").stringValue(), is("foo"));
assertThat(doc.rootDoc().getField("field.field.generated.generated"), notNullValue());
assertThat(doc.rootDoc().getField("field.field.generated.generated").stringValue(), is("bar"));
assertThat(doc.rootDoc().getField("field.text.generated.generated"), notNullValue());
assertThat(doc.rootDoc().getField("field.text.generated.generated").stringValue(), is("bar"));
assertThat(doc.rootDoc().getField("field.field.raw"), notNullValue());
assertThat(doc.rootDoc().getField("field.field.raw").stringValue(), is("foo"));
assertThat(doc.rootDoc().getField("field.text.raw"), notNullValue());
assertThat(doc.rootDoc().getField("field.text.raw").stringValue(), is("foo"));
assertThat(doc.rootDoc().getField("field.raw"), notNullValue());
assertThat(doc.rootDoc().getField("field.raw").stringValue(), is("foo"));
assertWarnings("At least one multi-field, [field], was " +
assertWarnings("At least one multi-field, [text], was " +
"encountered that itself contains a multi-field. Defining multi-fields within a multi-field is deprecated and will " +
"no longer be supported in 8.0. To resolve the issue, all instances of [fields] that occur within a [fields] block " +
"should be removed from the mappings, either by flattening the chained [fields] blocks into a single level, or " +

View File

@ -18,31 +18,32 @@
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
public class MapperMergeValidatorTests extends ESTestCase {
public class FieldAliasMapperValidationTests extends ESTestCase {
public void testDuplicateFieldAliasAndObject() {
ObjectMapper objectMapper = createObjectMapper("some.path");
FieldAliasMapper aliasMapper = new FieldAliasMapper("path", "some.path", "field");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
MapperMergeValidator.validateNewMappers(
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
new MappingLookup(
Collections.emptyList(),
singletonList(objectMapper),
emptyList(),
singletonList(aliasMapper)));
assertEquals("Field [some.path] is defined both as an object and a field.", e.getMessage());
singletonList(aliasMapper), 0, Lucene.STANDARD_ANALYZER));
assertEquals("Alias [some.path] is defined both as an object and an alias", e.getMessage());
}
public void testDuplicateFieldAliasAndConcreteField() {
@ -50,13 +51,13 @@ public class MapperMergeValidatorTests extends ESTestCase {
FieldMapper invalidField = new MockFieldMapper("invalid");
FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid", "invalid", "field");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
MapperMergeValidator.validateNewMappers(
emptyList(),
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
new MappingLookup(
Arrays.asList(field, invalidField),
singletonList(invalidAlias)));
emptyList(),
singletonList(invalidAlias), 0, Lucene.STANDARD_ANALYZER));
assertEquals("Field [invalid] is defined both as an alias and a concrete field.", e.getMessage());
assertEquals("Alias [invalid] is defined both as an alias and a concrete field", e.getMessage());
}
public void testAliasThatRefersToAlias() {
@ -64,11 +65,13 @@ public class MapperMergeValidatorTests extends ESTestCase {
FieldAliasMapper alias = new FieldAliasMapper("alias", "alias", "field");
FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "alias");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
MapperMergeValidator.validateNewMappers(
emptyList(),
singletonList(field),
Arrays.asList(alias, invalidAlias)));
MappingLookup mappers = new MappingLookup(
singletonList(field),
emptyList(),
Arrays.asList(alias, invalidAlias), 0, Lucene.STANDARD_ANALYZER);
alias.validate(mappers);
MapperParsingException e = expectThrows(MapperParsingException.class, () -> invalidAlias.validate(mappers));
assertEquals("Invalid [path] value [alias] for field alias [invalid-alias]: an alias" +
" cannot refer to another alias.", e.getMessage());
@ -77,11 +80,13 @@ public class MapperMergeValidatorTests extends ESTestCase {
public void testAliasThatRefersToItself() {
FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "invalid-alias");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
MapperMergeValidator.validateNewMappers(
MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
MappingLookup mappers = new MappingLookup(
emptyList(),
emptyList(),
singletonList(invalidAlias)));
singletonList(invalidAlias), 0, null);
invalidAlias.validate(mappers);
});
assertEquals("Invalid [path] value [invalid-alias] for field alias [invalid-alias]: an alias" +
" cannot refer to itself.", e.getMessage());
@ -90,11 +95,13 @@ public class MapperMergeValidatorTests extends ESTestCase {
public void testAliasWithNonExistentPath() {
FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "non-existent");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
MapperMergeValidator.validateNewMappers(
MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
MappingLookup mappers = new MappingLookup(
emptyList(),
emptyList(),
singletonList(invalidAlias)));
singletonList(invalidAlias), 0, Lucene.STANDARD_ANALYZER);
invalidAlias.validate(mappers);
});
assertEquals("Invalid [path] value [non-existent] for field alias [invalid-alias]: an alias" +
" must refer to an existing field in the mappings.", e.getMessage());
@ -104,40 +111,38 @@ public class MapperMergeValidatorTests extends ESTestCase {
ObjectMapper objectMapper = createNestedObjectMapper("nested");
FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "nested.alias", "nested.field");
MapperMergeValidator.validateFieldReferences(emptyList(),
MappingLookup mappers = new MappingLookup(
singletonList(createFieldMapper("nested", "field")),
singletonList(objectMapper),
singletonList(aliasMapper),
Collections.singletonMap("nested", objectMapper),
new FieldTypeLookup(),
new MetadataFieldMapper[0],
null);
0, Lucene.STANDARD_ANALYZER);
aliasMapper.validate(mappers);
}
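// Added commentary (not in the original tests): the later tests in this class build a
// MappingLookup from the field, object and alias mappers and then call validate(lookup) on the
// alias mapper itself; a valid alias returns silently, an invalid one throws (MapperParsingException
// here, IllegalArgumentException for the nested-scope mismatches below). The duplicate-definition
// tests above instead rely on the MappingLookup constructor throwing.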
public void testFieldAliasWithDifferentObjectScopes() {
Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>();
fullPathObjectMappers.put("object1", createObjectMapper("object1"));
fullPathObjectMappers.put("object2", createObjectMapper("object2"));
FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "object2.alias", "object1.field");
MapperMergeValidator.validateFieldReferences(emptyList(),
MappingLookup mappers = new MappingLookup(
singletonList(createFieldMapper("object1", "field")),
Arrays.asList(createObjectMapper("object1"), createObjectMapper("object2")),
singletonList(aliasMapper),
fullPathObjectMappers,
new FieldTypeLookup(),
new MetadataFieldMapper[0],
null);
0, Lucene.STANDARD_ANALYZER);
aliasMapper.validate(mappers);
}
public void testFieldAliasWithNestedTarget() {
ObjectMapper objectMapper = createNestedObjectMapper("nested");
FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "alias", "nested.field");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
MapperMergeValidator.validateFieldReferences(emptyList(),
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
MappingLookup mappers = new MappingLookup(
singletonList(createFieldMapper("nested", "field")),
Collections.singletonList(objectMapper),
singletonList(aliasMapper),
Collections.singletonMap("nested", objectMapper),
new FieldTypeLookup(),
new MetadataFieldMapper[0],
null));
0, Lucene.STANDARD_ANALYZER);
aliasMapper.validate(mappers);
});
String expectedMessage = "Invalid [path] value [nested.field] for field alias [alias]: " +
"an alias must have the same nested scope as its target. The alias is not nested, " +
@ -146,19 +151,16 @@ public class MapperMergeValidatorTests extends ESTestCase {
}
public void testFieldAliasWithDifferentNestedScopes() {
Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>();
fullPathObjectMappers.put("nested1", createNestedObjectMapper("nested1"));
fullPathObjectMappers.put("nested2", createNestedObjectMapper("nested2"));
FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "nested2.alias", "nested1.field");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
MapperMergeValidator.validateFieldReferences(emptyList(),
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
MappingLookup mappers = new MappingLookup(
singletonList(createFieldMapper("nested1", "field")),
Arrays.asList(createNestedObjectMapper("nested1"), createNestedObjectMapper("nested2")),
singletonList(aliasMapper),
fullPathObjectMappers,
new FieldTypeLookup(),
new MetadataFieldMapper[0],
null));
0, Lucene.STANDARD_ANALYZER);
aliasMapper.validate(mappers);
});
String expectedMessage = "Invalid [path] value [nested1.field] for field alias [nested2.alias]: " +
@ -167,6 +169,15 @@ public class MapperMergeValidatorTests extends ESTestCase {
assertEquals(expectedMessage, e.getMessage());
}
private static final Settings SETTINGS = Settings.builder()
.put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), Version.CURRENT)
.build();
private static FieldMapper createFieldMapper(String parent, String name) {
Mapper.BuilderContext context = new Mapper.BuilderContext(SETTINGS, new ContentPath(parent));
return new BooleanFieldMapper.Builder(name).build(context);
}
private static ObjectMapper createObjectMapper(String name) {
return new ObjectMapper(name, name,
new Explicit<>(true, false),

View File

@ -155,7 +155,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
createMappingSpecifyingNumberOfFields(totalFieldsLimit + 1), updateOrPreflight());
});
assertTrue(e.getMessage(),
e.getMessage().contains("Limit of total fields [" + totalFieldsLimit + "] in index [test2] has been exceeded"));
e.getMessage().contains("Limit of total fields [" + totalFieldsLimit + "] has been exceeded"));
}
private CompressedXContent createMappingSpecifyingNumberOfFields(int numberOfFields) throws IOException {
@ -189,7 +189,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> indexService1.mapperService().merge("type", objectMapping, updateOrPreflight()));
assertThat(e.getMessage(), containsString("Limit of mapping depth [1] in index [test1] has been exceeded"));
assertThat(e.getMessage(), containsString("Limit of mapping depth [1] has been exceeded"));
}
public void testUnmappedFieldType() {
@ -305,7 +305,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
Settings.builder().put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), numberOfNonAliasFields).build())
.mapperService().merge("type", new CompressedXContent(mapping), updateOrPreflight());
});
assertEquals("Limit of total fields [" + numberOfNonAliasFields + "] in index [test2] has been exceeded", e.getMessage());
assertEquals("Limit of total fields [" + numberOfNonAliasFields + "] has been exceeded", e.getMessage());
}
public void testDefaultMappingIsRejectedOn7() throws IOException {
@ -347,9 +347,8 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
mapperService.merge("type", mappingUpdate, updateOrPreflight());
});
assertEquals("Field name [" + testString + "] in index [test1] is too long. " +
"The limit is set to [" + maxFieldNameLength + "] characters but was ["
+ testString.length() + "] characters", e.getMessage());
assertEquals("Field name [" + testString + "] is longer than the limit of [" + maxFieldNameLength + "] characters",
e.getMessage());
}
public void testObjectNameLengthLimit() throws Throwable {
@ -372,9 +371,8 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
mapperService.merge("type", mapping, updateOrPreflight());
});
assertEquals("Field name [" + testString + "] in index [test1] is too long. " +
"The limit is set to [" + maxFieldNameLength + "] characters but was ["
+ testString.length() + "] characters", e.getMessage());
assertEquals("Field name [" + testString + "] is longer than the limit of [" + maxFieldNameLength + "] characters",
e.getMessage());
}
public void testAliasFieldNameLengthLimit() throws Throwable {
@ -401,9 +399,8 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
mapperService.merge("type", mapping, updateOrPreflight());
});
assertEquals("Field name [" + testString + "] in index [test1] is too long. " +
"The limit is set to [" + maxFieldNameLength + "] characters but was ["
+ testString.length() + "] characters", e.getMessage());
assertEquals("Field name [" + testString + "] is longer than the limit of [" + maxFieldNameLength + "] characters",
e.getMessage());
}
public void testMappingRecoverySkipFieldNameLengthLimit() throws Throwable {

View File

@ -569,14 +569,14 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
createIndex("test2", Settings.builder()
.put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("Limit of nested fields [0] in index [test2] has been exceeded"));
assertThat(e.getMessage(), containsString("Limit of nested fields [0] has been exceeded"));
// setting limit to 1 with 2 nested fields fails
e = expectThrows(IllegalArgumentException.class, () ->
createIndex("test3", Settings.builder()
.put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 1).build())
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("Limit of nested fields [1] in index [test3] has been exceeded"));
assertThat(e.getMessage(), containsString("Limit of nested fields [1] has been exceeded"));
// do not check nested fields limit if mapping is not updated
createIndex("test4", Settings.builder()

View File

@ -1022,9 +1022,9 @@ public class TextFieldMapperTests extends FieldMapperTestCase<TextFieldMapper.Bu
.endObject().endObject()
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
indexService.mapperService().merge("type", new CompressedXContent(illegalMapping), MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("Field [field._index_prefix] is defined twice."));
assertThat(e.getMessage(), containsString("Field [field._index_prefix] is defined more than once"));
}

View File

@ -137,19 +137,13 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject();
MapperService mapperService = createIndex("test", Settings.builder().build()).mapperService();
try {
mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
fail();
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Field [_id] is defined twice."));
}
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("Field [_id] is defined more than once"));
try {
mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
fail();
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Field [_id] is defined twice."));
}
MapperParsingException e2 = expectThrows(MapperParsingException.class, () ->
mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(e2.getMessage(), containsString("Field [_id] is defined more than once"));
}
public void testRejectFieldDefinedTwice() throws IOException {

View File

@ -17,7 +17,7 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@ -129,7 +129,8 @@ public class DataStreamTimestampFieldMapper extends MetadataFieldMapper {
this.enabled = enabled;
}
public void validate(DocumentFieldMappers lookup) {
@Override
public void doValidate(MappingLookup lookup) {
if (enabled == false) {
// not configured, so skip the validation
return;