Remove Mapper.updateFieldType() (#57151)
When we had multiple mapping types, an update to a field in one type had to be propagated to the same field in all other types. This was done using the Mapper.updateFieldType() method, called at the end of a merge. However, now that we only have a single type per index, this method is unnecessary and can be removed.

Relates to #41059. Backport of #56986.
parent 343fb699a4
commit d6b79bcd95
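Editor's note: as a quick illustration of the pattern being removed, here is a minimal, self-contained sketch of the two-phase "merge, then propagate the canonical field type" flow that updateFieldType() supported. The classes below are hypothetical stand-ins, not the actual Elasticsearch types; with one mapping type per index, the merge result already owns the only copy of each field type, so this second pass has nothing left to do.

```java
import java.util.HashMap;
import java.util.Map;

// Toy model: with several "types" sharing a field name, a merge in one type had to be
// followed by a propagation pass so every type's mapper pointed at the same field-type
// instance. The names here are invented for the sketch.
final class FieldTypePropagationSketch {

    record FieldType(String name, boolean stored) {}

    static final class Mapper {
        final String field;
        FieldType fieldType;

        Mapper(String field, FieldType fieldType) {
            this.field = field;
            this.fieldType = fieldType;
        }

        // The removed hook, in miniature: look up the canonical field type by full name
        // and return an updated copy only if it differs from what this mapper holds.
        Mapper updateFieldType(Map<String, FieldType> fullNameToFieldType) {
            FieldType canonical = fullNameToFieldType.getOrDefault(field, fieldType);
            return canonical == fieldType ? this : new Mapper(field, canonical);
        }
    }

    public static void main(String[] args) {
        FieldType canonical = new FieldType("title", true);
        Map<String, FieldType> registry = new HashMap<>();
        registry.put("title", canonical);

        Mapper typeA = new Mapper("title", new FieldType("title", false));
        // Old world: after merging another type, typeA had to be re-pointed at the shared instance.
        Mapper updated = typeA.updateFieldType(registry);
        System.out.println(updated.fieldType == canonical); // true
    }
}
```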
@@ -647,16 +647,6 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
this.maxShingleSize = maxShingleSize;
}
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
SearchAsYouTypeFieldMapper fieldMapper = (SearchAsYouTypeFieldMapper) super.updateFieldType(fullNameToFieldType);
fieldMapper.prefixField = (PrefixFieldMapper) fieldMapper.prefixField.updateFieldType(fullNameToFieldType);
for (int i = 0; i < fieldMapper.shingleFields.length; i++) {
fieldMapper.shingleFields[i] = (ShingleFieldMapper) fieldMapper.shingleFields[i].updateFieldType(fullNameToFieldType);
}
return fieldMapper;
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
final String value = context.externalValueSet() ? context.externalValue().toString() : context.parser().textOrNull();
@@ -59,21 +59,25 @@ public class MetaJoinFieldMapper extends FieldMapper {
}
static class Builder extends FieldMapper.Builder<Builder> {
Builder() {
final String joinField;
Builder(String joinField) {
super(NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
builder = this;
this.joinField = joinField;
}
@Override
public MetaJoinFieldMapper build(BuilderContext context) {
fieldType.setName(NAME);
return new MetaJoinFieldMapper(name, fieldType, context.indexSettings());
return new MetaJoinFieldMapper(name, joinField, (MetaJoinFieldType) fieldType, context.indexSettings());
}
}
public static class MetaJoinFieldType extends StringFieldType {
private ParentJoinFieldMapper mapper;
private String joinField;
MetaJoinFieldType() {}
@@ -110,8 +114,12 @@ public class MetaJoinFieldMapper extends FieldMapper {
return binaryValue.utf8ToString();
}
public ParentJoinFieldMapper getMapper() {
return mapper;
public void setJoinField(String joinField) {
this.joinField = joinField;
}
public String getJoinField() {
return joinField;
}
@Override
@@ -120,12 +128,10 @@ public class MetaJoinFieldMapper extends FieldMapper {
}
}
MetaJoinFieldMapper(String name, MappedFieldType fieldType, Settings indexSettings) {
MetaJoinFieldMapper(String name, String joinField, MetaJoinFieldType fieldType, Settings indexSettings) {
super(name, fieldType, ParentIdFieldMapper.Defaults.FIELD_TYPE, indexSettings, MultiFields.empty(), CopyTo.empty());
}
fieldType.setJoinField(joinField);
void setFieldMapper(ParentJoinFieldMapper mapper) {
fieldType().mapper = mapper;
}
@Override
@@ -140,7 +146,6 @@ public class MetaJoinFieldMapper extends FieldMapper {
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}
@Override
@@ -34,6 +34,8 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@@ -85,7 +87,13 @@ public final class ParentJoinFieldMapper extends FieldMapper {
public static ParentJoinFieldMapper getMapper(MapperService service) {
MetaJoinFieldMapper.MetaJoinFieldType fieldType =
(MetaJoinFieldMapper.MetaJoinFieldType) service.fieldType(MetaJoinFieldMapper.NAME);
return fieldType == null ? null : fieldType.getMapper();
if (fieldType == null) {
return null;
}
DocumentMapper mapper = service.documentMapper();
String joinField = fieldType.getJoinField();
DocumentFieldMappers fieldMappers = mapper.mappers();
return (ParentJoinFieldMapper) fieldMappers.getMapper(joinField);
}
private static String getParentIdFieldName(String joinFieldName, String parentName) {
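The hunk above changes how ParentJoinFieldMapper.getMapper resolves the join mapper: instead of reading a mapper back-reference stored on the meta field type, it asks the single document mapper's field-mapper lookup for the field whose name the meta field type remembers. A rough, self-contained sketch of that lookup shape, using simplified hypothetical types rather than the real API:

```java
import java.util.Map;

// Simplified stand-ins for the meta field type and the per-document field-mapper lookup.
final class JoinLookupSketch {

    record MetaJoinFieldType(String joinField) {}

    record FieldMappers(Map<String, Object> byName) {
        Object getMapper(String name) {
            return byName.get(name);
        }
    }

    // Resolve the join-field mapper by name through the (single) document mapper,
    // rather than through a back-pointer kept on the field type.
    static Object resolveJoinMapper(MetaJoinFieldType metaType, FieldMappers mappers) {
        if (metaType == null) {
            return null; // the index has no join field
        }
        return mappers.getMapper(metaType.joinField());
    }
}
```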
@@ -160,7 +168,7 @@ public final class ParentJoinFieldMapper extends FieldMapper {
})
.forEach(parentIdFields::add);
checkParentFields(name(), parentIdFields);
MetaJoinFieldMapper unique = new MetaJoinFieldMapper.Builder().build(context);
MetaJoinFieldMapper unique = new MetaJoinFieldMapper.Builder(name).build(context);
return new ParentJoinFieldMapper(name, fieldType, context.indexSettings(),
unique, Collections.unmodifiableList(parentIdFields), eagerGlobalOrdinals);
}
@@ -262,7 +270,6 @@ public final class ParentJoinFieldMapper extends FieldMapper {
super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, MultiFields.empty(), CopyTo.empty());
this.parentIdFields = parentIdFields;
this.uniqueFieldMapper = uniqueFieldMapper;
this.uniqueFieldMapper.setFieldMapper(this);
this.eagerGlobalOrdinals = eagerGlobalOrdinals;
}
@@ -353,20 +360,6 @@ public final class ParentJoinFieldMapper extends FieldMapper {
this.eagerGlobalOrdinals = joinMergeWith.eagerGlobalOrdinals;
this.parentIdFields = Collections.unmodifiableList(newParentIdFields);
this.uniqueFieldMapper = (MetaJoinFieldMapper) uniqueFieldMapper.merge(joinMergeWith.uniqueFieldMapper);
uniqueFieldMapper.setFieldMapper(this);
}
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
ParentJoinFieldMapper fieldMapper = (ParentJoinFieldMapper) super.updateFieldType(fullNameToFieldType);
final List<ParentIdFieldMapper> newMappers = new ArrayList<> ();
for (ParentIdFieldMapper mapper : fieldMapper.parentIdFields) {
newMappers.add((ParentIdFieldMapper) mapper.updateFieldType(fullNameToFieldType));
}
fieldMapper.parentIdFields = Collections.unmodifiableList(newMappers);
this.uniqueFieldMapper = (MetaJoinFieldMapper) uniqueFieldMapper.updateFieldType(fullNameToFieldType);
uniqueFieldMapper.setFieldMapper(this);
return fieldMapper;
}
@Override
@ -39,6 +39,8 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.DocumentFieldMappers;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.IdFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
|
@ -278,8 +280,13 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase {
|
|||
ParentJoinFieldMapper joinFieldMapper = createJoinFieldMapper();
|
||||
MapperService mapperService = mock(MapperService.class);
|
||||
MetaJoinFieldMapper.MetaJoinFieldType metaJoinFieldType = mock(MetaJoinFieldMapper.MetaJoinFieldType.class);
|
||||
when(metaJoinFieldType.getMapper()).thenReturn(joinFieldMapper);
|
||||
when(metaJoinFieldType.getJoinField()).thenReturn("join_field");
|
||||
when(mapperService.fieldType("_parent_join")).thenReturn(metaJoinFieldType);
|
||||
DocumentFieldMappers fieldMappers = new DocumentFieldMappers(Collections.singleton(joinFieldMapper),
|
||||
Collections.emptyList(), null, null, null);
|
||||
DocumentMapper mockMapper = mock(DocumentMapper.class);
|
||||
when(mockMapper.mappers()).thenReturn(fieldMappers);
|
||||
when(mapperService.documentMapper()).thenReturn(mockMapper);
|
||||
return mapperService;
|
||||
}
|
||||
|
||||
|
|
|
@ -39,6 +39,8 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.DocumentFieldMappers;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.IdFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
|
@ -166,8 +168,13 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
|
|||
ParentJoinFieldMapper joinFieldMapper = createJoinFieldMapper();
|
||||
MapperService mapperService = mock(MapperService.class);
|
||||
MetaJoinFieldMapper.MetaJoinFieldType metaJoinFieldType = mock(MetaJoinFieldMapper.MetaJoinFieldType.class);
|
||||
when(metaJoinFieldType.getMapper()).thenReturn(joinFieldMapper);
|
||||
when(metaJoinFieldType.getJoinField()).thenReturn("join_field");
|
||||
when(mapperService.fieldType("_parent_join")).thenReturn(metaJoinFieldType);
|
||||
DocumentFieldMappers fieldMappers = new DocumentFieldMappers(Collections.singleton(joinFieldMapper),
|
||||
Collections.emptyList(), null, null, null);
|
||||
DocumentMapper mockMapper = mock(DocumentMapper.class);
|
||||
when(mockMapper.mappers()).thenReturn(fieldMappers);
|
||||
when(mapperService.documentMapper()).thenReturn(mockMapper);
|
||||
return mapperService;
|
||||
}
|
||||
|
||||
|
|
|
@ -301,10 +301,11 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.endObject()
|
||||
.endObject()
|
||||
.endObject().endObject());
|
||||
docMapper = indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
|
||||
docMapper = indexService.mapperService().merge("_doc", new CompressedXContent(updateMapping),
|
||||
MapperService.MergeReason.MAPPING_UPDATE);
|
||||
assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
|
||||
ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService());
|
||||
assertNotNull(mapper);
|
||||
assertEquals("join_field", mapper.name());
|
||||
assertTrue(mapper.hasChild("child2"));
|
||||
assertFalse(mapper.hasParent("child2"));
|
||||
assertTrue(mapper.hasChild("grand_child2"));
|
||||
|
@ -322,10 +323,11 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.endObject()
|
||||
.endObject()
|
||||
.endObject().endObject());
|
||||
docMapper = indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
|
||||
docMapper = indexService.mapperService().merge("_doc", new CompressedXContent(updateMapping),
|
||||
MapperService.MergeReason.MAPPING_UPDATE);
|
||||
assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
|
||||
ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService());
|
||||
assertNotNull(mapper);
|
||||
assertEquals("join_field", mapper.name());
|
||||
assertTrue(mapper.hasParent("other"));
|
||||
assertFalse(mapper.hasChild("other"));
|
||||
assertTrue(mapper.hasChild("child_other1"));
|
||||
|
|
|
@ -362,30 +362,6 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
this.rangeFieldMapper = rangeFieldMapper;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
|
||||
PercolatorFieldMapper updated = (PercolatorFieldMapper) super.updateFieldType(fullNameToFieldType);
|
||||
KeywordFieldMapper queryTermsUpdated = (KeywordFieldMapper) queryTermsField.updateFieldType(fullNameToFieldType);
|
||||
KeywordFieldMapper extractionResultUpdated = (KeywordFieldMapper) extractionResultField.updateFieldType(fullNameToFieldType);
|
||||
BinaryFieldMapper queryBuilderUpdated = (BinaryFieldMapper) queryBuilderField.updateFieldType(fullNameToFieldType);
|
||||
RangeFieldMapper rangeFieldMapperUpdated = (RangeFieldMapper) rangeFieldMapper.updateFieldType(fullNameToFieldType);
|
||||
NumberFieldMapper msmFieldMapperUpdated = (NumberFieldMapper) minimumShouldMatchFieldMapper.updateFieldType(fullNameToFieldType);
|
||||
|
||||
if (updated == this && queryTermsUpdated == queryTermsField && extractionResultUpdated == extractionResultField
|
||||
&& queryBuilderUpdated == queryBuilderField && rangeFieldMapperUpdated == rangeFieldMapper) {
|
||||
return this;
|
||||
}
|
||||
if (updated == this) {
|
||||
updated = (PercolatorFieldMapper) updated.clone();
|
||||
}
|
||||
updated.queryTermsField = queryTermsUpdated;
|
||||
updated.extractionResultField = extractionResultUpdated;
|
||||
updated.queryBuilderField = queryBuilderUpdated;
|
||||
updated.rangeFieldMapper = rangeFieldMapperUpdated;
|
||||
updated.minimumShouldMatchFieldMapper = msmFieldMapperUpdated;
|
||||
return updated;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void parse(ParseContext context) throws IOException {
|
||||
QueryShardContext queryShardContext = this.queryShardContext.get();
|
||||
|
|
|
@@ -322,19 +322,6 @@ public class DocumentMapper implements ToXContentFragment {
return new DocumentMapper(mapperService, merged);
}
/**
* Recursively update sub field types.
*/
public DocumentMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
Mapping updated = this.mapping.updateFieldType(fullNameToFieldType);
if (updated == this.mapping) {
// no change
return this;
}
assert updated == updated.updateFieldType(fullNameToFieldType) : "updateFieldType operation is not idempotent";
return new DocumentMapper(mapperService, updated);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return mapping.toXContent(builder, params);
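The removed DocumentMapper.updateFieldType guarded itself with an idempotence assertion (`updated == updated.updateFieldType(...)`). That guard is a generally useful pattern for copy-on-write update methods; here is a generic, self-contained version of just that pattern (illustrative only, not part of the change):

```java
import java.util.function.UnaryOperator;

final class IdempotenceGuard {
    // Apply an update once and assert that applying it again is a no-op, i.e. the
    // operator returns the same instance when there is nothing left to change.
    static <T> T applyIdempotent(T value, UnaryOperator<T> update) {
        T updated = update.apply(value);
        assert updated == update.apply(updated) : "update operation is not idempotent";
        return updated;
    }
}
```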
@ -32,14 +32,11 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.mapper.DynamicTemplate.XContentFieldType;
|
||||
import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
|
||||
import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.format.DateTimeParseException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
@ -635,53 +632,6 @@ final class DocumentParser {
|
|||
}
|
||||
}
|
||||
|
||||
private static Mapper.Builder<?> createBuilderFromFieldType(final ParseContext context,
|
||||
MappedFieldType fieldType, String currentFieldName) {
|
||||
Mapper.Builder builder = null;
|
||||
if (fieldType instanceof TextFieldType) {
|
||||
builder = context.root().findTemplateBuilder(context, currentFieldName, "text", XContentFieldType.STRING);
|
||||
if (builder == null) {
|
||||
builder = new TextFieldMapper.Builder(currentFieldName)
|
||||
.addMultiField(new KeywordFieldMapper.Builder("keyword").ignoreAbove(256));
|
||||
}
|
||||
} else if (fieldType instanceof KeywordFieldType) {
|
||||
builder = context.root().findTemplateBuilder(context, currentFieldName, "keyword", XContentFieldType.STRING);
|
||||
} else {
|
||||
switch (fieldType.typeName()) {
|
||||
case DateFieldMapper.CONTENT_TYPE:
|
||||
builder = context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.DATE);
|
||||
break;
|
||||
case "long":
|
||||
builder = context.root().findTemplateBuilder(context, currentFieldName, "long", XContentFieldType.LONG);
|
||||
break;
|
||||
case "double":
|
||||
builder = context.root().findTemplateBuilder(context, currentFieldName, "double", XContentFieldType.DOUBLE);
|
||||
break;
|
||||
case "integer":
|
||||
builder = context.root().findTemplateBuilder(context, currentFieldName, "integer", XContentFieldType.LONG);
|
||||
break;
|
||||
case "float":
|
||||
builder = context.root().findTemplateBuilder(context, currentFieldName, "float", XContentFieldType.DOUBLE);
|
||||
break;
|
||||
case BooleanFieldMapper.CONTENT_TYPE:
|
||||
builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean", XContentFieldType.BOOLEAN);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (builder == null) {
|
||||
Mapper.TypeParser.ParserContext parserContext = context.docMapperParser().parserContext();
|
||||
Mapper.TypeParser typeParser = parserContext.typeParser(fieldType.typeName());
|
||||
if (typeParser == null) {
|
||||
throw new MapperParsingException("Cannot generate dynamic mappings of type [" + fieldType.typeName()
|
||||
+ "] for [" + currentFieldName + "]");
|
||||
}
|
||||
builder = typeParser.parse(currentFieldName, new HashMap<>(), parserContext);
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
private static Mapper.Builder<?> newLongBuilder(String name, Version indexCreated) {
|
||||
return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.LONG);
|
||||
}
|
||||
|
@@ -817,21 +767,9 @@ final class DocumentParser {
if (dynamic == ObjectMapper.Dynamic.FALSE) {
return;
}
final String path = context.path().pathAsText(currentFieldName);
final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings().getSettings(), context.path());
final MappedFieldType existingFieldType = context.mapperService().fieldType(path);
final Mapper.Builder builder;
if (existingFieldType != null) {
// create a builder of the same type
builder = createBuilderFromFieldType(context, existingFieldType, currentFieldName);
} else {
builder = createBuilderFromDynamicValue(context, token, currentFieldName);
}
final Mapper.Builder<?> builder = createBuilderFromDynamicValue(context, token, currentFieldName);
Mapper mapper = builder.build(builderContext);
if (existingFieldType != null) {
// try to not introduce a conflict
mapper = mapper.updateFieldType(Collections.singletonMap(path, existingFieldType));
}
context.addDynamicMapper(mapper);
parseObjectOrField(context, mapper);
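With the hunk above, a dynamically encountered value always goes through createBuilderFromDynamicValue; the branch that consulted an existing field type, and the follow-up updateFieldType call intended to avoid conflicts, are gone, leaving conflicts to surface through the regular mapping merge. A rough sketch of the value-driven dispatch idea, as toy code with invented names rather than the real DocumentParser:

```java
// Toy dispatch: pick a mapper builder from the parsed token alone; reconciling the
// result with any existing mapping is left to the normal merge step afterwards.
final class DynamicValueSketch {

    enum Token { STRING, NUMBER, BOOLEAN }

    static String builderForToken(Token token) {
        switch (token) {
            case STRING:  return "text";     // hypothetical default choices for this sketch
            case NUMBER:  return "number";
            case BOOLEAN: return "boolean";
            default:      throw new IllegalArgumentException("no dynamic mapping for " + token);
        }
    }
}
```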
@@ -74,11 +74,6 @@ public final class FieldAliasMapper extends Mapper {
return mergeWith;
}
@Override
public Mapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
return this;
}
@Override
public Iterator<Mapper> iterator() {
return Collections.emptyIterator();
@@ -413,27 +413,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
*/
protected abstract void mergeOptions(FieldMapper other, List<String> conflicts);
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.name());
if (newFieldType == null) {
// this field does not exist in the mappings yet
// this can happen if this mapper represents a mapping update
return this;
} else if (fieldType.getClass() != newFieldType.getClass()) {
throw new IllegalStateException("Mixing up field types: " +
fieldType.getClass() + " != " + newFieldType.getClass() + " on field " + fieldType.name());
}
MultiFields updatedMultiFields = multiFields.updateFieldType(fullNameToFieldType);
if (fieldType == newFieldType && multiFields == updatedMultiFields) {
return this; // no change
}
FieldMapper updated = clone();
updated.fieldType = newFieldType;
updated.multiFields = updatedMultiFields;
return updated;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(simpleName());
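The removed base-class override is a textbook copy-on-write update: return this when nothing changed, otherwise clone and swap in the new pieces. Subclasses keep overriding the merge-side hooks such as mergeOptions, which remain visible in these hunks. For reference, the general idiom in isolation, written with generic toy code rather than the Elasticsearch classes:

```java
import java.util.Map;

// Copy-on-write update: callers can rely on reference equality to detect "no change".
final class CopyOnWriteSketch implements Cloneable {
    Map<String, String> options;

    CopyOnWriteSketch(Map<String, String> options) {
        this.options = options;
    }

    CopyOnWriteSketch withOptions(Map<String, String> newOptions) {
        if (newOptions.equals(options)) {
            return this; // nothing changed: keep the shared instance
        }
        try {
            CopyOnWriteSketch copy = (CopyOnWriteSketch) clone();
            copy.options = Map.copyOf(newOptions);
            return copy;
        } catch (CloneNotSupportedException e) {
            throw new AssertionError(e); // cannot happen: the class is Cloneable
        }
    }
}
```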
@@ -637,27 +616,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
return new MultiFields(mappers);
}
public MultiFields updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
ImmutableOpenMap.Builder<String, FieldMapper> newMappersBuilder = null;
for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
FieldMapper updated = cursor.value.updateFieldType(fullNameToFieldType);
if (updated != cursor.value) {
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
}
newMappersBuilder.put(updated.simpleName(), updated);
}
}
if (newMappersBuilder == null) {
return this;
}
ImmutableOpenMap<String, FieldMapper> mappers = newMappersBuilder.build();
return new MultiFields(mappers);
}
public Iterator<Mapper> iterator() {
return StreamSupport.stream(mappers.values().spliterator(), false).map((p) -> (Mapper)p.value).iterator();
}
@@ -172,10 +172,4 @@ public abstract class Mapper implements ToXContentFragment, Iterable<Mapper> {
* Both {@code this} and {@code mergeWith} will be left unmodified. */
public abstract Mapper merge(Mapper mergeWith);
/**
* Update the field type of this mapper. This is necessary because some mapping updates
* can modify mappings across several types. This method must return a copy of the mapper
* so that the current mapper is not modified.
*/
public abstract Mapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType);
}
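After this change, as far as these hunks show, the base Mapper contract centres on merge alone; there is no longer a separate propagation hook to implement. In miniature, an analogous base class would look like the following heavily simplified, hypothetical sketch:

```java
// Hypothetical, stripped-down analogue of the remaining contract.
abstract class MiniMapper {

    abstract String name();

    /**
     * Return the result of merging {@code mergeWith} into this mapper.
     * Both {@code this} and {@code mergeWith} are left unmodified.
     */
    abstract MiniMapper merge(MiniMapper mergeWith);
}
```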
@ -119,7 +119,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
@Deprecated
|
||||
public static final Setting<Boolean> INDEX_MAPPER_DYNAMIC_SETTING =
|
||||
Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT,
|
||||
Property.Dynamic, Property.IndexScope, Property.Deprecated);
|
||||
Property.Dynamic, Property.IndexScope, Property.Deprecated);
|
||||
|
||||
//TODO this needs to be cleaned up: _timestamp and _ttl are not supported anymore, _field_names, _seq_no, _version and _source are
|
||||
//also missing, not sure if on purpose. See IndicesModule#getMetadataMappers
|
||||
|
@ -164,7 +164,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
this.indexAnalyzers = indexAnalyzers;
|
||||
this.fieldTypes = new FieldTypeLookup();
|
||||
this.documentParser = new DocumentMapperParser(indexSettings, this, xContentRegistry, similarityService, mapperRegistry,
|
||||
queryShardContextSupplier);
|
||||
queryShardContextSupplier);
|
||||
this.indexAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultIndexAnalyzer(), p -> p.indexAnalyzer());
|
||||
this.searchAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultSearchAnalyzer(), p -> p.searchAnalyzer());
|
||||
this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer());
|
||||
|
@ -172,7 +172,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
this.idFieldDataEnabled = idFieldDataEnabled;
|
||||
|
||||
if (INDEX_MAPPER_DYNAMIC_SETTING.exists(indexSettings.getSettings()) &&
|
||||
indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException("Setting " + INDEX_MAPPER_DYNAMIC_SETTING.getKey() + " was removed after version 6.0.0");
|
||||
}
|
||||
|
||||
|
@ -204,7 +204,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
*/
|
||||
public static Map<String, Object> parseMapping(NamedXContentRegistry xContentRegistry, String mappingSource) throws Exception {
|
||||
try (XContentParser parser = XContentType.JSON.xContent()
|
||||
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, mappingSource)) {
|
||||
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, mappingSource)) {
|
||||
return parser.map();
|
||||
}
|
||||
}
|
||||
|
@ -272,12 +272,12 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
}
|
||||
|
||||
private void assertMappingVersion(
|
||||
final IndexMetadata currentIndexMetadata,
|
||||
final IndexMetadata newIndexMetadata,
|
||||
final Map<String, DocumentMapper> updatedEntries) {
|
||||
final IndexMetadata currentIndexMetadata,
|
||||
final IndexMetadata newIndexMetadata,
|
||||
final Map<String, DocumentMapper> updatedEntries) {
|
||||
if (Assertions.ENABLED
|
||||
&& currentIndexMetadata != null
|
||||
&& currentIndexMetadata.getCreationVersion().onOrAfter(Version.V_6_5_0)) {
|
||||
&& currentIndexMetadata != null
|
||||
&& currentIndexMetadata.getCreationVersion().onOrAfter(Version.V_6_5_0)) {
|
||||
if (currentIndexMetadata.getMappingVersion() == newIndexMetadata.getMappingVersion()) {
|
||||
// if the mapping version is unchanged, then there should not be any updates and all mappings should be the same
|
||||
assert updatedEntries.isEmpty() : updatedEntries;
|
||||
|
@ -287,8 +287,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
final CompressedXContent currentSource = currentIndexMetadata.defaultMapping().source();
|
||||
final CompressedXContent newSource = defaultMapping.source();
|
||||
assert currentSource.equals(newSource) :
|
||||
"expected current mapping [" + currentSource + "] for type [" + defaultMapping.type() + "] "
|
||||
+ "to be the same as new mapping [" + newSource + "]";
|
||||
"expected current mapping [" + currentSource + "] for type [" + defaultMapping.type() + "] "
|
||||
+ "to be the same as new mapping [" + newSource + "]";
|
||||
}
|
||||
|
||||
MappingMetadata mapping = newIndexMetadata.mapping();
|
||||
|
@ -296,8 +296,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
final CompressedXContent currentSource = currentIndexMetadata.mapping().source();
|
||||
final CompressedXContent newSource = mapping.source();
|
||||
assert currentSource.equals(newSource) :
|
||||
"expected current mapping [" + currentSource + "] for type [" + mapping.type() + "] "
|
||||
+ "to be the same as new mapping [" + newSource + "]";
|
||||
"expected current mapping [" + currentSource + "] for type [" + mapping.type() + "] "
|
||||
+ "to be the same as new mapping [" + newSource + "]";
|
||||
}
|
||||
|
||||
} else {
|
||||
|
@ -305,8 +305,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
final long currentMappingVersion = currentIndexMetadata.getMappingVersion();
|
||||
final long newMappingVersion = newIndexMetadata.getMappingVersion();
|
||||
assert currentMappingVersion < newMappingVersion :
|
||||
"expected current mapping version [" + currentMappingVersion + "] "
|
||||
+ "to be less than new mapping version [" + newMappingVersion + "]";
|
||||
"expected current mapping version [" + currentMappingVersion + "] "
|
||||
+ "to be less than new mapping version [" + newMappingVersion + "]";
|
||||
assert updatedEntries.isEmpty() == false;
|
||||
for (final DocumentMapper documentMapper : updatedEntries.values()) {
|
||||
final MappingMetadata currentMapping;
|
||||
|
@ -320,8 +320,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
final CompressedXContent currentSource = currentMapping.source();
|
||||
final CompressedXContent newSource = documentMapper.mappingSource();
|
||||
assert currentSource.equals(newSource) == false :
|
||||
"expected current mapping [" + currentSource + "] for type [" + documentMapper.type() + "] " +
|
||||
"to be different than new mapping";
|
||||
"expected current mapping [" + currentSource + "] for type [" + documentMapper.type() + "] " +
|
||||
"to be different than new mapping";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -512,7 +512,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
// this check will be skipped.
|
||||
// Also, don't take metadata mappers into account for the field limit check
|
||||
checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size() - metadataMappers.length
|
||||
+ fieldAliasMappers.size() );
|
||||
+ fieldAliasMappers.size());
|
||||
checkFieldNameSoftLimit(objectMappers, fieldMappers, fieldAliasMappers);
|
||||
}
|
||||
|
||||
|
@@ -530,15 +530,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
checkIndexSortCompatibility(indexSettings.getIndexSortConfig(), hasNested);
if (newMapper != null) {
DocumentMapper updatedDocumentMapper = newMapper.updateFieldType(fieldTypes.fullNameToFieldType);
if (updatedDocumentMapper != newMapper) {
// update both mappers and result
newMapper = updatedDocumentMapper;
results.put(updatedDocumentMapper.type(), updatedDocumentMapper);
}
}
// make structures immutable
results = Collections.unmodifiableMap(results);
@@ -564,24 +555,11 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
this.hasNested = hasNested;
this.fullPathObjectMappers = fullPathObjectMappers;
assert assertMappersShareSameFieldType();
assert results.values().stream().allMatch(this::assertSerialization);
return results;
}
private boolean assertMappersShareSameFieldType() {
if (mapper != null) {
List<FieldMapper> fieldMappers = new ArrayList<>();
Collections.addAll(fieldMappers, mapper.mapping().metadataMappers);
MapperUtils.collect(mapper.root(), new ArrayList<>(), fieldMappers, new ArrayList<>());
for (FieldMapper fieldMapper : fieldMappers) {
assert fieldMapper.fieldType() == fieldTypes.get(fieldMapper.name()) : fieldMapper.name();
}
}
return true;
}
private boolean assertSerialization(DocumentMapper mapper) {
// capture the source now, it may change due to concurrent parsing
final CompressedXContent mappingSource = mapper.mappingSource();
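With the propagation pass and the shared-field-type assertion removed in the two hunks above, the merge path reduces to a single pass: merge, validate, publish. A self-contained toy of that shape, using hypothetical names rather than the MapperService internals:

```java
import java.util.HashMap;
import java.util.Map;

// Toy version of the simplified flow: the merge result is authoritative on its own and
// is published directly, with no follow-up field-type propagation pass.
final class SingleTypeMergeSketch {

    record DocMapper(String type, Map<String, String> fields) {
        DocMapper merge(DocMapper incoming) {
            Map<String, String> merged = new HashMap<>(fields);
            merged.putAll(incoming.fields());
            return new DocMapper(type, merged);
        }
    }

    static DocMapper apply(DocMapper existing, DocMapper incoming) {
        return existing == null ? incoming : existing.merge(incoming);
    }
}
```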
@ -634,7 +612,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
final int depth = numDots + 2;
|
||||
if (depth > maxDepth) {
|
||||
throw new IllegalArgumentException("Limit of mapping depth [" + maxDepth + "] in index [" + index().getName()
|
||||
+ "] has been exceeded due to object field [" + objectPath + "]");
|
||||
+ "] has been exceeded due to object field [" + objectPath + "]");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -660,7 +638,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
if (indexSettings.getIndexMetadata().isRoutingPartitionedIndex()) {
|
||||
if (!newMapper.routingFieldMapper().required()) {
|
||||
throw new IllegalArgumentException("mapping type [" + newMapper.type() + "] must have routing "
|
||||
+ "required for partitioned index [" + indexSettings.getIndex().getName() + "]");
|
||||
+ "required for partitioned index [" + indexSettings.getIndex().getName() + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -723,7 +701,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
/**
|
||||
* Resolves a type from a mapping-related request into the type that should be used when
|
||||
* merging and updating mappings.
|
||||
*
|
||||
* <p>
|
||||
* If the special `_doc` type is provided, then we replace it with the actual type that is
|
||||
* being used in the mappings. This allows typeless APIs such as 'index' or 'put mappings'
|
||||
* to work against indices with a custom type name.
|
||||
|
@ -798,7 +776,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
}
|
||||
final Mapper.Builder<?> builder = typeParser.parse("__anonymous_" + type, emptyMap(), parserContext);
|
||||
final BuilderContext builderContext = new BuilderContext(indexSettings.getSettings(), new ContentPath(1));
|
||||
fieldType = ((FieldMapper)builder.build(builderContext)).fieldType();
|
||||
fieldType = ((FieldMapper) builder.build(builderContext)).fieldType();
|
||||
|
||||
// There is no need to synchronize writes here. In the case of concurrent access, we could just
|
||||
// compute some mappers several times, which is not a big deal
|
||||
|
@ -844,7 +822,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
return Arrays.copyOf(SORTED_META_FIELDS, SORTED_META_FIELDS.length);
|
||||
}
|
||||
|
||||
/** An analyzer wrapper that can lookup fields within the index mappings */
|
||||
/**
|
||||
* An analyzer wrapper that can lookup fields within the index mappings
|
||||
*/
|
||||
final class MapperAnalyzerWrapper extends DelegatingAnalyzerWrapper {
|
||||
|
||||
private final Analyzer defaultAnalyzer;
|
||||
|
|
|
@@ -104,28 +104,6 @@ public final class Mapping implements ToXContentFragment {
return new Mapping(indexCreated, mergedRoot, mergedMetadataMappers.values().toArray(new MetadataFieldMapper[0]), mergedMeta);
}
/**
* Recursively update sub field types.
*/
public Mapping updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
MetadataFieldMapper[] updatedMeta = null;
for (int i = 0; i < metadataMappers.length; ++i) {
MetadataFieldMapper currentFieldMapper = metadataMappers[i];
MetadataFieldMapper updatedFieldMapper = (MetadataFieldMapper) currentFieldMapper.updateFieldType(fullNameToFieldType);
if (updatedFieldMapper != currentFieldMapper) {
if (updatedMeta == null) {
updatedMeta = Arrays.copyOf(metadataMappers, metadataMappers.length);
}
updatedMeta[i] = updatedFieldMapper;
}
}
RootObjectMapper updatedRoot = root.updateFieldType(fullNameToFieldType);
if (updatedMeta == null && updatedRoot == root) {
return this;
}
return new Mapping(indexCreated, updatedRoot, updatedMeta == null ? metadataMappers : updatedMeta, meta);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
root.toXContent(builder, params, new ToXContent() {
@@ -494,28 +494,6 @@ public class ObjectMapper extends Mapper implements Cloneable {
}
}
@Override
public ObjectMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
List<Mapper> updatedMappers = null;
for (Mapper mapper : this) {
Mapper updated = mapper.updateFieldType(fullNameToFieldType);
if (mapper != updated) {
if (updatedMappers == null) {
updatedMappers = new ArrayList<>();
}
updatedMappers.add(updated);
}
}
if (updatedMappers == null) {
return this;
}
ObjectMapper updated = clone();
for (Mapper updatedMapper : updatedMappers) {
updated.putMapper(updatedMapper);
}
return updated;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
toXContent(builder, params, null);
@@ -301,11 +301,6 @@ public class RootObjectMapper extends ObjectMapper {
}
}
@Override
public RootObjectMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
return (RootObjectMapper) super.updateFieldType(fullNameToFieldType);
}
@Override
protected void doXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
final boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
@@ -856,18 +856,6 @@ public class TextFieldMapper extends FieldMapper {
return CONTENT_TYPE;
}
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
TextFieldMapper mapper = (TextFieldMapper) super.updateFieldType(fullNameToFieldType);
if (mapper.prefixFieldMapper != null) {
mapper.prefixFieldMapper = (PrefixFieldMapper) mapper.prefixFieldMapper.updateFieldType(fullNameToFieldType);
}
if (mapper.phraseFieldMapper != null) {
mapper.phraseFieldMapper = (PhraseFieldMapper) mapper.phraseFieldMapper.updateFieldType(fullNameToFieldType);
}
return mapper;
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
TextFieldMapper mw = (TextFieldMapper) other;
@ -18,7 +18,6 @@
|
|||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
|
@ -34,9 +33,6 @@ import org.elasticsearch.common.xcontent.XContentType;
|
|||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.mapper.BooleanFieldMapper.BooleanFieldType;
|
||||
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
|
||||
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
|
@ -49,6 +45,7 @@ import java.util.Collections;
|
|||
import static java.util.Collections.emptyMap;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
@ -436,104 +433,37 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
.endObject().endObject().endObject()), serialize(update));
|
||||
}
|
||||
|
||||
public void testReuseExistingMappings() throws IOException, Exception {
|
||||
public void testReuseExistingMappings() throws Exception {
|
||||
|
||||
IndexService indexService = createIndex("test", Settings.EMPTY, "type",
|
||||
"my_field1", "type=text,store=true",
|
||||
"my_field2", "type=integer,store=false",
|
||||
"my_field3", "type=long,doc_values=false",
|
||||
"my_field4", "type=float,index=false",
|
||||
"my_field5", "type=double,store=true",
|
||||
"my_field6", "type=date,doc_values=false",
|
||||
"my_field7", "type=boolean,doc_values=false");
|
||||
"my_field1", "type=text,store=true",
|
||||
"my_field2", "type=integer,store=false",
|
||||
"my_field3", "type=long,doc_values=false",
|
||||
"my_field4", "type=float,index=false",
|
||||
"my_field5", "type=double,store=true",
|
||||
"my_field6", "type=date,doc_values=false",
|
||||
"my_field7", "type=boolean,doc_values=false");
|
||||
|
||||
// Even if the dynamic type of our new field is long, we already have a mapping for the same field
|
||||
// of type string so it should be mapped as a string
|
||||
DocumentMapper newMapper = indexService.mapperService().documentMapperWithAutoCreate("type2").getDocumentMapper();
|
||||
DocumentMapper newMapper = indexService.mapperService().documentMapperWithAutoCreate("type").getDocumentMapper();
|
||||
Mapper update = parse(newMapper, indexService.mapperService().documentMapperParser(),
|
||||
XContentFactory.jsonBuilder().startObject()
|
||||
.field("my_field1", 42)
|
||||
.field("my_field2", 43)
|
||||
.field("my_field3", 44)
|
||||
.field("my_field4", 45)
|
||||
.field("my_field5", 46)
|
||||
.field("my_field6", Instant.now().toEpochMilli())
|
||||
.field("my_field7", true)
|
||||
XContentFactory.jsonBuilder().startObject()
|
||||
.field("my_field1", 42)
|
||||
.field("my_field2", 43)
|
||||
.field("my_field3", 44)
|
||||
.field("my_field4", 45)
|
||||
.field("my_field5", 46)
|
||||
.field("my_field6", Instant.now().toEpochMilli())
|
||||
.field("my_field7", true)
|
||||
.endObject());
|
||||
Mapper myField1Mapper = null;
|
||||
Mapper myField2Mapper = null;
|
||||
Mapper myField3Mapper = null;
|
||||
Mapper myField4Mapper = null;
|
||||
Mapper myField5Mapper = null;
|
||||
Mapper myField6Mapper = null;
|
||||
Mapper myField7Mapper = null;
|
||||
for (Mapper m : update) {
|
||||
switch (m.name()) {
|
||||
case "my_field1":
|
||||
myField1Mapper = m;
|
||||
break;
|
||||
case "my_field2":
|
||||
myField2Mapper = m;
|
||||
break;
|
||||
case "my_field3":
|
||||
myField3Mapper = m;
|
||||
break;
|
||||
case "my_field4":
|
||||
myField4Mapper = m;
|
||||
break;
|
||||
case "my_field5":
|
||||
myField5Mapper = m;
|
||||
break;
|
||||
case "my_field6":
|
||||
myField6Mapper = m;
|
||||
break;
|
||||
case "my_field7":
|
||||
myField7Mapper = m;
|
||||
break;
|
||||
}
|
||||
}
|
||||
assertNotNull(myField1Mapper);
|
||||
// same type
|
||||
assertTrue(myField1Mapper instanceof TextFieldMapper);
|
||||
// and same option
|
||||
assertTrue(((TextFieldMapper) myField1Mapper).fieldType().stored());
|
||||
assertNull(update);
|
||||
|
||||
// Even if dynamic mappings would map a numeric field as a long, here it should map it as a integer
|
||||
// since we already have a mapping of type integer
|
||||
assertNotNull(myField2Mapper);
|
||||
// same type
|
||||
assertEquals("integer", ((FieldMapper) myField2Mapper).fieldType().typeName());
|
||||
// and same option
|
||||
assertFalse(((FieldMapper) myField2Mapper).fieldType().stored());
|
||||
|
||||
assertNotNull(myField3Mapper);
|
||||
assertTrue(myField3Mapper instanceof NumberFieldMapper);
|
||||
assertFalse(((NumberFieldType) ((NumberFieldMapper) myField3Mapper).fieldType()).hasDocValues());
|
||||
|
||||
assertNotNull(myField4Mapper);
|
||||
assertTrue(myField4Mapper instanceof NumberFieldMapper);
|
||||
assertEquals(IndexOptions.NONE, ((FieldMapper) myField4Mapper).fieldType().indexOptions());
|
||||
|
||||
assertNotNull(myField5Mapper);
|
||||
|
||||
assertTrue(myField5Mapper instanceof NumberFieldMapper);
|
||||
assertTrue(((NumberFieldMapper) myField5Mapper).fieldType().stored());
|
||||
|
||||
assertNotNull(myField6Mapper);
|
||||
assertTrue(myField6Mapper instanceof DateFieldMapper);
|
||||
assertFalse(((DateFieldType) ((DateFieldMapper) myField6Mapper).fieldType()).hasDocValues());
|
||||
|
||||
assertNotNull(myField7Mapper);
|
||||
assertTrue(myField7Mapper instanceof BooleanFieldMapper);
|
||||
assertFalse(((BooleanFieldType) ((BooleanFieldMapper) myField7Mapper).fieldType()).hasDocValues());
|
||||
|
||||
// This can't work
|
||||
try {
|
||||
MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
|
||||
parse(newMapper, indexService.mapperService().documentMapperParser(),
|
||||
XContentFactory.jsonBuilder().startObject().field("my_field2", "foobar").endObject());
|
||||
fail("Cannot succeed, incompatible types");
|
||||
} catch (MapperParsingException e) {
|
||||
// expected
|
||||
}
|
||||
XContentFactory.jsonBuilder().startObject().field("my_field2", "foobar").endObject());
|
||||
});
|
||||
assertThat(e.getMessage(), containsString("failed to parse field [my_field2] of type [integer]"));
|
||||
}
|
||||
|
||||
public void testMixTemplateMultiFieldAndMappingReuse() throws Exception {
|
||||
|
|
|
@ -207,37 +207,6 @@ public class ExternalMapper extends FieldMapper {
|
|||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
|
||||
ExternalMapper update = (ExternalMapper) super.updateFieldType(fullNameToFieldType);
|
||||
MultiFields multiFieldsUpdate = multiFields.updateFieldType(fullNameToFieldType);
|
||||
BinaryFieldMapper binMapperUpdate = (BinaryFieldMapper) binMapper.updateFieldType(fullNameToFieldType);
|
||||
BooleanFieldMapper boolMapperUpdate = (BooleanFieldMapper) boolMapper.updateFieldType(fullNameToFieldType);
|
||||
GeoPointFieldMapper pointMapperUpdate = (GeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType);
|
||||
AbstractShapeGeometryFieldMapper shapeMapperUpdate =
|
||||
(AbstractShapeGeometryFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType);
|
||||
TextFieldMapper stringMapperUpdate = (TextFieldMapper) stringMapper.updateFieldType(fullNameToFieldType);
|
||||
if (update == this
|
||||
&& multiFieldsUpdate == multiFields
|
||||
&& binMapperUpdate == binMapper
|
||||
&& boolMapperUpdate == boolMapper
|
||||
&& pointMapperUpdate == pointMapper
|
||||
&& shapeMapperUpdate == shapeMapper
|
||||
&& stringMapperUpdate == stringMapper) {
|
||||
return this;
|
||||
}
|
||||
if (update == this) {
|
||||
update = (ExternalMapper) clone();
|
||||
}
|
||||
update.multiFields = multiFieldsUpdate;
|
||||
update.binMapper = binMapperUpdate;
|
||||
update.boolMapper = boolMapperUpdate;
|
||||
update.pointMapper = pointMapperUpdate;
|
||||
update.shapeMapper = shapeMapperUpdate;
|
||||
update.stringMapper = stringMapperUpdate;
|
||||
return update;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<Mapper> iterator() {
|
||||
return Iterators.concat(super.iterator(), Arrays.asList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator());
|
||||
|
|
|
@ -51,16 +51,19 @@ import org.apache.lucene.util.TestUtil;
|
|||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.DateFieldMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.mapper.IpFieldMapper;
|
||||
import org.elasticsearch.index.mapper.KeywordFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.AggregatorTestCase;
|
||||
|
@ -101,6 +104,8 @@ import java.util.stream.Collectors;
|
|||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class CompositeAggregatorTests extends AggregatorTestCase {
|
||||
private static MappedFieldType[] FIELD_TYPES;
|
||||
|
@ -153,6 +158,16 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
|
|||
FIELD_TYPES = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MapperService mapperServiceMock() {
|
||||
MapperService mapperService = mock(MapperService.class);
|
||||
DocumentMapper mapper = mock(DocumentMapper.class);
|
||||
when(mapper.typeText()).thenReturn(new Text("_doc"));
|
||||
when(mapper.type()).thenReturn("_doc");
|
||||
when(mapperService.documentMapper()).thenReturn(mapper);
|
||||
return mapperService;
|
||||
}
|
||||
|
||||
public void testUnmappedField() throws Exception {
|
||||
final List<Map<String, List<Object>>> dataset = new ArrayList<>();
|
||||
dataset.addAll(
|
||||
|
@ -2102,7 +2117,7 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
|
|||
}
|
||||
return sortFields.size() > 0 ? new Sort(sortFields.toArray(new SortField[0])) : null;
|
||||
}
|
||||
|
||||
|
||||
private static SortField sortFieldFrom(MappedFieldType type) {
|
||||
if (type instanceof KeywordFieldMapper.KeywordFieldType) {
|
||||
return new SortedSetSortField(type.name(), false);
|
||||
|
|
|
@ -40,10 +40,13 @@ import org.apache.lucene.util.BytesRef;
|
|||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.IdFieldMapper;
|
||||
import org.elasticsearch.index.mapper.KeywordFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.RangeFieldMapper;
|
||||
import org.elasticsearch.index.mapper.RangeType;
|
||||
|
@ -79,6 +82,8 @@ import java.util.function.Consumer;
|
|||
import static org.elasticsearch.index.mapper.SeqNoFieldMapper.PRIMARY_TERM_NAME;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class RareTermsAggregatorTests extends AggregatorTestCase {
|
||||
|
||||
|
@ -96,6 +101,16 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
|
|||
dataset = d;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MapperService mapperServiceMock() {
|
||||
MapperService mapperService = mock(MapperService.class);
|
||||
DocumentMapper mapper = mock(DocumentMapper.class);
|
||||
when(mapper.typeText()).thenReturn(new Text("_doc"));
|
||||
when(mapper.type()).thenReturn("_doc");
|
||||
when(mapperService.documentMapper()).thenReturn(mapper);
|
||||
return mapperService;
|
||||
}
|
||||
|
||||
public void testMatchNoDocs() throws IOException {
|
||||
testBothCases(new MatchNoDocsQuery(), dataset,
|
||||
aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1),
|
||||
|
|
|
@ -42,13 +42,16 @@ import org.apache.lucene.util.NumericUtils;
|
|||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.network.InetAddresses;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.util.MockBigArrays;
|
||||
import org.elasticsearch.common.util.MockPageCacheRecycler;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.mapper.IdFieldMapper;
|
||||
import org.elasticsearch.index.mapper.IpFieldMapper;
|
||||
import org.elasticsearch.index.mapper.KeywordFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.RangeFieldMapper;
|
||||
import org.elasticsearch.index.mapper.RangeType;
|
||||
|
@ -114,6 +117,8 @@ import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.b
|
|||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class TermsAggregatorTests extends AggregatorTestCase {
|
||||
|
||||
|
@ -123,6 +128,16 @@ public class TermsAggregatorTests extends AggregatorTestCase {
|
|||
private static final String STRING_SCRIPT_NAME = "string_script";
|
||||
private static final String STRING_SCRIPT_OUTPUT = "Orange";
|
||||
|
||||
@Override
|
||||
protected MapperService mapperServiceMock() {
|
||||
MapperService mapperService = mock(MapperService.class);
|
||||
DocumentMapper mapper = mock(DocumentMapper.class);
|
||||
when(mapper.typeText()).thenReturn(new Text("_doc"));
|
||||
when(mapper.type()).thenReturn("_doc");
|
||||
when(mapperService.documentMapper()).thenReturn(mapper);
|
||||
return mapperService;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ScriptService getMockScriptService() {
|
||||
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
|
||||
|
|
|
@ -39,9 +39,12 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.IdFieldMapper;
|
||||
import org.elasticsearch.index.mapper.KeywordFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
import org.elasticsearch.search.SearchHits;
|
||||
import org.elasticsearch.search.aggregations.Aggregation;
|
||||
|
@ -56,8 +59,21 @@ import java.io.IOException;
|
|||
|
||||
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
|
||||
import static org.elasticsearch.search.aggregations.AggregationBuilders.topHits;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class TopHitsAggregatorTests extends AggregatorTestCase {
|
||||
|
||||
@Override
|
||||
protected MapperService mapperServiceMock() {
|
||||
MapperService mapperService = mock(MapperService.class);
|
||||
DocumentMapper mapper = mock(DocumentMapper.class);
|
||||
when(mapper.typeText()).thenReturn(new Text("type"));
|
||||
when(mapper.type()).thenReturn("type");
|
||||
when(mapperService.documentMapper()).thenReturn(mapper);
|
||||
return mapperService;
|
||||
}
|
||||
|
||||
public void testTopLevel() throws Exception {
|
||||
Aggregation result;
|
||||
if (randomBoolean()) {
|
||||
|
|
|
@ -55,7 +55,6 @@ import org.elasticsearch.common.lease.Releasables;
|
|||
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
|
||||
import org.elasticsearch.common.network.NetworkAddress;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.common.util.MockBigArrays;
|
||||
import org.elasticsearch.common.util.MockPageCacheRecycler;
|
||||
|
@ -74,7 +73,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
|||
import org.elasticsearch.index.mapper.BinaryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.CompletionFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.FieldAliasMapper;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
|
||||
|
@ -300,10 +298,6 @@ public abstract class AggregatorTestCase extends ESTestCase {
|
|||
MapperService mapperService = mapperServiceMock();
|
||||
when(mapperService.getIndexSettings()).thenReturn(indexSettings);
|
||||
when(mapperService.hasNested()).thenReturn(false);
|
||||
DocumentMapper mapper = mock(DocumentMapper.class);
|
||||
when(mapper.typeText()).thenReturn(new Text(TYPE_NAME));
|
||||
when(mapper.type()).thenReturn(TYPE_NAME);
|
||||
when(mapperService.documentMapper()).thenReturn(mapper);
|
||||
when(searchContext.mapperService()).thenReturn(mapperService);
|
||||
IndexFieldDataService ifds = new IndexFieldDataService(indexSettings,
|
||||
new IndicesFieldDataCache(Settings.EMPTY, new IndexFieldDataCache.Listener() {
|
||||
|
@ -499,7 +493,7 @@ public abstract class AggregatorTestCase extends ESTestCase {
|
|||
a.preCollection();
|
||||
subSearcher.search(weight, a);
|
||||
a.postCollection();
|
||||
InternalAggregation agg = a.buildTopLevel();
|
||||
InternalAggregation agg = a.buildTopLevel();
|
||||
aggs.add(agg);
|
||||
InternalAggregationTestCase.assertMultiBucketConsumer(agg, shardBucketConsumer);
|
||||
}
|
||||
|
|