Make mappings immutable.
Today mappings are mutable because of two APIs:

- Mapper.merge, which expects changes to be performed in place
- IncludeInAll, which changes, in place, whether values should be put in the `_all` field

This commit changes both APIs to return a modified copy instead of modifying in place, so that mappings can be immutable. For now, only the type-level object is immutable, but in the future we could make mappings immutable at the index level as well, so that mapping updates become completely atomic at the index level.

Close #9365
parent 5f7b863067
commit 50eeafa75c
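The gist of the API change, as a minimal self-contained sketch (the class and names below, such as FieldDef and ImmutableMergeSketch, are made up for illustration and are not the Elasticsearch classes touched by this commit): instead of mutating the existing mapper and collecting conflicts in a MergeResult, a mapper now clones itself, applies the changes to the copy, throws on conflict, and hands the copy back so the caller can publish it atomically.

// Toy example of the copy-on-merge pattern this commit introduces.
public final class ImmutableMergeSketch {

    static class FieldDef implements Cloneable {
        final String name;        // identity; may not change across a merge
        Boolean includeInAll;     // a changeable setting

        FieldDef(String name, Boolean includeInAll) {
            this.name = name;
            this.includeInAll = includeInAll;
        }

        @Override
        protected FieldDef clone() {
            try {
                return (FieldDef) super.clone();
            } catch (CloneNotSupportedException e) {
                throw new AssertionError(e);
            }
        }

        // New-style merge: return a modified copy, throw on conflict,
        // and leave both `this` and `mergeWith` untouched.
        FieldDef merge(FieldDef mergeWith) {
            if (!name.equals(mergeWith.name)) {
                throw new IllegalArgumentException("cannot merge [" + mergeWith.name + "] into [" + name + "]");
            }
            FieldDef merged = clone();
            merged.includeInAll = mergeWith.includeInAll;
            return merged;
        }
    }

    public static void main(String[] args) {
        FieldDef current = new FieldDef("title", Boolean.TRUE);
        FieldDef update = new FieldDef("title", Boolean.FALSE);
        FieldDef merged = current.merge(update);
        // `current` is unchanged; the caller swaps in `merged` as the new mapping.
        System.out.println(current.includeInAll + " -> " + merged.includeInAll); // true -> false
    }
}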
@@ -37,7 +37,6 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InvalidTypeNameException;
import org.elasticsearch.percolator.PercolatorService;

@@ -251,11 +250,8 @@ public class MetaDataMappingService extends AbstractComponent {
newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null);
if (existingMapper != null) {
// first, simulate
MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes());
// if we have conflicts, throw an exception
if (mergeResult.hasConflicts()) {
throw new IllegalArgumentException("Merge failed with failures {" + Arrays.toString(mergeResult.buildConflicts()) + "}");
}
// this will just throw exceptions in case of problems
existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes());
} else {
// TODO: can we find a better place for this validation?
// The reason this validation is here is that the mapper service doesn't learn about

@@ -52,6 +52,7 @@ import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;

@@ -117,7 +118,7 @@ public class DocumentMapper implements ToXContent {

private volatile CompressedXContent mappingSource;

private final Mapping mapping;
private volatile Mapping mapping;

private final DocumentParser documentParser;

@@ -352,16 +353,19 @@ public class DocumentMapper implements ToXContent {
mapperService.addMappers(type, objectMappers, fieldMappers);
}

public MergeResult merge(Mapping mapping, boolean simulate, boolean updateAllTypes) {
public void merge(Mapping mapping, boolean simulate, boolean updateAllTypes) {
try (ReleasableLock lock = mappingWriteLock.acquire()) {
mapperService.checkMappersCompatibility(type, mapping, updateAllTypes);
final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes);
this.mapping.merge(mapping, mergeResult);
// do the merge even if simulate == false so that we get exceptions
Mapping merged = this.mapping.merge(mapping, updateAllTypes);
if (simulate == false) {
addMappers(mergeResult.getNewObjectMappers(), mergeResult.getNewFieldMappers(), updateAllTypes);
this.mapping = merged;
Collection<ObjectMapper> objectMappers = new ArrayList<>();
Collection<FieldMapper> fieldMappers = new ArrayList<>(Arrays.asList(merged.metadataMappers));
MapperUtils.collect(merged.root, objectMappers, fieldMappers);
addMappers(objectMappers, fieldMappers, updateAllTypes);
refreshSource();
}
return mergeResult;
}
}

@@ -267,7 +267,7 @@ class DocumentParser implements Closeable {
if (update == null) {
update = newUpdate;
} else {
MapperUtils.merge(update, newUpdate);
update = update.merge(newUpdate, false);
}
}
}

@@ -759,7 +759,7 @@ class DocumentParser implements Closeable {
private static <M extends Mapper> M parseAndMergeUpdate(M mapper, ParseContext context) throws IOException {
final Mapper update = parseObjectOrField(context, mapper);
if (update != null) {
MapperUtils.merge(mapper, update);
mapper = (M) mapper.merge(update, false);
}
return mapper;
}

@@ -47,7 +47,7 @@ import java.util.List;
import java.util.Locale;
import java.util.stream.StreamSupport;

public abstract class FieldMapper extends Mapper {
public abstract class FieldMapper extends Mapper implements Cloneable {

public abstract static class Builder<T extends Builder, Y extends FieldMapper> extends Mapper.Builder<T, Y> {

@@ -84,8 +84,13 @@ public abstract class FieldMapper extends Mapper {
* if the fieldType has a non-null option we are all good it might have been set through a different
* call.
*/
final IndexOptions options = getDefaultIndexOption();
assert options != IndexOptions.NONE : "default IndexOptions is NONE can't enable indexing";
IndexOptions options = getDefaultIndexOption();
if (options == IndexOptions.NONE) {
// can happen when an existing type on the same index has disabled indexing
// since we inherit the default field type from the first mapper that is
// created on an index
throw new IllegalArgumentException("mapper [" + name + "] has different [index] values from other types of the same index");
}
fieldType.setIndexOptions(options);
}
} else {

@@ -270,7 +275,7 @@ public abstract class FieldMapper extends Mapper {

protected MappedFieldTypeReference fieldTypeRef;
protected final MappedFieldType defaultFieldType;
protected final MultiFields multiFields;
protected MultiFields multiFields;
protected CopyTo copyTo;
protected final boolean indexCreatedBefore2x;

@@ -359,27 +364,42 @@ public abstract class FieldMapper extends Mapper {
}

@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) {
protected FieldMapper clone() {
try {
return (FieldMapper) super.clone();
} catch (CloneNotSupportedException e) {
throw new AssertionError(e);
}
}

@Override
public FieldMapper merge(Mapper mergeWith, boolean updateAllTypes) {
FieldMapper merged = clone();
merged.doMerge(mergeWith, updateAllTypes);
return merged;
}

/**
* Merge changes coming from {@code mergeWith} in place.
* @param updateAllTypes TODO
*/
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
if (!this.getClass().equals(mergeWith.getClass())) {
String mergedType = mergeWith.getClass().getSimpleName();
if (mergeWith instanceof FieldMapper) {
mergedType = ((FieldMapper) mergeWith).contentType();
}
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
// different types, return
return;
throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
}
FieldMapper fieldMergeWith = (FieldMapper) mergeWith;
multiFields.merge(mergeWith, mergeResult);
multiFields = multiFields.merge(fieldMergeWith.multiFields);

if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
// apply changeable values
MappedFieldType fieldType = fieldMergeWith.fieldType().clone();
fieldType.freeze();
fieldTypeRef.set(fieldType);
this.copyTo = fieldMergeWith.copyTo;
}
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {

@@ -565,18 +585,20 @@ public abstract class FieldMapper extends Mapper {
}

private final ContentPath.Type pathType;
private volatile ImmutableOpenMap<String, FieldMapper> mappers;
private final ImmutableOpenMap<String, FieldMapper> mappers;

public MultiFields(ContentPath.Type pathType, ImmutableOpenMap<String, FieldMapper> mappers) {
private MultiFields(ContentPath.Type pathType, ImmutableOpenMap<String, FieldMapper> mappers) {
this.pathType = pathType;
this.mappers = mappers;
ImmutableOpenMap.Builder<String, FieldMapper> builder = new ImmutableOpenMap.Builder<>();
// we disable the all in multi-field mappers
for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
for (ObjectObjectCursor<String, FieldMapper> cursor : mappers) {
FieldMapper mapper = cursor.value;
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
mapper = (FieldMapper) ((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
}
builder.put(cursor.key, mapper);
}
this.mappers = builder.build();
}

public void parse(FieldMapper mainField, ParseContext context) throws IOException {

@@ -598,47 +620,29 @@ public abstract class FieldMapper extends Mapper {
context.path().pathType(origPathType);
}

// No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge
public void merge(Mapper mergeWith, MergeResult mergeResult) {
FieldMapper mergeWithMultiField = (FieldMapper) mergeWith;
public MultiFields merge(MultiFields mergeWith) {
if (pathType != mergeWith.pathType) {
throw new IllegalArgumentException("Can't change path type from [" + pathType + "] to [" + mergeWith.pathType + "]");
}
ImmutableOpenMap.Builder<String, FieldMapper> newMappersBuilder = ImmutableOpenMap.builder(mappers);

List<FieldMapper> newFieldMappers = null;
ImmutableOpenMap.Builder<String, FieldMapper> newMappersBuilder = null;

for (ObjectCursor<FieldMapper> cursor : mergeWithMultiField.multiFields.mappers.values()) {
for (ObjectCursor<FieldMapper> cursor : mergeWith.mappers.values()) {
FieldMapper mergeWithMapper = cursor.value;
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName());
FieldMapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeResult.simulate()) {
// we disable the all in multi-field mappers
if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
}
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
mergeWithMapper = (FieldMapper) ((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
}
newMappersBuilder.put(mergeWithMapper.simpleName(), mergeWithMapper);
if (mergeWithMapper instanceof FieldMapper) {
if (newFieldMappers == null) {
newFieldMappers = new ArrayList<>(2);
}
newFieldMappers.add(mergeWithMapper);
}
}
} else {
mergeIntoMapper.merge(mergeWithMapper, mergeResult);
FieldMapper merged = mergeIntoMapper.merge(mergeWithMapper, false);
newMappersBuilder.put(merged.simpleName(), merged); // override previous definition
}
}

// first add all field mappers
if (newFieldMappers != null) {
mergeResult.addFieldMappers(newFieldMappers);
}
// now publish mappers
if (newMappersBuilder != null) {
mappers = newMappersBuilder.build();
}
ImmutableOpenMap<String, FieldMapper> mappers = newMappersBuilder.build();
return new MultiFields(pathType, mappers);
}

public Iterator<Mapper> iterator() {

@@ -174,5 +174,7 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
/** Returns the canonical name which uniquely identifies the mapper against other mappers in a type. */
public abstract String name();

public abstract void merge(Mapper mergeWith, MergeResult mergeResult);
/** Return the merge of {@code mergeWith} into this.
* Both {@code this} and {@code mergeWith} will be left unmodified. */
public abstract Mapper merge(Mapper mergeWith, boolean updateAllTypes);
}

@@ -251,14 +251,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
DocumentMapper oldMapper = mappers.get(mapper.type());

if (oldMapper != null) {
// simulate first
MergeResult result = oldMapper.merge(mapper.mapping(), true, updateAllTypes);
if (result.hasConflicts()) {
throw new IllegalArgumentException("Merge failed with failures {" + Arrays.toString(result.buildConflicts()) + "}");
}
// then apply for real
result = oldMapper.merge(mapper.mapping(), false, updateAllTypes);
assert result.hasConflicts() == false; // we already simulated
oldMapper.merge(mapper.mapping(), false, updateAllTypes);
return oldMapper;
} else {
Tuple<Collection<ObjectMapper>, Collection<FieldMapper>> newMappers = checkMappersCompatibility(

@@ -305,12 +298,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
for (ObjectMapper newObjectMapper : objectMappers) {
ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath());
if (existingObjectMapper != null) {
MergeResult result = new MergeResult(true, updateAllTypes);
existingObjectMapper.merge(newObjectMapper, result);
if (result.hasConflicts()) {
throw new IllegalArgumentException("Mapper for [" + newObjectMapper.fullPath() + "] conflicts with existing mapping in other types" +
Arrays.toString(result.buildConflicts()));
}
// simulate a merge and ignore the result, we are just interested
// in exceptions here
existingObjectMapper.merge(newObjectMapper, updateAllTypes);
}
}
fieldTypes.checkCompatibility(type, fieldMappers, updateAllTypes);

@@ -320,9 +310,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
String type, Mapping mapping, boolean updateAllTypes) {
List<ObjectMapper> objectMappers = new ArrayList<>();
List<FieldMapper> fieldMappers = new ArrayList<>();
for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
fieldMappers.add(metadataMapper);
}
Collections.addAll(fieldMappers, mapping.metadataMappers);
MapperUtils.collect(mapping.root, objectMappers, fieldMappers);
checkMappersCompatibility(type, objectMappers, fieldMappers, updateAllTypes);
return new Tuple<>(objectMappers, fieldMappers);

@@ -27,52 +27,6 @@ import java.util.Collection;
public enum MapperUtils {
;

private static MergeResult newStrictMergeResult() {
return new MergeResult(false, false) {

@Override
public void addFieldMappers(Collection<FieldMapper> fieldMappers) {
// no-op
}

@Override
public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
// no-op
}

@Override
public Collection<FieldMapper> getNewFieldMappers() {
throw new UnsupportedOperationException("Strict merge result does not support new field mappers");
}

@Override
public Collection<ObjectMapper> getNewObjectMappers() {
throw new UnsupportedOperationException("Strict merge result does not support new object mappers");
}

@Override
public void addConflict(String mergeFailure) {
throw new MapperParsingException("Merging dynamic updates triggered a conflict: " + mergeFailure);
}
};
}

/**
* Merge {@code mergeWith} into {@code mergeTo}. Note: this method only
* merges mappings, not lookup structures. Conflicts are returned as exceptions.
*/
public static void merge(Mapper mergeInto, Mapper mergeWith) {
mergeInto.merge(mergeWith, newStrictMergeResult());
}

/**
* Merge {@code mergeWith} into {@code mergeTo}. Note: this method only
* merges mappings, not lookup structures. Conflicts are returned as exceptions.
*/
public static void merge(Mapping mergeInto, Mapping mergeWith) {
mergeInto.merge(mergeWith, newStrictMergeResult());
}

/** Split mapper and its descendants into object and field mappers. */
public static void collect(Mapper mapper, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
if (mapper instanceof RootObjectMapper) {

@@ -47,19 +47,19 @@ public final class Mapping implements ToXContent {
final RootObjectMapper root;
final MetadataFieldMapper[] metadataMappers;
final Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap;
volatile Map<String, Object> meta;
final Map<String, Object> meta;

public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper, MetadataFieldMapper[] metadataMappers, Map<String, Object> meta) {
this.indexCreated = indexCreated;
this.root = rootObjectMapper;
this.metadataMappers = metadataMappers;
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap = new HashMap<>();
for (MetadataFieldMapper metadataMapper : metadataMappers) {
if (indexCreated.before(Version.V_2_0_0_beta1) && LEGACY_INCLUDE_IN_OBJECT.contains(metadataMapper.name())) {
root.putMapper(metadataMapper);
rootObjectMapper = rootObjectMapper.copyAndPutMapper(metadataMapper);
}
metadataMappersMap.put(metadataMapper.getClass(), metadataMapper);
}
this.root = rootObjectMapper;
// keep root mappers sorted for consistent serialization
Arrays.sort(metadataMappers, new Comparator<Mapper>() {
@Override

@@ -90,21 +90,20 @@ public final class Mapping implements ToXContent {
}

/** @see DocumentMapper#merge(Mapping, boolean, boolean) */
public void merge(Mapping mergeWith, MergeResult mergeResult) {
assert metadataMappers.length == mergeWith.metadataMappers.length;

root.merge(mergeWith.root, mergeResult);
for (MetadataFieldMapper metadataMapper : metadataMappers) {
MetadataFieldMapper mergeWithMetadataMapper = mergeWith.metadataMapper(metadataMapper.getClass());
if (mergeWithMetadataMapper != null) {
metadataMapper.merge(mergeWithMetadataMapper, mergeResult);
public Mapping merge(Mapping mergeWith, boolean updateAllTypes) {
RootObjectMapper mergedRoot = root.merge(mergeWith.root, updateAllTypes);
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> mergedMetaDataMappers = new HashMap<>(metadataMappersMap);
for (MetadataFieldMapper metaMergeWith : mergeWith.metadataMappers) {
MetadataFieldMapper mergeInto = mergedMetaDataMappers.get(metaMergeWith.getClass());
MetadataFieldMapper merged;
if (mergeInto == null) {
merged = metaMergeWith;
} else {
merged = mergeInto.merge(metaMergeWith, updateAllTypes);
}
mergedMetaDataMappers.put(merged.getClass(), merged);
}

if (mergeResult.simulate() == false) {
// let the merge with attributes to override the attributes
meta = mergeWith.meta;
}
return new Mapping(indexCreated, mergedRoot, mergedMetaDataMappers.values().toArray(new MetadataFieldMapper[0]), mergeWith.meta);
}

@Override

@@ -1,81 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper;

import org.elasticsearch.common.Strings;
import org.elasticsearch.index.mapper.object.ObjectMapper;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/** A container for tracking results of a mapping merge. */
public class MergeResult {

private final boolean simulate;
private final boolean updateAllTypes;

private final List<String> conflicts = new ArrayList<>();
private final List<FieldMapper> newFieldMappers = new ArrayList<>();
private final List<ObjectMapper> newObjectMappers = new ArrayList<>();

public MergeResult(boolean simulate, boolean updateAllTypes) {
this.simulate = simulate;
this.updateAllTypes = updateAllTypes;
}

public void addFieldMappers(Collection<FieldMapper> fieldMappers) {
assert simulate() == false;
newFieldMappers.addAll(fieldMappers);
}

public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
assert simulate() == false;
newObjectMappers.addAll(objectMappers);
}

public Collection<FieldMapper> getNewFieldMappers() {
return newFieldMappers;
}

public Collection<ObjectMapper> getNewObjectMappers() {
return newObjectMappers;
}

public boolean simulate() {
return simulate;
}

public boolean updateAllTypes() {
return updateAllTypes;
}

public void addConflict(String mergeFailure) {
conflicts.add(mergeFailure);
}

public boolean hasConflicts() {
return conflicts.isEmpty() == false;
}

public String[] buildConflicts() {
return conflicts.toArray(Strings.EMPTY_ARRAY);
}
}

@@ -70,4 +70,8 @@ public abstract class MetadataFieldMapper extends FieldMapper {
*/
public abstract void postParse(ParseContext context) throws IOException;

@Override
public MetadataFieldMapper merge(Mapper mergeWith, boolean updateAllTypes) {
return (MetadataFieldMapper) super.merge(mergeWith, updateAllTypes);
}
}

@@ -595,7 +595,7 @@ public abstract class ParseContext {
if (dynamicMappingsUpdate == null) {
dynamicMappingsUpdate = mapper;
} else {
MapperUtils.merge(dynamicMappingsUpdate, mapper);
dynamicMappingsUpdate = dynamicMappingsUpdate.merge(mapper, false);
}
}

@@ -128,7 +128,7 @@ public class ParsedDocument {
if (dynamicMappingsUpdate == null) {
dynamicMappingsUpdate = update;
} else {
MapperUtils.merge(dynamicMappingsUpdate, update);
dynamicMappingsUpdate = dynamicMappingsUpdate.merge(update, false);
}
}

@ -77,8 +77,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
|
|||
setupFieldType(context);
|
||||
ByteFieldMapper fieldMapper = new ByteFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
|
||||
coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
return (ByteFieldMapper) fieldMapper.includeInAll(includeInAll);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -605,11 +605,9 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith;
|
||||
if (!mergeResult.simulate()) {
|
||||
this.maxInputLength = fieldMergeWith.maxInputLength;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -123,8 +123,7 @@ public class DateFieldMapper extends NumberFieldMapper {
|
|||
fieldType.setNullValue(nullValue);
|
||||
DateFieldMapper fieldMapper = new DateFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
|
||||
coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
return (DateFieldMapper) fieldMapper.includeInAll(includeInAll);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -80,8 +80,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
|
|||
setupFieldType(context);
|
||||
DoubleFieldMapper fieldMapper = new DoubleFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
|
||||
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
return (DoubleFieldMapper) fieldMapper.includeInAll(includeInAll);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -81,8 +81,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
|
|||
setupFieldType(context);
|
||||
FloatFieldMapper fieldMapper = new FloatFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
|
||||
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
return (FloatFieldMapper) fieldMapper.includeInAll(includeInAll);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -85,8 +85,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
|
|||
IntegerFieldMapper fieldMapper = new IntegerFieldMapper(name, fieldType, defaultFieldType,
|
||||
ignoreMalformed(context), coerce(context),
|
||||
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
return (IntegerFieldMapper) fieldMapper.includeInAll(includeInAll);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -84,8 +84,7 @@ public class LongFieldMapper extends NumberFieldMapper {
|
|||
setupFieldType(context);
|
||||
LongFieldMapper fieldMapper = new LongFieldMapper(name, fieldType, defaultFieldType,
|
||||
ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
return (LongFieldMapper) fieldMapper.includeInAll(includeInAll);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -183,22 +183,41 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
|
|||
}
|
||||
|
||||
@Override
|
||||
public void includeInAll(Boolean includeInAll) {
|
||||
protected NumberFieldMapper clone() {
|
||||
return (NumberFieldMapper) super.clone();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mapper includeInAll(Boolean includeInAll) {
|
||||
if (includeInAll != null) {
|
||||
this.includeInAll = includeInAll;
|
||||
NumberFieldMapper clone = clone();
|
||||
clone.includeInAll = includeInAll;
|
||||
return clone;
|
||||
} else {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void includeInAllIfNotSet(Boolean includeInAll) {
|
||||
public Mapper includeInAllIfNotSet(Boolean includeInAll) {
|
||||
if (includeInAll != null && this.includeInAll == null) {
|
||||
this.includeInAll = includeInAll;
|
||||
NumberFieldMapper clone = clone();
|
||||
clone.includeInAll = includeInAll;
|
||||
return clone;
|
||||
} else {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void unsetIncludeInAll() {
|
||||
includeInAll = null;
|
||||
public Mapper unsetIncludeInAll() {
|
||||
if (includeInAll != null) {
|
||||
NumberFieldMapper clone = clone();
|
||||
clone.includeInAll = null;
|
||||
return clone;
|
||||
} else {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -254,14 +273,10 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
return;
|
||||
}
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
|
||||
|
||||
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
|
||||
this.includeInAll = nfmMergeWith.includeInAll;
|
||||
if (nfmMergeWith.ignoreMalformed.explicit()) {
|
||||
this.ignoreMalformed = nfmMergeWith.ignoreMalformed;
|
||||
|
@ -270,7 +285,6 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
|
|||
this.coerce = nfmMergeWith.coerce;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// used to we can use a numeric field in a document that is then parsed twice!
|
||||
public abstract static class CustomNumericField extends Field {
|
||||
|
|
|
@ -81,8 +81,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
|||
ShortFieldMapper fieldMapper = new ShortFieldMapper(name, fieldType, defaultFieldType,
|
||||
ignoreMalformed(context), coerce(context),
|
||||
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
return (ShortFieldMapper) fieldMapper.includeInAll(includeInAll);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.FieldMapper;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
|
||||
|
@ -150,8 +149,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
StringFieldMapper fieldMapper = new StringFieldMapper(
|
||||
name, fieldType, defaultFieldType, positionIncrementGap, ignoreAbove,
|
||||
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
return fieldMapper.includeInAll(includeInAll);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -257,22 +255,41 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
}
|
||||
|
||||
@Override
|
||||
public void includeInAll(Boolean includeInAll) {
|
||||
protected StringFieldMapper clone() {
|
||||
return (StringFieldMapper) super.clone();
|
||||
}
|
||||
|
||||
@Override
|
||||
public StringFieldMapper includeInAll(Boolean includeInAll) {
|
||||
if (includeInAll != null) {
|
||||
this.includeInAll = includeInAll;
|
||||
StringFieldMapper clone = clone();
|
||||
clone.includeInAll = includeInAll;
|
||||
return clone;
|
||||
} else {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void includeInAllIfNotSet(Boolean includeInAll) {
|
||||
public StringFieldMapper includeInAllIfNotSet(Boolean includeInAll) {
|
||||
if (includeInAll != null && this.includeInAll == null) {
|
||||
this.includeInAll = includeInAll;
|
||||
StringFieldMapper clone = clone();
|
||||
clone.includeInAll = includeInAll;
|
||||
return clone;
|
||||
} else {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void unsetIncludeInAll() {
|
||||
includeInAll = null;
|
||||
public StringFieldMapper unsetIncludeInAll() {
|
||||
if (includeInAll != null) {
|
||||
StringFieldMapper clone = clone();
|
||||
clone.includeInAll = null;
|
||||
return clone;
|
||||
} else {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -359,16 +376,11 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
return;
|
||||
}
|
||||
if (!mergeResult.simulate()) {
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll;
|
||||
this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
|
|
|
@ -33,7 +33,6 @@ import org.elasticsearch.index.mapper.FieldMapper;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost;
|
||||
|
||||
|
@ -81,8 +80,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
|
|||
TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(name, fieldType, defaultFieldType,
|
||||
ignoreMalformed(context), coerce(context), context.indexSettings(),
|
||||
analyzer, multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
return (TokenCountFieldMapper) fieldMapper.includeInAll(includeInAll);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -190,15 +188,10 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
return;
|
||||
}
|
||||
if (!mergeResult.simulate()) {
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
this.analyzer = ((TokenCountFieldMapper) mergeWith).analyzer;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
|
|
|
@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.FieldMapper;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
|
@ -388,19 +387,13 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
return;
|
||||
}
|
||||
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
BaseGeoPointFieldMapper gpfmMergeWith = (BaseGeoPointFieldMapper) mergeWith;
|
||||
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
|
||||
if (gpfmMergeWith.ignoreMalformed.explicit()) {
|
||||
this.ignoreMalformed = gpfmMergeWith.ignoreMalformed;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<Mapper> iterator() {
|
||||
|
|
|
@ -39,7 +39,6 @@ import org.elasticsearch.index.mapper.ContentPath;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField;
|
||||
|
@ -297,25 +296,20 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
return;
|
||||
}
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
|
||||
GeoPointFieldMapperLegacy gpfmMergeWith = (GeoPointFieldMapperLegacy) mergeWith;
|
||||
if (gpfmMergeWith.coerce.explicit()) {
|
||||
if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) {
|
||||
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different [coerce]");
|
||||
throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] has different [coerce]");
|
||||
}
|
||||
}
|
||||
|
||||
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
|
||||
if (gpfmMergeWith.coerce.explicit()) {
|
||||
this.coerce = gpfmMergeWith.coerce;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException {
|
||||
|
|
|
@ -45,7 +45,6 @@ import org.elasticsearch.index.mapper.FieldMapper;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -475,19 +474,14 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
return;
|
||||
}
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
|
||||
GeoShapeFieldMapper gsfm = (GeoShapeFieldMapper)mergeWith;
|
||||
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
|
||||
if (gsfm.coerce.explicit()) {
|
||||
this.coerce = gsfm.coerce;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
|
|
|
@ -36,7 +36,6 @@ import org.elasticsearch.index.fielddata.FieldDataType;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
@ -58,11 +57,24 @@ public class AllFieldMapper extends MetadataFieldMapper {
|
|||
|
||||
public interface IncludeInAll {
|
||||
|
||||
void includeInAll(Boolean includeInAll);
|
||||
/**
|
||||
* If {@code includeInAll} is not null then return a copy of this mapper
|
||||
* that will include values in the _all field according to {@code includeInAll}.
|
||||
*/
|
||||
Mapper includeInAll(Boolean includeInAll);
|
||||
|
||||
void includeInAllIfNotSet(Boolean includeInAll);
|
||||
/**
|
||||
* If {@code includeInAll} is not null and not set on this mapper yet, then
|
||||
* return a copy of this mapper that will include values in the _all field
|
||||
* according to {@code includeInAll}.
|
||||
*/
|
||||
Mapper includeInAllIfNotSet(Boolean includeInAll);
|
||||
|
||||
void unsetIncludeInAll();
|
||||
/**
|
||||
* If {@code includeInAll} was already set on this mapper then return a copy
|
||||
* of this mapper that has {@code includeInAll} not set.
|
||||
*/
|
||||
Mapper unsetIncludeInAll();
|
||||
}
|
||||
|
||||
public static final String NAME = "_all";
|
||||
|
@ -309,11 +321,11 @@ public class AllFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) {
|
||||
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
|
||||
throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
|
||||
}
|
||||
super.merge(mergeWith, mergeResult);
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -44,7 +44,6 @@ import org.elasticsearch.index.fielddata.FieldDataType;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
|
@ -331,7 +330,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
// do nothing here, no merging, but also no exception
|
||||
}
|
||||
}
|
||||
|
|
|
@ -34,7 +34,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
@ -279,13 +278,11 @@ public class IndexFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
IndexFieldMapper indexFieldMapperMergeWith = (IndexFieldMapper) mergeWith;
|
||||
if (!mergeResult.simulate()) {
|
||||
if (indexFieldMapperMergeWith.enabledState != enabledState && !indexFieldMapperMergeWith.enabledState.unset()) {
|
||||
this.enabledState = indexFieldMapperMergeWith.enabledState;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
|
@ -371,11 +370,11 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
|
||||
if (Objects.equals(parentType, fieldMergeWith.parentType) == false) {
|
||||
mergeResult.addConflict("The _parent field's type option can't be changed: [" + parentType + "]->[" + fieldMergeWith.parentType + "]");
|
||||
throw new IllegalArgumentException("The _parent field's type option can't be changed: [" + parentType + "]->[" + fieldMergeWith.parentType + "]");
|
||||
}
|
||||
|
||||
List<String> conflicts = new ArrayList<>();
|
||||
|
@ -383,13 +382,13 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
parentJoinFieldType.checkCompatibility(fieldMergeWith.parentJoinFieldType, conflicts, true); // same here
|
||||
if (childJoinFieldType != null) {
|
||||
// TODO: this can be set to false when the old parent/child impl is removed, we can do eager global ordinals loading per type.
|
||||
childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, mergeResult.updateAllTypes() == false);
|
||||
childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, updateAllTypes == false);
|
||||
}
|
||||
for (String conflict : conflicts) {
|
||||
mergeResult.addConflict(conflict);
|
||||
if (conflicts.isEmpty() == false) {
|
||||
throw new IllegalArgumentException("Merge conflicts: " + conflicts);
|
||||
}
|
||||
|
||||
if (active() && mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
|
||||
if (active()) {
|
||||
childJoinFieldType = fieldMergeWith.childJoinFieldType.clone();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,7 +31,6 @@ import org.elasticsearch.index.fielddata.FieldDataType;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
|
||||
|
@ -249,7 +248,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
// do nothing here, no merging, but also no exception
|
||||
}
|
||||
}
|
||||
|
|
|
@ -41,11 +41,11 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
@ -310,18 +310,20 @@ public class SourceFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith;
|
||||
if (mergeResult.simulate()) {
|
||||
List<String> conflicts = new ArrayList<>();
|
||||
if (this.enabled != sourceMergeWith.enabled) {
|
||||
mergeResult.addConflict("Cannot update enabled setting for [_source]");
|
||||
conflicts.add("Cannot update enabled setting for [_source]");
|
||||
}
|
||||
if (Arrays.equals(includes(), sourceMergeWith.includes()) == false) {
|
||||
mergeResult.addConflict("Cannot update includes setting for [_source]");
|
||||
conflicts.add("Cannot update includes setting for [_source]");
|
||||
}
|
||||
if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) {
|
||||
mergeResult.addConflict("Cannot update excludes setting for [_source]");
|
||||
}
|
||||
conflicts.add("Cannot update excludes setting for [_source]");
|
||||
}
|
||||
if (conflicts.isEmpty() == false) {
|
||||
throw new IllegalArgumentException("Can't merge because of conflicts: " + conflicts);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -32,7 +32,6 @@ import org.elasticsearch.index.analysis.NumericLongAnalyzer;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.SourceToParse;
|
||||
|
@ -258,21 +257,19 @@ public class TTLFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith;
|
||||
if (((TTLFieldMapper) mergeWith).enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with
|
||||
if (this.enabledState == EnabledAttributeMapper.ENABLED && ((TTLFieldMapper) mergeWith).enabledState == EnabledAttributeMapper.DISABLED) {
|
||||
mergeResult.addConflict("_ttl cannot be disabled once it was enabled.");
|
||||
if (ttlMergeWith.enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with
|
||||
if (this.enabledState == EnabledAttributeMapper.ENABLED && ttlMergeWith.enabledState == EnabledAttributeMapper.DISABLED) {
|
||||
throw new IllegalArgumentException("_ttl cannot be disabled once it was enabled.");
|
||||
} else {
|
||||
if (!mergeResult.simulate()) {
|
||||
this.enabledState = ttlMergeWith.enabledState;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (ttlMergeWith.defaultTTL != -1) {
|
||||
// we never build the default when the field is disabled so we should also not set it
|
||||
// (it does not make a difference though as everything that is not build in toXContent will also not be set in the cluster)
|
||||
if (!mergeResult.simulate() && (enabledState == EnabledAttributeMapper.ENABLED)) {
|
||||
if (enabledState == EnabledAttributeMapper.ENABLED) {
|
||||
this.defaultTTL = ttlMergeWith.defaultTTL;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -33,13 +33,13 @@ import org.elasticsearch.index.analysis.NumericDateAnalyzer;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.DateFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.LongFieldMapper;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -379,31 +379,32 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) {
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
TimestampFieldMapper timestampFieldMapperMergeWith = (TimestampFieldMapper) mergeWith;
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!mergeResult.simulate()) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
if (timestampFieldMapperMergeWith.enabledState != enabledState && !timestampFieldMapperMergeWith.enabledState.unset()) {
|
||||
this.enabledState = timestampFieldMapperMergeWith.enabledState;
|
||||
}
|
||||
} else {
|
||||
if (timestampFieldMapperMergeWith.defaultTimestamp() == null && defaultTimestamp == null) {
|
||||
return;
|
||||
}
|
||||
List<String> conflicts = new ArrayList<>();
|
||||
if (defaultTimestamp == null) {
|
||||
mergeResult.addConflict("Cannot update default in _timestamp value. Value is null now encountering " + timestampFieldMapperMergeWith.defaultTimestamp());
|
||||
conflicts.add("Cannot update default in _timestamp value. Value is null now encountering " + timestampFieldMapperMergeWith.defaultTimestamp());
|
||||
} else if (timestampFieldMapperMergeWith.defaultTimestamp() == null) {
|
||||
mergeResult.addConflict("Cannot update default in _timestamp value. Value is \" + defaultTimestamp.toString() + \" now encountering null");
|
||||
conflicts.add("Cannot update default in _timestamp value. Value is \" + defaultTimestamp.toString() + \" now encountering null");
|
||||
} else if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) {
|
||||
mergeResult.addConflict("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp());
|
||||
conflicts.add("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp());
|
||||
}
|
||||
if (this.path != null) {
|
||||
if (path.equals(timestampFieldMapperMergeWith.path()) == false) {
|
||||
mergeResult.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is " + (timestampFieldMapperMergeWith.path() == null ? "missing" : timestampFieldMapperMergeWith.path()));
|
||||
conflicts.add("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is " + (timestampFieldMapperMergeWith.path() == null ? "missing" : timestampFieldMapperMergeWith.path()));
|
||||
}
|
||||
} else if (timestampFieldMapperMergeWith.path() != null) {
|
||||
mergeResult.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is missing");
|
||||
}
|
||||
conflicts.add("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is missing");
|
||||
}
|
||||
if (conflicts.isEmpty() == false) {
|
||||
throw new IllegalArgumentException("Conflicts: " + conflicts);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -40,7 +40,6 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;

@@ -225,7 +224,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
}

@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) {
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
// do nothing here, no merging, but also no exception
}
}
@@ -33,7 +33,6 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;

@@ -225,7 +224,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
}

@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) {
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
// do nothing here, no merging, but also no exception
}
}
@@ -30,7 +30,6 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;

@@ -166,7 +165,7 @@ public class VersionFieldMapper extends MetadataFieldMapper {
}

@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) {
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
// nothing to do
}
}
@@ -122,8 +122,7 @@ public class IpFieldMapper extends NumberFieldMapper {
setupFieldType(context);
IpFieldMapper fieldMapper = new IpFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
return (IpFieldMapper) fieldMapper.includeInAll(includeInAll);
}

@Override
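Because includeInAll now hands back a modified copy instead of mutating the mapper, builders have to return what the call gives them. A hedged sketch of that change in isolation, with a hypothetical HypotheticalField standing in for the real FieldMapper hierarchy:

// Sketch of the builder change: includeInAll(...) returns a copy, so the builder
// must return the returned instance rather than the object it started with.
final class HypotheticalField {
    final String name;
    final Boolean includeInAll;

    HypotheticalField(String name, Boolean includeInAll) {
        this.name = name;
        this.includeInAll = includeInAll;
    }

    // copy-on-write: never mutate, hand back a new instance when something changes
    HypotheticalField includeInAll(Boolean includeInAll) {
        if (includeInAll == null) {
            return this;
        }
        return new HypotheticalField(name, includeInAll);
    }

    static HypotheticalField build(String name, Boolean includeInAll) {
        HypotheticalField field = new HypotheticalField(name, null);
        // old style: field.includeInAll(includeInAll); return field;  // the change would be lost
        return field.includeInAll(includeInAll);                       // new style: keep the copy
    }
}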
@@ -160,7 +160,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
context.path().remove();

ObjectMapper objectMapper = createMapper(name, context.path().fullPathAsText(name), enabled, nested, dynamic, pathType, mappers, context.indexSettings());
objectMapper.includeInAllIfNotSet(includeInAll);
objectMapper = objectMapper.includeInAllIfNotSet(includeInAll);

return (Y) objectMapper;
}
@@ -389,41 +389,53 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
}

@Override
public void includeInAll(Boolean includeInAll) {
public ObjectMapper includeInAll(Boolean includeInAll) {
if (includeInAll == null) {
return;
return this;
}
this.includeInAll = includeInAll;

ObjectMapper clone = clone();
clone.includeInAll = includeInAll;
// when called from outside, apply this on all the inner mappers
for (Mapper mapper : mappers.values()) {
for (Mapper mapper : clone.mappers.values()) {
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).includeInAll(includeInAll);
clone.putMapper(((AllFieldMapper.IncludeInAll) mapper).includeInAll(includeInAll));
}
}
return clone;
}

@Override
public void includeInAllIfNotSet(Boolean includeInAll) {
if (this.includeInAll == null) {
this.includeInAll = includeInAll;
public ObjectMapper includeInAllIfNotSet(Boolean includeInAll) {
if (includeInAll == null || this.includeInAll != null) {
return this;
}

ObjectMapper clone = clone();
clone.includeInAll = includeInAll;
// when called from outside, apply this on all the inner mappers
for (Mapper mapper : mappers.values()) {
for (Mapper mapper : clone.mappers.values()) {
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll);
clone.putMapper(((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll));
}
}
return clone;
}

@Override
public void unsetIncludeInAll() {
includeInAll = null;
public ObjectMapper unsetIncludeInAll() {
if (includeInAll == null) {
return this;
}
ObjectMapper clone = clone();
clone.includeInAll = null;
// when called from outside, apply this on all the inner mappers
for (Mapper mapper : mappers.values()) {
for (Mapper mapper : clone.mappers.values()) {
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
clone.putMapper(((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll());
}
}
return clone;
}

public Nested nested() {
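The pattern in this hunk is copy-on-write: instead of flipping includeInAll on the mapper and its children in place, each setter clones the mapper, applies the change to the clone and to copies of the child mappers, and returns the clone. A hedged, self-contained sketch using a hypothetical MapperNode type, not the real ObjectMapper:

import java.util.HashMap;
import java.util.Map;

// Copy-on-write sketch: setters return a modified copy and leave the original untouched.
final class MapperNode {
    final String name;
    final Boolean includeInAll;
    final Map<String, MapperNode> children;

    MapperNode(String name, Boolean includeInAll, Map<String, MapperNode> children) {
        this.name = name;
        this.includeInAll = includeInAll;
        this.children = children;
    }

    MapperNode includeInAll(Boolean value) {
        if (value == null) {
            return this;                                         // nothing to change, share the instance
        }
        Map<String, MapperNode> copied = new HashMap<>();
        for (MapperNode child : children.values()) {
            copied.put(child.name, child.includeInAll(value));   // recurse, collecting copies
        }
        return new MapperNode(name, value, copied);
    }
}

Callers that used to rely on in-place mutation now have to keep the returned instance, which is exactly what the builder and putMapper changes in the surrounding hunks do.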
@@ -434,14 +446,9 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
return this.nestedTypeFilter;
}

/**
* Put a new mapper.
* NOTE: this method must be called under the current {@link DocumentMapper}
* lock if concurrent updates are expected.
*/
public void putMapper(Mapper mapper) {
protected void putMapper(Mapper mapper) {
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll);
mapper = ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll);
}
mappers = mappers.copyAndPut(mapper.simpleName(), mapper);
}
@@ -464,64 +471,43 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
}

@Override
public void merge(final Mapper mergeWith, final MergeResult mergeResult) {
public ObjectMapper merge(Mapper mergeWith, boolean updateAllTypes) {
if (!(mergeWith instanceof ObjectMapper)) {
mergeResult.addConflict("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]");
return;
throw new IllegalArgumentException("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]");
}
ObjectMapper mergeWithObject = (ObjectMapper) mergeWith;
ObjectMapper merged = clone();
merged.doMerge(mergeWithObject, updateAllTypes);
return merged;
}

protected void doMerge(final ObjectMapper mergeWith, boolean updateAllTypes) {
if (nested().isNested()) {
if (!mergeWithObject.nested().isNested()) {
mergeResult.addConflict("object mapping [" + name() + "] can't be changed from nested to non-nested");
return;
if (!mergeWith.nested().isNested()) {
throw new IllegalArgumentException("object mapping [" + name() + "] can't be changed from nested to non-nested");
}
} else {
if (mergeWithObject.nested().isNested()) {
mergeResult.addConflict("object mapping [" + name() + "] can't be changed from non-nested to nested");
return;
if (mergeWith.nested().isNested()) {
throw new IllegalArgumentException("object mapping [" + name() + "] can't be changed from non-nested to nested");
}
}

if (!mergeResult.simulate()) {
if (mergeWithObject.dynamic != null) {
this.dynamic = mergeWithObject.dynamic;
}
if (mergeWith.dynamic != null) {
this.dynamic = mergeWith.dynamic;
}

doMerge(mergeWithObject, mergeResult);

List<Mapper> mappersToPut = new ArrayList<>();
List<ObjectMapper> newObjectMappers = new ArrayList<>();
List<FieldMapper> newFieldMappers = new ArrayList<>();
for (Mapper mapper : mergeWithObject) {
Mapper mergeWithMapper = mapper;
for (Mapper mergeWithMapper : mergeWith) {
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName());
Mapper merged;
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeResult.simulate()) {
mappersToPut.add(mergeWithMapper);
MapperUtils.collect(mergeWithMapper, newObjectMappers, newFieldMappers);
}
} else if (mergeIntoMapper instanceof MetadataFieldMapper == false) {
// no mapping, simply add it
merged = mergeWithMapper;
} else {
// root mappers can only exist here for backcompat, and are merged in Mapping
mergeIntoMapper.merge(mergeWithMapper, mergeResult);
merged = mergeIntoMapper.merge(mergeWithMapper, updateAllTypes);
}
putMapper(merged);
}
if (!newFieldMappers.isEmpty()) {
mergeResult.addFieldMappers(newFieldMappers);
}
if (!newObjectMappers.isEmpty()) {
mergeResult.addObjectMappers(newObjectMappers);
}
// add the mappers only after the administration have been done, so it will not be visible to parser (which first try to read with no lock)
for (Mapper mapper : mappersToPut) {
putMapper(mapper);
}
}

protected void doMerge(ObjectMapper mergeWith, MergeResult mergeResult) {

}

@Override
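The core API change sits in this hunk: ObjectMapper.merge no longer mutates the receiver or records problems on a MergeResult; it clones, folds the incoming children into the clone, throws on conflicts, and returns the merged copy for the caller to swap in. A hedged sketch of that shape with a hypothetical TreeMapper, not the actual class:

import java.util.HashMap;
import java.util.Map;

// Merge-as-a-copy sketch: merging never touches `this`; conflicts throw.
final class TreeMapper {
    final String name;
    final boolean nested;
    final Map<String, TreeMapper> children;

    TreeMapper(String name, boolean nested, Map<String, TreeMapper> children) {
        this.name = name;
        this.nested = nested;
        this.children = children;
    }

    TreeMapper merge(TreeMapper mergeWith) {
        if (nested != mergeWith.nested) {
            throw new IllegalArgumentException("object mapping [" + name + "] can't change its nested setting");
        }
        Map<String, TreeMapper> mergedChildren = new HashMap<>(children);
        for (TreeMapper incoming : mergeWith.children.values()) {
            TreeMapper existing = mergedChildren.get(incoming.name);
            mergedChildren.put(incoming.name, existing == null ? incoming : existing.merge(incoming));
        }
        return new TreeMapper(name, nested, mergedChildren);   // the original is left untouched
    }
}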
@@ -205,6 +205,14 @@ public class RootObjectMapper extends ObjectMapper {
this.numericDetection = numericDetection;
}

/** Return a copy of this mapper that has the given {@code mapper} as a
* sub mapper. */
public RootObjectMapper copyAndPutMapper(Mapper mapper) {
RootObjectMapper clone = (RootObjectMapper) clone();
clone.putMapper(mapper);
return clone;
}

@Override
public ObjectMapper mappingUpdate(Mapper mapper) {
RootObjectMapper update = (RootObjectMapper) super.mappingUpdate(mapper);
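Since putMapper becomes non-public, the new copyAndPutMapper is the sanctioned way to add a sub-mapper: clone first, mutate the clone, return it. A hedged illustration of the idiom with a hypothetical Root class (not the real RootObjectMapper):

import java.util.HashMap;
import java.util.Map;

// Clone-then-put idiom: the only public mutation path returns a copy.
final class Root implements Cloneable {
    Map<String, String> mappers = new HashMap<>();

    protected Root clone() {
        try {
            Root copy = (Root) super.clone();
            copy.mappers = new HashMap<>(mappers);   // copy the child map so the original stays intact
            return copy;
        } catch (CloneNotSupportedException e) {
            throw new AssertionError(e);
        }
    }

    private void putMapper(String name, String mapper) {   // kept non-public, as in the diff
        mappers.put(name, mapper);
    }

    public Root copyAndPutMapper(String name, String mapper) {
        Root copy = clone();
        copy.putMapper(name, mapper);
        return copy;
    }
}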
@@ -253,9 +261,14 @@ public class RootObjectMapper extends ObjectMapper {
}

@Override
protected void doMerge(ObjectMapper mergeWith, MergeResult mergeResult) {
public RootObjectMapper merge(Mapper mergeWith, boolean updateAllTypes) {
return (RootObjectMapper) super.merge(mergeWith, updateAllTypes);
}

@Override
protected void doMerge(ObjectMapper mergeWith, boolean updateAllTypes) {
super.doMerge(mergeWith, updateAllTypes);
RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith;
if (!mergeResult.simulate()) {
// merge them
List<DynamicTemplate> mergedTemplates = new ArrayList<>(Arrays.asList(this.dynamicTemplates));
for (DynamicTemplate template : mergeWithObject.dynamicTemplates) {

@@ -272,7 +285,6 @@ public class RootObjectMapper extends ObjectMapper {
}
this.dynamicTemplates = mergedTemplates.toArray(new DynamicTemplate[mergedTemplates.size()]);
}
}

@Override
protected void doXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
@@ -110,7 +110,7 @@ public class TranslogRecoveryPerformer {
if (currentUpdate == null) {
recoveredTypes.put(type, update);
} else {
MapperUtils.merge(currentUpdate, update);
currentUpdate = currentUpdate.merge(update, false);
}
}
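With immutable mappings, the translog recovery code can no longer merge an update into the accumulated one in place; it has to keep the value merge returns and carry it forward. A hedged sketch of that accumulation pattern, where M stands in for the real Mapping class:

import java.util.HashMap;
import java.util.Map;
import java.util.function.BinaryOperator;

// Accumulating immutable updates: re-assign the merge result instead of merging in place.
final class RecoveredTypes<M> {
    private final Map<String, M> recoveredTypes = new HashMap<>();
    private final BinaryOperator<M> mergeFn;   // pure: returns a merged copy or throws

    RecoveredTypes(BinaryOperator<M> mergeFn) {
        this.mergeFn = mergeFn;
    }

    void addUpdate(String type, M update) {
        M currentUpdate = recoveredTypes.get(type);
        // keep the returned copy; currentUpdate itself is never modified
        recoveredTypes.put(type, currentUpdate == null ? update : mergeFn.apply(currentUpdate, update));
    }
}

The same effect could be had with Map.merge(type, update, mergeFn); the explicit form just mirrors the structure of the diff above.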
@@ -31,7 +31,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;

@@ -39,7 +38,6 @@ import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

@@ -321,9 +319,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {

DocumentMapper docMapperAfter = parser.parse(mappingAfter);

MergeResult mergeResult = docMapperBefore.merge(docMapperAfter.mapping(), true, false);

assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapperBefore.merge(docMapperAfter.mapping(), true, false);

docMapperBefore.merge(docMapperAfter.mapping(), false, false);
@@ -23,7 +23,6 @@ import org.apache.lucene.analysis.*;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.io.IOException;

@@ -60,13 +59,11 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);

MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
stage1.merge(stage2.mapping(), true, false);
// Just simulated so merge hasn't happened yet
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword"));

mergeResult = stage1.merge(stage2.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
stage1.merge(stage2.mapping(), false, false);
// Just simulated so merge hasn't happened yet
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard"));
}
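The test changes follow one pattern: merge is now called for its effect (or for the absence of one when simulating), and conflicts are detected by catching the exception instead of inspecting a MergeResult. A hedged sketch of the resulting test shape, with JUnit assertions and illustrative Mapper/Mapping stand-ins:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

// Shape of the migrated tests: merge(...) succeeds silently or throws on conflict.
class MergeTestSketch {
    interface Mapping {}
    interface Mapper {
        void merge(Mapping other, boolean simulate);
        String analyzer();
    }

    void assertMergeBehaviour(Mapper stage1, Mapping stage2) {
        stage1.merge(stage2, true);                       // simulate: must not change stage1
        assertEquals("keyword", stage1.analyzer());

        stage1.merge(stage2, false);                      // real merge applies the update
        assertEquals("standard", stage1.analyzer());
    }

    void assertMergeConflicts(Mapper mapper, Mapping conflicting) {
        try {
            mapper.merge(conflicting, true);
            fail("expected a conflict");
        } catch (IllegalArgumentException e) {
            // conflicts now surface as exceptions rather than MergeResult entries
        }
    }
}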
@@ -371,9 +371,8 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
Map<String, String> config = getConfigurationViaXContent(initialDateFieldMapper);
assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy"));

MergeResult mergeResult = defaultMapper.merge(mergeMapper.mapping(), false, false);
defaultMapper.merge(mergeMapper.mapping(), false, false);

assertThat("Merging resulting in conflicts: " + Arrays.asList(mergeResult.buildConflicts()), mergeResult.hasConflicts(), is(false));
assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(DateFieldMapper.class)));

DateFieldMapper mergedFieldMapper = (DateFieldMapper) defaultMapper.mappers().getMapper("field");
@@ -34,7 +34,6 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;

@@ -219,7 +218,7 @@ public class ExternalMapper extends FieldMapper {
}

@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) {
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
// ignore this for now
}
@@ -28,7 +28,6 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;

@@ -66,9 +65,9 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
}

@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) {
public void doMerge(Mapper mergeWith, boolean updateAllTypes) {
if (!(mergeWith instanceof ExternalMetadataMapper)) {
mergeResult.addConflict("Trying to merge " + mergeWith + " with " + this);
throw new IllegalArgumentException("Trying to merge " + mergeWith + " with " + this);
}
}
@@ -33,7 +33,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -30,17 +30,13 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.isIn;

public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
public void testDefaultConfiguration() throws IOException {
@@ -29,7 +29,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;

@@ -39,6 +38,7 @@ import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

@@ -59,15 +59,12 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);

MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
stage1.merge(stage2.mapping(), true, false);
// since we are simulating, we should not have the age mapping
assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue());
assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue());
// now merge, don't simulate
mergeResult = stage1.merge(stage2.mapping(), false, false);
// there is still merge failures
assertThat(mergeResult.hasConflicts(), equalTo(false));
stage1.merge(stage2.mapping(), false, false);
// but we have the age in
assertThat(stage1.mappers().smartNameFieldMapper("age"), notNullValue());
assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue());

@@ -83,8 +80,7 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
DocumentMapper withDynamicMapper = parser.parse(withDynamicMapping);
assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));

MergeResult mergeResult = mapper.merge(withDynamicMapper.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
mapper.merge(withDynamicMapper.mapping(), false, false);
assertThat(mapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
}

@@ -99,14 +95,19 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().string();
DocumentMapper nestedMapper = parser.parse(nestedMapping);

MergeResult mergeResult = objectMapper.merge(nestedMapper.mapping(), true, false);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts().length, equalTo(1));
assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from non-nested to nested"));
try {
objectMapper.merge(nestedMapper.mapping(), true, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("object mapping [obj] can't be changed from non-nested to nested"));
}

mergeResult = nestedMapper.merge(objectMapper.mapping(), true, false);
assertThat(mergeResult.buildConflicts().length, equalTo(1));
assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from nested to non-nested"));
try {
nestedMapper.merge(objectMapper.mapping(), true, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("object mapping [obj] can't be changed from nested to non-nested"));
}
}

public void testMergeSearchAnalyzer() throws Exception {

@@ -122,9 +123,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
DocumentMapper changed = parser.parse(mapping2);

assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
MergeResult mergeResult = existing.merge(changed.mapping(), false, false);
existing.merge(changed.mapping(), false, false);

assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword"));
}

@@ -141,9 +141,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
DocumentMapper changed = parser.parse(mapping2);

assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
MergeResult mergeResult = existing.merge(changed.mapping(), false, false);
existing.merge(changed.mapping(), false, false);

assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard"));
assertThat(((StringFieldMapper) (existing.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14));
}
@@ -27,15 +27,11 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.util.Arrays;

import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

@@ -62,8 +58,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json");
DocumentMapper docMapper2 = parser.parse(mapping);

MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper2.mapping(), true, false);

docMapper.merge(docMapper2.mapping(), false, false);

@@ -84,8 +79,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json");
DocumentMapper docMapper3 = parser.parse(mapping);

mergeResult = docMapper.merge(docMapper3.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper3.mapping(), true, false);

docMapper.merge(docMapper3.mapping(), false, false);

@@ -100,8 +94,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json");
DocumentMapper docMapper4 = parser.parse(mapping);

mergeResult = docMapper.merge(docMapper4.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper4.mapping(), true, false);

docMapper.merge(docMapper4.mapping(), false, false);
|
@ -34,11 +34,9 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
|
|||
import org.elasticsearch.test.VersionUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
|
||||
|
@ -194,13 +192,18 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
|
|||
void assertConflicts(String mapping1, String mapping2, DocumentMapperParser parser, String... conflicts) throws IOException {
|
||||
DocumentMapper docMapper = parser.parse(mapping1);
|
||||
docMapper = parser.parse(docMapper.mappingSource().string());
|
||||
MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true, false);
|
||||
|
||||
List<String> expectedConflicts = new ArrayList<>(Arrays.asList(conflicts));
|
||||
for (String conflict : mergeResult.buildConflicts()) {
|
||||
assertTrue("found unexpected conflict [" + conflict + "]", expectedConflicts.remove(conflict));
|
||||
if (conflicts.length == 0) {
|
||||
docMapper.merge(parser.parse(mapping2).mapping(), true, false);
|
||||
} else {
|
||||
try {
|
||||
docMapper.merge(parser.parse(mapping2).mapping(), true, false);
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
for (String conflict : conflicts) {
|
||||
assertThat(e.getMessage(), containsString(conflict));
|
||||
}
|
||||
}
|
||||
}
|
||||
assertTrue("missing conflicts: " + Arrays.toString(expectedConflicts.toArray()), expectedConflicts.isEmpty());
|
||||
}
|
||||
|
||||
public void testEnabledNotUpdateable() throws Exception {
|
||||
|
|
|
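The assertConflicts helper changes from diffing a MergeResult against the expected strings to asserting that the merge either succeeds or throws with every expected message. A hedged, self-contained sketch of that helper shape; Merger is a stand-in for "parse both mappings and merge them" so the sketch does not depend on the real parser:

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

// Illustrative shape of the rewritten conflict-assertion helper.
class ConflictAssertions {
    interface Merger {
        void merge() throws IllegalArgumentException;
    }

    static void assertConflicts(Merger merger, String... expectedConflicts) {
        if (expectedConflicts.length == 0) {
            merger.merge();                            // must not throw
            return;
        }
        try {
            merger.merge();
            fail("expected merge conflicts");
        } catch (IllegalArgumentException e) {
            for (String conflict : expectedConflicts) {
                assertTrue("missing conflict: " + conflict, e.getMessage().contains(conflict));
            }
        }
    }
}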
@@ -40,7 +40,6 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.StringFieldMapper;

@@ -493,8 +492,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject()
.endObject().endObject().endObject().endObject().string();
MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), false, false);
assertFalse(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts());
defaultMapper.merge(parser.parse(updatedMapping).mapping(), false, false);

doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@@ -42,7 +42,6 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;

@@ -515,8 +514,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject()
.endObject().endObject().string();

MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), false, false);
assertThat(mergeResult.buildConflicts().length, equalTo(0));
docMapper.merge(parser.parse(mapping).mapping(), false, false);
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER));
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array"));
}

@@ -618,9 +616,9 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.field("index", indexValues.remove(randomInt(2)))
.endObject()
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser();
MapperService mapperService = createIndex("test", BWC_SETTINGS).mapperService();

DocumentMapper docMapper = parser.parse(mapping);
mapperService.merge("type", new CompressedXContent(mapping), true, false);
mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("_timestamp")

@@ -628,18 +626,11 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();

MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false);
List<String> expectedConflicts = new ArrayList<>();
expectedConflicts.add("mapper [_timestamp] has different [index] values");
expectedConflicts.add("mapper [_timestamp] has different [tokenize] values");
if (indexValues.get(0).equals("not_analyzed") == false) {
// if the only index value left is not_analyzed, then the doc values setting will be the same, but in the
// other two cases, it will change
expectedConflicts.add("mapper [_timestamp] has different [doc_values] values");
}

for (String conflict : mergeResult.buildConflicts()) {
assertThat(conflict, isIn(expectedConflicts));
try {
mapperService.merge("type", new CompressedXContent(mapping), false, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [index] values"));
}
}

@@ -686,10 +677,15 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
void assertConflict(String mapping1, String mapping2, DocumentMapperParser parser, String conflict) throws IOException {
DocumentMapper docMapper = parser.parse(mapping1);
docMapper = parser.parse(docMapper.mappingSource().string());
MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true, false);
assertThat(mergeResult.buildConflicts().length, equalTo(conflict == null ? 0 : 1));
if (conflict != null) {
assertThat(mergeResult.buildConflicts()[0], containsString(conflict));
if (conflict == null) {
docMapper.merge(parser.parse(mapping2).mapping(), true, false);
} else {
try {
docMapper.merge(parser.parse(mapping2).mapping(), true, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString(conflict));
}
}
}
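Across these tests the simulate flag keeps one meaning: the same conflict checks run either way and throw on failure, but only a non-simulated merge actually installs the merged, immutable mapping. A hedged sketch of that semantics under stated assumptions (MappingHolder and the merge function are hypothetical, not Elasticsearch APIs):

// Simulate-vs-apply semantics: validation always runs; only the real merge swaps
// in the merged copy, which is what makes the mapping update effectively atomic.
final class MappingHolder<M> {
    interface MergeFn<M> {
        M merge(M current, M incoming);   // pure: returns a merged copy or throws
    }

    private volatile M current;
    private final MergeFn<M> mergeFn;

    MappingHolder(M initial, MergeFn<M> mergeFn) {
        this.current = initial;
        this.mergeFn = mergeFn;
    }

    void merge(M incoming, boolean simulate) {
        M merged = mergeFn.merge(current, incoming);   // throws IllegalArgumentException on conflict
        if (simulate == false) {
            current = merged;                          // single volatile write publishes the new mapping
        }
    }
}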
@@ -35,7 +35,6 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.internal.TTLFieldMapper;

@@ -116,9 +115,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
DocumentMapper mapperWithoutTtl = parser.parse(mappingWithoutTtl);
DocumentMapper mapperWithTtl = parser.parse(mappingWithTtl);

MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false);
mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false);

assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(true));
}

@@ -141,9 +139,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
DocumentMapper initialMapper = parser.parse(mappingWithTtl);
DocumentMapper updatedMapper = parser.parse(updatedMapping);

MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true, false);
initialMapper.merge(updatedMapper.mapping(), true, false);

assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
}

@@ -154,9 +151,13 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
DocumentMapper initialMapper = parser.parse(mappingWithTtl);
DocumentMapper updatedMapper = parser.parse(mappingWithTtlDisabled);

MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true, false);
try {
initialMapper.merge(updatedMapper.mapping(), true, false);
fail();
} catch (IllegalArgumentException e) {
// expected
}

assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
}

@@ -189,23 +190,20 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
public void testNoConflictIfNothingSetAndDisabledLater() throws Exception {
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean(), false);
assertFalse(mergeResult.hasConflicts());
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean(), false);
}

public void testNoConflictIfNothingSetAndEnabledLater() throws Exception {
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean(), false);
assertFalse(mergeResult.hasConflicts());
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean(), false);
}

public void testMergeWithOnlyDefaultSet() throws Exception {
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled);
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false);
assertFalse(mergeResult.hasConflicts());
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false);
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
}

@@ -216,8 +214,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false);
assertFalse(mergeResult.hasConflicts());
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false);
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
}

@@ -228,8 +225,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtl);
CompressedXContent mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true, false);
assertFalse(mergeResult.hasConflicts());
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true, false);
// make sure simulate flag actually worked - no mappings applied
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge));

@@ -240,8 +236,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled();
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false);
assertFalse(mergeResult.hasConflicts());
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false);
// make sure simulate flag actually worked - no mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge));

@@ -252,8 +247,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false);
assertFalse(mergeResult.hasConflicts());
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false);
// make sure simulate flag actually worked - no mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge));

@@ -263,8 +257,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
mappingWithoutTtl = getMappingWithTtlDisabled("6d");
indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false);
assertFalse(mergeResult.hasConflicts());
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false);
// make sure simulate flag actually worked - mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));

@@ -273,8 +266,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
// check if switching simulate flag off works if nothing was applied in the beginning
indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false);
assertFalse(mergeResult.hasConflicts());
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false);
// make sure simulate flag actually worked - mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
@@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;

@@ -77,9 +76,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException {
IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping);
// simulate like in MetaDataMappingService#putMapping
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false, false);
// assure we have no conflicts
assertThat(mergeResult.buildConflicts().length, equalTo(0));
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false, false);
// make sure mappings applied
CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string()));

@@ -101,9 +98,12 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping);
CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource();
// simulate like in MetaDataMappingService#putMapping
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true, false);
// assure we have conflicts
assertThat(mergeResult.buildConflicts().length, equalTo(1));
try {
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true, false);
fail();
} catch (IllegalArgumentException e) {
// expected
}
// make sure simulate flag actually worked - no mappings applied
CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate));
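The check these tests rely on is that a rejected update leaves the stored mapping byte-for-byte unchanged: snapshot mappingSource, expect the conflicting merge to throw, then compare. A hedged sketch of that assertion idiom with a hypothetical MappingStore interface:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

// "Conflict leaves the mapping untouched": snapshot, expect a throw, re-compare.
class ConflictLeavesMappingUntouched {
    interface MappingStore {
        String mappingSource();
        void merge(String mappingUpdate, boolean simulate);
    }

    void assertConflictKeepsOldMapping(MappingStore store, String conflictingUpdate) {
        String before = store.mappingSource();
        try {
            store.merge(conflictingUpdate, true);      // simulate, should throw
            fail("expected a mapping conflict");
        } catch (IllegalArgumentException expected) {
            // conflict detected before anything was applied
        }
        assertEquals(before, store.mappingSource());   // stored mapping is unchanged
    }
}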
@@ -1176,7 +1176,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.endObject().endObject()).get();
fail();
} catch (IllegalArgumentException e) {
assertThat(e.toString(), containsString("Merge failed with failures {[The _parent field's type option can't be changed: [null]->[parent]"));
assertThat(e.toString(), containsString("The _parent field's type option can't be changed: [null]->[parent]"));
}
}
@@ -602,7 +602,7 @@ public class AttachmentMapper extends FieldMapper {
}

@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) {
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
// ignore this for now
}
@@ -66,8 +66,7 @@ public class Murmur3FieldMapper extends LongFieldMapper {
Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(name, fieldType, defaultFieldType,
ignoreMalformed(context), coerce(context),
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
return (Murmur3FieldMapper) fieldMapper.includeInAll(includeInAll);
}

@Override
@@ -28,7 +28,6 @@ import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;

@@ -177,12 +176,10 @@ public class SizeFieldMapper extends MetadataFieldMapper {
}

@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) {
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
SizeFieldMapper sizeFieldMapperMergeWith = (SizeFieldMapper) mergeWith;
if (!mergeResult.simulate()) {
if (sizeFieldMapperMergeWith.enabledState != enabledState && !sizeFieldMapperMergeWith.enabledState.unset()) {
this.enabledState = sizeFieldMapperMergeWith.enabledState;
}
}
}
}