Mappings: Join MergeResults with MergeContext since they are almost the same

MergeContext currently exists to store conflicts and to provide
a mechanism for adding dynamic fields. MergeResult stores the same
conflicts. This change merges the two classes together, as well
as removing the MergeFlags construct.

This is in preparation for simplifying the callback structures
to dynamically add fields, which will require storing the mapping
updates in the results, instead of having a sneaky callback to
the DocumentMapper instance. It also just makes more sense that
the "results" of a merge are conflicts that occurred, along with
updates that may have occurred. For MergeFlags, any future needs
for parameterizing the merge (which seems unlikely) can just be
added directly to MergeResult, as simulate is with this change.
This commit is contained in:
Ryan Ernst 2015-04-23 19:24:38 -07:00
parent 31dc26ec42
commit 4d672b0369
59 changed files with 313 additions and 370 deletions

View File

@ -40,6 +40,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InvalidTypeNameException;
@ -48,8 +49,6 @@ import org.elasticsearch.percolator.PercolatorService;
import java.util.*;
import static com.google.common.collect.Maps.newHashMap;
import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
/**
* Service responsible for submitting mapping changes
*/
@ -382,10 +381,10 @@ public class MetaDataMappingService extends AbstractComponent {
newMapper = indexService.mapperService().parse(request.type(), new CompressedString(request.source()), existingMapper == null);
if (existingMapper != null) {
// first, simulate
DocumentMapper.MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), mergeFlags().simulate(true));
MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true);
// if we have conflicts, and we are not supposed to ignore them, throw an exception
if (!request.ignoreConflicts() && mergeResult.hasConflicts()) {
throw new MergeMappingException(mergeResult.conflicts());
throw new MergeMappingException(mergeResult.buildConflicts());
}
}
}

View File

@ -55,7 +55,7 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper<?>> {
this.searchQuoteAnalyzer = searchQuoteAnalyzer;
}
public DocumentFieldMappers copyAndAllAll(Collection<? extends FieldMapper<?>> newMappers) {
public DocumentFieldMappers copyAndAllAll(Collection<FieldMapper<?>> newMappers) {
FieldMappersLookup fieldMappers = this.fieldMappers.copyAndAddAll(newMappers);
FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper<?>, Map.Entry<String, Analyzer>>() {
@Override

View File

@ -88,56 +88,6 @@ import java.util.concurrent.CopyOnWriteArrayList;
*/
public class DocumentMapper implements ToXContent {
/**
* A result of a merge.
*/
public static class MergeResult {
// Conflict descriptions collected during the merge; empty when the merge was clean.
private final String[] conflicts;
// NOTE(review): callers pass an already-built array; this class does not copy it,
// so it assumes the caller does not mutate the array afterwards — TODO confirm.
public MergeResult(String[] conflicts) {
this.conflicts = conflicts;
}
/**
* Does the merge have conflicts or not?
*/
public boolean hasConflicts() {
return conflicts.length > 0;
}
/**
* The merge conflicts.
*/
public String[] conflicts() {
return this.conflicts;
}
}
public static class MergeFlags {
// Static factory enabling the fluent idiom: mergeFlags().simulate(...).
public static MergeFlags mergeFlags() {
return new MergeFlags();
}
// Defaults to true: a merge is a dry run unless simulate is explicitly disabled.
private boolean simulate = true;
public MergeFlags() {
}
/**
* A simulation run, don't perform actual modifications to the mapping.
*/
public boolean simulate() {
return simulate;
}
// Setter returns this so calls can be chained on the factory result.
public MergeFlags simulate(boolean simulate) {
this.simulate = simulate;
return this;
}
}
/**
* A listener to be called during the parse process.
*/
@ -579,7 +529,7 @@ public class DocumentMapper implements ToXContent {
return parser.contentType().xContent().createParser(builder.bytes());
}
public void addFieldMappers(List<FieldMapper<?>> fieldMappers) {
public void addFieldMappers(Collection<FieldMapper<?>> fieldMappers) {
synchronized (mappersMutex) {
this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
}
@ -629,20 +579,20 @@ public class DocumentMapper implements ToXContent {
mapping.root.traverse(listener);
}
private MergeContext newMergeContext(MergeFlags mergeFlags) {
return new MergeContext(mergeFlags) {
private MergeResult newMergeContext(boolean simulate) {
return new MergeResult(simulate) {
List<String> conflicts = new ArrayList<>();
@Override
public void addFieldMappers(List<FieldMapper<?>> fieldMappers) {
assert mergeFlags().simulate() == false;
public void addFieldMappers(Collection<FieldMapper<?>> fieldMappers) {
assert simulate() == false;
DocumentMapper.this.addFieldMappers(fieldMappers);
}
@Override
public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
assert mergeFlags().simulate() == false;
assert simulate() == false;
DocumentMapper.this.addObjectMappers(objectMappers);
}
@ -664,10 +614,10 @@ public class DocumentMapper implements ToXContent {
};
}
public synchronized MergeResult merge(Mapping mapping, MergeFlags mergeFlags) {
final MergeContext mergeContext = newMergeContext(mergeFlags);
final MergeResult mergeResult = this.mapping.merge(mapping, mergeContext);
if (mergeFlags.simulate() == false) {
public synchronized MergeResult merge(Mapping mapping, boolean simulate) {
final MergeResult mergeResult = newMergeContext(simulate);
this.mapping.merge(mapping, mergeResult);
if (simulate == false) {
refreshSource();
}
return mergeResult;

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
@ -38,7 +39,7 @@ public abstract class FieldMapperListener {
public abstract void fieldMapper(FieldMapper<?> fieldMapper);
public void fieldMappers(List<FieldMapper<?>> fieldMappers) {
public void fieldMappers(Collection<FieldMapper<?>> fieldMappers) {
for (FieldMapper<?> mapper : fieldMappers) {
fieldMapper(mapper);
}

View File

@ -49,7 +49,7 @@ class FieldMappersLookup implements Iterable<FieldMapper<?>> {
/**
* Return a new instance that contains the union of this instance and the provided mappers.
*/
public FieldMappersLookup copyAndAddAll(Collection<? extends FieldMapper<?>> newMappers) {
public FieldMappersLookup copyAndAddAll(Collection<FieldMapper<?>> newMappers) {
CopyOnWriteHashMap<String, FieldMappers> map = this.mappers;
for (FieldMapper<?> mapper : newMappers) {

View File

@ -132,7 +132,7 @@ public interface Mapper extends ToXContent {
*/
Mapper parse(ParseContext context) throws IOException;
void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException;
void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException;
void traverse(FieldMapperListener fieldMapperListener);

View File

@ -79,7 +79,6 @@ import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
/**
*
@ -336,11 +335,11 @@ public class MapperService extends AbstractIndexComponent {
DocumentMapper oldMapper = mappers.get(mapper.type());
if (oldMapper != null) {
DocumentMapper.MergeResult result = oldMapper.merge(mapper.mapping(), mergeFlags().simulate(false));
MergeResult result = oldMapper.merge(mapper.mapping(), false);
if (result.hasConflicts()) {
// TODO: What should we do???
if (logger.isDebugEnabled()) {
logger.debug("merging mapping for type [{}] resulted in conflicts: [{}]", mapper.type(), Arrays.toString(result.conflicts()));
logger.debug("merging mapping for type [{}] resulted in conflicts: [{}]", mapper.type(), Arrays.toString(result.buildConflicts()));
}
}
fieldDataService.onMappingUpdate();
@ -385,7 +384,7 @@ public class MapperService extends AbstractIndexComponent {
}
}
private void addFieldMappers(List<FieldMapper<?>> fieldMappers) {
private void addFieldMappers(Collection<FieldMapper<?>> fieldMappers) {
synchronized (mappersMutex) {
this.fieldMappers = this.fieldMappers.copyAndAddAll(fieldMappers);
}
@ -933,7 +932,7 @@ public class MapperService extends AbstractIndexComponent {
}
@Override
public void fieldMappers(List<FieldMapper<?>> fieldMappers) {
public void fieldMappers(Collection<FieldMapper<?>> fieldMappers) {
addFieldMappers(fieldMappers);
}
}

View File

@ -24,7 +24,6 @@ import org.elasticsearch.index.mapper.object.ObjectMapper;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
public enum MapperUtils {
;
@ -42,8 +41,8 @@ public enum MapperUtils {
return mapper;
}
private static MergeContext newStrictMergeContext() {
return new MergeContext(new DocumentMapper.MergeFlags().simulate(false)) {
private static MergeResult newStrictMergeContext() {
return new MergeResult(false) {
@Override
public boolean hasConflicts() {
@ -61,7 +60,7 @@ public enum MapperUtils {
}
@Override
public void addFieldMappers(List<FieldMapper<?>> fieldMappers) {
public void addFieldMappers(Collection<FieldMapper<?>> fieldMappers) {
// no-op
}

View File

@ -25,7 +25,6 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper.MergeResult;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import java.io.IOException;
@ -95,11 +94,11 @@ public final class Mapping implements ToXContent {
return (T) rootMappersMap.get(clazz);
}
/** @see DocumentMapper#merge(DocumentMapper, org.elasticsearch.index.mapper.DocumentMapper.MergeFlags) */
public MergeResult merge(Mapping mergeWith, MergeContext mergeContext) {
/** @see DocumentMapper#merge(Mapping, boolean) */
public void merge(Mapping mergeWith, MergeResult mergeResult) {
assert rootMappers.length == mergeWith.rootMappers.length;
root.merge(mergeWith.root, mergeContext);
root.merge(mergeWith.root, mergeResult);
for (RootMapper rootMapper : rootMappers) {
// root mappers included in root object will get merge in the rootObjectMapper
if (rootMapper.includeInObject()) {
@ -107,15 +106,14 @@ public final class Mapping implements ToXContent {
}
RootMapper mergeWithRootMapper = mergeWith.rootMapper(rootMapper.getClass());
if (mergeWithRootMapper != null) {
rootMapper.merge(mergeWithRootMapper, mergeContext);
rootMapper.merge(mergeWithRootMapper, mergeResult);
}
}
if (mergeContext.mergeFlags().simulate() == false) {
if (mergeResult.simulate() == false) {
// let the merge with attributes to override the attributes
meta = mergeWith.meta;
}
return new MergeResult(mergeContext.buildConflicts());
}
@Override

View File

@ -21,26 +21,25 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
*
*/
public abstract class MergeContext {
/** A container for tracking results of a mapping merge. */
public abstract class MergeResult {
private final DocumentMapper.MergeFlags mergeFlags;
private final boolean simulate;
public MergeContext(DocumentMapper.MergeFlags mergeFlags) {
this.mergeFlags = mergeFlags;
public MergeResult(boolean simulate) {
this.simulate = simulate;
}
public abstract void addFieldMappers(List<FieldMapper<?>> fieldMappers);
public abstract void addFieldMappers(Collection<FieldMapper<?>> fieldMappers);
public abstract void addObjectMappers(Collection<ObjectMapper> objectMappers);
public DocumentMapper.MergeFlags mergeFlags() {
return mergeFlags;
public boolean simulate() {
return simulate;
}
public abstract void addConflict(String mergeFailure);

View File

@ -39,9 +39,6 @@ import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.index.Terms;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.queries.TermsFilter;
import org.apache.lucene.search.*;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchIllegalStateException;
@ -582,13 +579,13 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
if (!this.getClass().equals(mergeWith.getClass())) {
String mergedType = mergeWith.getClass().getSimpleName();
if (mergeWith instanceof AbstractFieldMapper) {
mergedType = ((AbstractFieldMapper) mergeWith).contentType();
}
mergeContext.addConflict("mapper [" + names.fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
mergeResult.addConflict("mapper [" + names.fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
// different types, return
return;
}
@ -596,62 +593,62 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
boolean mergeWithIndexed = fieldMergeWith.fieldType().indexOptions() != IndexOptions.NONE;
if (indexed != mergeWithIndexed || this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different index values");
}
if (this.fieldType().stored() != fieldMergeWith.fieldType().stored()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store values");
}
if (!this.hasDocValues() && fieldMergeWith.hasDocValues()) {
// don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitely set
// when the doc_values field data format is configured
mergeContext.addConflict("mapper [" + names.fullName() + "] has different " + TypeParsers.DOC_VALUES + " values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different " + TypeParsers.DOC_VALUES + " values");
}
if (this.fieldType().omitNorms() && !fieldMergeWith.fieldType.omitNorms()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] cannot enable norms (`norms.enabled`)");
mergeResult.addConflict("mapper [" + names.fullName() + "] cannot enable norms (`norms.enabled`)");
}
if (this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different tokenize values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different tokenize values");
}
if (this.fieldType().storeTermVectors() != fieldMergeWith.fieldType().storeTermVectors()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector values");
}
if (this.fieldType().storeTermVectorOffsets() != fieldMergeWith.fieldType().storeTermVectorOffsets()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_offsets values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_offsets values");
}
if (this.fieldType().storeTermVectorPositions() != fieldMergeWith.fieldType().storeTermVectorPositions()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_positions values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_positions values");
}
if (this.fieldType().storeTermVectorPayloads() != fieldMergeWith.fieldType().storeTermVectorPayloads()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_payloads values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_payloads values");
}
// null and "default"-named index analyzers both mean the default is used
if (this.indexAnalyzer == null || "default".equals(this.indexAnalyzer.name())) {
if (fieldMergeWith.indexAnalyzer != null && !"default".equals(fieldMergeWith.indexAnalyzer.name())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different analyzer");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer");
}
} else if (fieldMergeWith.indexAnalyzer == null || "default".equals(fieldMergeWith.indexAnalyzer.name())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different analyzer");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer");
} else if (!this.indexAnalyzer.name().equals(fieldMergeWith.indexAnalyzer.name())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different analyzer");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer");
}
if (!this.names().equals(fieldMergeWith.names())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_name");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different index_name");
}
if (this.similarity == null) {
if (fieldMergeWith.similarity() != null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity");
}
} else if (fieldMergeWith.similarity() == null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity");
} else if (!this.similarity().equals(fieldMergeWith.similarity())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity");
}
multiFields.merge(mergeWith, mergeContext);
multiFields.merge(mergeWith, mergeResult);
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
// apply changeable values
this.fieldType = new FieldType(this.fieldType);
this.fieldType.setOmitNorms(fieldMergeWith.fieldType.omitNorms());
@ -917,7 +914,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
}
// No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
AbstractFieldMapper mergeWithMultiField = (AbstractFieldMapper) mergeWith;
List<FieldMapper<?>> newFieldMappers = null;
@ -928,7 +925,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
// we disable the all in multi-field mappers
if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
@ -945,13 +942,13 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
}
}
} else {
mergeIntoMapper.merge(mergeWithMapper, mergeContext);
mergeIntoMapper.merge(mergeWithMapper, mergeResult);
}
}
// first add all field mappers
if (newFieldMappers != null) {
mergeContext.addFieldMappers(newFieldMappers);
mergeResult.addFieldMappers(newFieldMappers);
}
// now publish mappers
if (newMappersBuilder != null) {

View File

@ -45,7 +45,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
@ -245,14 +245,14 @@ public class BinaryFieldMapper extends AbstractFieldMapper<BytesReference> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
BinaryFieldMapper sourceMergeWith = (BinaryFieldMapper) mergeWith;
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
if (sourceMergeWith.compress != null) {
this.compress = sourceMergeWith.compress;
}

View File

@ -38,7 +38,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
@ -237,13 +237,13 @@ public class BooleanFieldMapper extends AbstractFieldMapper<Boolean> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.nullValue = ((BooleanFieldMapper) mergeWith).nullValue;
}
}

View File

@ -46,7 +46,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
@ -328,12 +328,12 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.nullValue = ((ByteFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((ByteFieldMapper) mergeWith).nullValueAsString;
}

View File

@ -44,7 +44,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
@ -523,22 +523,22 @@ public class CompletionFieldMapper extends AbstractFieldMapper<String> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith;
if (payloads != fieldMergeWith.payloads) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different payload values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different payload values");
}
if (preservePositionIncrements != fieldMergeWith.preservePositionIncrements) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different 'preserve_position_increments' values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'preserve_position_increments' values");
}
if (preserveSeparators != fieldMergeWith.preserveSeparators) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different 'preserve_separators' values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'preserve_separators' values");
}
if(!ContextMapping.mappingsAreEqual(getContextMapping(), fieldMergeWith.getContextMapping())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different 'context_mapping' values");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'context_mapping' values");
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.maxInputLength = fieldMergeWith.maxInputLength;
}
}

View File

@ -53,7 +53,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
@ -494,12 +494,12 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.nullValue = ((DateFieldMapper) mergeWith).nullValue;
this.dateTimeFormatter = ((DateFieldMapper) mergeWith).dateTimeFormatter;
}

View File

@ -51,7 +51,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
@ -333,12 +333,12 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.nullValue = ((DoubleFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((DoubleFieldMapper) mergeWith).nullValueAsString;
}

View File

@ -52,7 +52,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
@ -339,12 +339,12 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.nullValue = ((FloatFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((FloatFieldMapper) mergeWith).nullValueAsString;
}

View File

@ -47,7 +47,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
@ -330,12 +330,12 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.nullValue = ((IntegerFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((IntegerFieldMapper) mergeWith).nullValueAsString;
}

View File

@ -47,7 +47,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
@ -312,12 +312,12 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.nullValue = ((LongFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((LongFieldMapper) mergeWith).nullValueAsString;
}

View File

@ -53,7 +53,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
@ -370,12 +370,12 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldM
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
this.precisionStep = nfmMergeWith.precisionStep;
this.includeInAll = nfmMergeWith.includeInAll;

View File

@ -48,7 +48,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
@ -328,12 +328,12 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.nullValue = ((ShortFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((ShortFieldMapper) mergeWith).nullValueAsString;
}

View File

@ -37,7 +37,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
@ -354,12 +354,12 @@ public class StringFieldMapper extends AbstractFieldMapper<String> implements Al
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll;
this.nullValue = ((StringFieldMapper) mergeWith).nullValue;
this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove;

View File

@ -32,7 +32,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost;
@ -189,12 +189,12 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.analyzer = ((TokenCountFieldMapper) mergeWith).analyzer;
}
}

View File

@ -50,7 +50,7 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ObjectMapperListener;
import org.elasticsearch.index.mapper.ParseContext;
@ -643,39 +643,39 @@ public class GeoPointFieldMapper extends AbstractFieldMapper<GeoPoint> implement
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
GeoPointFieldMapper fieldMergeWith = (GeoPointFieldMapper) mergeWith;
if (this.enableLatLon != fieldMergeWith.enableLatLon) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different lat_lon");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different lat_lon");
}
if (this.enableGeoHash != fieldMergeWith.enableGeoHash) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different geohash");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different geohash");
}
if (this.geoHashPrecision != fieldMergeWith.geoHashPrecision) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different geohash_precision");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different geohash_precision");
}
if (this.enableGeohashPrefix != fieldMergeWith.enableGeohashPrefix) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different geohash_prefix");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different geohash_prefix");
}
if (this.normalizeLat != fieldMergeWith.normalizeLat) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different normalize_lat");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different normalize_lat");
}
if (this.normalizeLon != fieldMergeWith.normalizeLon) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different normalize_lon");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different normalize_lon");
}
if (!Objects.equal(this.precisionStep, fieldMergeWith.precisionStep)) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different precision_step");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different precision_step");
}
if (this.validateLat != fieldMergeWith.validateLat) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate_lat");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different validate_lat");
}
if (this.validateLon != fieldMergeWith.validateLon) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate_lon");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different validate_lon");
}
}

View File

@ -43,7 +43,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
@ -281,10 +281,10 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper<String> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different field type");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different field type");
return;
}
final GeoShapeFieldMapper fieldMergeWith = (GeoShapeFieldMapper) mergeWith;
@ -292,7 +292,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper<String> {
// prevent user from changing strategies
if (!(this.defaultStrategy.getClass().equals(mergeWithStrategy.getClass()))) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different strategy");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different strategy");
}
final SpatialPrefixTree grid = this.defaultStrategy.getGrid();
@ -300,17 +300,17 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper<String> {
// prevent user from changing trees (changes encoding)
if (!grid.getClass().equals(mergeGrid.getClass())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different tree");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different tree");
}
// TODO we should allow this, but at the moment levels is used to build bookkeeping variables
// in lucene's SpatialPrefixTree implementations, need a patch to correct that first
if (grid.getMaxLevels() != mergeGrid.getMaxLevels()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different tree_levels or precision");
mergeResult.addConflict("mapper [" + names.fullName() + "] has different tree_levels or precision");
}
// bail if there were merge conflicts
if (mergeContext.hasConflicts() || mergeContext.mergeFlags().simulate()) {
if (mergeResult.hasConflicts() || mergeResult.simulate()) {
return;
}

View File

@ -39,7 +39,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
@ -314,11 +314,11 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) {
mergeContext.addConflict("mapper [" + names.fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
mergeResult.addConflict("mapper [" + names.fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
}
super.merge(mergeWith, mergeContext);
super.merge(mergeWith, mergeResult);
}
@Override

View File

@ -37,7 +37,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
@ -278,9 +278,9 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper<String> implement
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
FieldNamesFieldMapper fieldNamesMapperMergeWith = (FieldNamesFieldMapper)mergeWith;
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
if (fieldNamesMapperMergeWith.enabledState != enabledState && !fieldNamesMapperMergeWith.enabledState.unset()) {
this.enabledState = fieldNamesMapperMergeWith.enabledState;
}

View File

@ -49,7 +49,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
@ -361,7 +361,7 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements Intern
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}

View File

@ -34,7 +34,7 @@ import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
@ -216,9 +216,9 @@ public class IndexFieldMapper extends AbstractFieldMapper<String> implements Int
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
IndexFieldMapper indexFieldMapperMergeWith = (IndexFieldMapper) mergeWith;
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
if (indexFieldMapperMergeWith.enabledState != enabledState && !indexFieldMapperMergeWith.enabledState.unset()) {
this.enabledState = indexFieldMapperMergeWith.enabledState;
}

View File

@ -44,7 +44,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
@ -363,13 +363,13 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
ParentFieldMapper other = (ParentFieldMapper) mergeWith;
if (!Objects.equal(type, other.type)) {
mergeContext.addConflict("The _parent field's type option can't be changed: [" + type + "]->[" + other.type + "]");
mergeResult.addConflict("The _parent field's type option can't be changed: [" + type + "]->[" + other.type + "]");
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {

View File

@ -33,7 +33,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
@ -242,7 +242,7 @@ public class RoutingFieldMapper extends AbstractFieldMapper<String> implements I
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}

View File

@ -28,7 +28,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
@ -175,9 +175,9 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
SizeFieldMapper sizeFieldMapperMergeWith = (SizeFieldMapper) mergeWith;
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
if (sizeFieldMapperMergeWith.enabledState != enabledState && !sizeFieldMapperMergeWith.enabledState.unset()) {
this.enabledState = sizeFieldMapperMergeWith.enabledState;
}

View File

@ -417,9 +417,9 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith;
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
if (sourceMergeWith.compress != null) {
this.compress = sourceMergeWith.compress;
}

View File

@ -33,7 +33,7 @@ import org.elasticsearch.index.AlreadyExpiredException;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
@ -238,13 +238,13 @@ public class TTLFieldMapper extends LongFieldMapper implements InternalMapper, R
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith;
if (((TTLFieldMapper) mergeWith).enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with
if (this.enabledState == EnabledAttributeMapper.ENABLED && ((TTLFieldMapper) mergeWith).enabledState == EnabledAttributeMapper.DISABLED) {
mergeContext.addConflict("_ttl cannot be disabled once it was enabled.");
mergeResult.addConflict("_ttl cannot be disabled once it was enabled.");
} else {
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.enabledState = ttlMergeWith.enabledState;
}
}
@ -252,7 +252,7 @@ public class TTLFieldMapper extends LongFieldMapper implements InternalMapper, R
if (ttlMergeWith.defaultTTL != -1) {
// we never build the default when the field is disabled so we should also not set it
// (it does not make a difference though as everything that is not build in toXContent will also not be set in the cluster)
if (!mergeContext.mergeFlags().simulate() && (enabledState == EnabledAttributeMapper.ENABLED)) {
if (!mergeResult.simulate() && (enabledState == EnabledAttributeMapper.ENABLED)) {
this.defaultTTL = ttlMergeWith.defaultTTL;
}
}

View File

@ -35,7 +35,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
@ -352,10 +352,10 @@ public class TimestampFieldMapper extends DateFieldMapper implements InternalMap
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
TimestampFieldMapper timestampFieldMapperMergeWith = (TimestampFieldMapper) mergeWith;
super.merge(mergeWith, mergeContext);
if (!mergeContext.mergeFlags().simulate()) {
super.merge(mergeWith, mergeResult);
if (!mergeResult.simulate()) {
if (timestampFieldMapperMergeWith.enabledState != enabledState && !timestampFieldMapperMergeWith.enabledState.unset()) {
this.enabledState = timestampFieldMapperMergeWith.enabledState;
}
@ -364,18 +364,18 @@ public class TimestampFieldMapper extends DateFieldMapper implements InternalMap
return;
}
if (defaultTimestamp == null) {
mergeContext.addConflict("Cannot update default in _timestamp value. Value is null now encountering " + timestampFieldMapperMergeWith.defaultTimestamp());
mergeResult.addConflict("Cannot update default in _timestamp value. Value is null now encountering " + timestampFieldMapperMergeWith.defaultTimestamp());
} else if (timestampFieldMapperMergeWith.defaultTimestamp() == null) {
mergeContext.addConflict("Cannot update default in _timestamp value. Value is \" + defaultTimestamp.toString() + \" now encountering null");
mergeResult.addConflict("Cannot update default in _timestamp value. Value is \" + defaultTimestamp.toString() + \" now encountering null");
} else if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) {
mergeContext.addConflict("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp());
mergeResult.addConflict("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp());
}
if (this.path != null) {
if (path.equals(timestampFieldMapperMergeWith.path()) == false) {
mergeContext.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is " + (timestampFieldMapperMergeWith.path() == null ? "missing" : timestampFieldMapperMergeWith.path()));
mergeResult.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is " + (timestampFieldMapperMergeWith.path() == null ? "missing" : timestampFieldMapperMergeWith.path()));
}
} else if (timestampFieldMapperMergeWith.path() != null) {
mergeContext.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is missing");
mergeResult.addConflict("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is missing");
}
}
}

View File

@ -41,7 +41,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
@ -210,7 +210,7 @@ public class TypeFieldMapper extends AbstractFieldMapper<String> implements Inte
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}

View File

@ -35,7 +35,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
@ -228,7 +228,7 @@ public class UidFieldMapper extends AbstractFieldMapper<Uid> implements Internal
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}

View File

@ -24,14 +24,13 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.NumericDocValuesField;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
@ -163,7 +162,7 @@ public class VersionFieldMapper extends AbstractFieldMapper<Long> implements Int
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
// nothing to do
}

View File

@ -48,7 +48,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
@ -320,12 +320,12 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
this.nullValue = ((IpFieldMapper) mergeWith).nullValue;
}
}

View File

@ -49,7 +49,7 @@ import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperUtils;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ObjectMapperListener;
import org.elasticsearch.index.mapper.ParseContext;
@ -919,32 +919,32 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll, Clonea
}
@Override
public void merge(final Mapper mergeWith, final MergeContext mergeContext) throws MergeMappingException {
public void merge(final Mapper mergeWith, final MergeResult mergeResult) throws MergeMappingException {
if (!(mergeWith instanceof ObjectMapper)) {
mergeContext.addConflict("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]");
mergeResult.addConflict("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]");
return;
}
ObjectMapper mergeWithObject = (ObjectMapper) mergeWith;
if (nested().isNested()) {
if (!mergeWithObject.nested().isNested()) {
mergeContext.addConflict("object mapping [" + name() + "] can't be changed from nested to non-nested");
mergeResult.addConflict("object mapping [" + name() + "] can't be changed from nested to non-nested");
return;
}
} else {
if (mergeWithObject.nested().isNested()) {
mergeContext.addConflict("object mapping [" + name() + "] can't be changed from non-nested to nested");
mergeResult.addConflict("object mapping [" + name() + "] can't be changed from non-nested to nested");
return;
}
}
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
if (mergeWithObject.dynamic != null) {
this.dynamic = mergeWithObject.dynamic;
}
}
doMerge(mergeWithObject, mergeContext);
doMerge(mergeWithObject, mergeResult);
List<Mapper> mappersToPut = new ArrayList<>();
FieldMapperListener.Aggregator newFieldMappers = new FieldMapperListener.Aggregator();
@ -954,20 +954,20 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll, Clonea
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
mappersToPut.add(mergeWithMapper);
mergeWithMapper.traverse(newFieldMappers);
mergeWithMapper.traverse(newObjectMappers);
}
} else {
mergeIntoMapper.merge(mergeWithMapper, mergeContext);
mergeIntoMapper.merge(mergeWithMapper, mergeResult);
}
}
if (!newFieldMappers.mappers.isEmpty()) {
mergeContext.addFieldMappers(newFieldMappers.mappers);
mergeResult.addFieldMappers(newFieldMappers.mappers);
}
if (!newObjectMappers.mappers.isEmpty()) {
mergeContext.addObjectMappers(newObjectMappers.mappers);
mergeResult.addObjectMappers(newObjectMappers.mappers);
}
// add the mappers only after the administration have been done, so it will not be visible to parser (which first try to read with no lock)
for (Mapper mapper : mappersToPut) {
@ -975,7 +975,7 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll, Clonea
}
}
protected void doMerge(ObjectMapper mergeWith, MergeContext mergeContext) {
protected void doMerge(ObjectMapper mergeWith, MergeResult mergeResult) {
}

View File

@ -260,9 +260,9 @@ public class RootObjectMapper extends ObjectMapper {
}
@Override
protected void doMerge(ObjectMapper mergeWith, MergeContext mergeContext) {
protected void doMerge(ObjectMapper mergeWith, MergeResult mergeResult) {
RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith;
if (!mergeContext.mergeFlags().simulate()) {
if (!mergeResult.simulate()) {
// merge them
List<DynamicTemplate> mergedTemplates = Lists.newArrayList(Arrays.asList(this.dynamicTemplates));
for (DynamicTemplate template : mergeWithObject.dynamicTemplates) {

View File

@ -57,7 +57,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase {
public void testNewField() {
FieldMappersLookup lookup = new FieldMappersLookup();
FakeFieldMapper f = new FakeFieldMapper("foo", "bar");
FieldMappersLookup lookup2 = lookup.copyAndAddAll(Lists.newArrayList(f));
FieldMappersLookup lookup2 = lookup.copyAndAddAll(newList(f));
assertNull(lookup.fullName("foo"));
assertNull(lookup.indexName("bar"));
@ -76,9 +76,9 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase {
FieldMappersLookup lookup = new FieldMappersLookup();
FakeFieldMapper f = new FakeFieldMapper("foo", "bar");
FakeFieldMapper other = new FakeFieldMapper("blah", "blah");
lookup = lookup.copyAndAddAll(Lists.newArrayList(f, other));
lookup = lookup.copyAndAddAll(newList(f, other));
FakeFieldMapper f2 = new FakeFieldMapper("foo", "bar");
FieldMappersLookup lookup2 = lookup.copyAndAddAll(Lists.newArrayList(f2));
FieldMappersLookup lookup2 = lookup.copyAndAddAll(newList(f2));
FieldMappers mappers = lookup2.fullName("foo");
assertNotNull(mappers);
@ -93,7 +93,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase {
public void testIndexName() {
FakeFieldMapper f1 = new FakeFieldMapper("foo", "foo");
FieldMappersLookup lookup = new FieldMappersLookup();
lookup = lookup.copyAndAddAll(Lists.newArrayList(f1));
lookup = lookup.copyAndAddAll(newList(f1));
FieldMappers mappers = lookup.indexName("foo");
assertNotNull(mappers);
@ -105,7 +105,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase {
FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
FieldMappersLookup lookup = new FieldMappersLookup();
lookup = lookup.copyAndAddAll(Lists.newArrayList(f1, f2));
lookup = lookup.copyAndAddAll(newList(f1, f2));
List<String> names = lookup.simpleMatchToIndexNames("b*");
assertTrue(names.contains("baz"));
assertTrue(names.contains("boo"));
@ -115,7 +115,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase {
FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
FieldMappersLookup lookup = new FieldMappersLookup();
lookup = lookup.copyAndAddAll(Lists.newArrayList(f1, f2));
lookup = lookup.copyAndAddAll(newList(f1, f2));
List<String> names = lookup.simpleMatchToFullName("b*");
assertTrue(names.contains("foo"));
assertTrue(names.contains("bar"));
@ -126,7 +126,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase {
FakeFieldMapper f2 = new FakeFieldMapper("foo", "realbar");
FakeFieldMapper f3 = new FakeFieldMapper("baz", "realfoo");
FieldMappersLookup lookup = new FieldMappersLookup();
lookup = lookup.copyAndAddAll(Lists.newArrayList(f1, f2, f3));
lookup = lookup.copyAndAddAll(newList(f1, f2, f3));
assertNotNull(lookup.smartName("foo"));
assertEquals(2, lookup.smartName("foo").mappers().size());
@ -138,7 +138,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase {
public void testIteratorImmutable() {
FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
FieldMappersLookup lookup = new FieldMappersLookup();
lookup = lookup.copyAndAddAll(Lists.newArrayList(f1));
lookup = lookup.copyAndAddAll(newList(f1));
try {
Iterator<FieldMapper<?>> itr = lookup.iterator();
@ -154,12 +154,12 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase {
public void testGetMapper() {
FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
FieldMappersLookup lookup = new FieldMappersLookup();
lookup = lookup.copyAndAddAll(Lists.newArrayList(f1));
lookup = lookup.copyAndAddAll(newList(f1));
assertEquals(f1, lookup.get("foo"));
assertNull(lookup.get("bar")); // get is only by full name
FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo");
lookup = lookup.copyAndAddAll(Lists.newArrayList(f2));
lookup = lookup.copyAndAddAll(newList(f2));
try {
lookup.get("foo");
fail("get should have enforced foo is unique");
@ -168,6 +168,10 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase {
}
}
static List<FieldMapper<?>> newList(FieldMapper<?>... mapper) {
return Lists.newArrayList(mapper);
}
// this sucks how much must be overriden just do get a dummy field mapper...
static class FakeFieldMapper extends AbstractFieldMapper<String> {
static Settings dummySettings = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build();

View File

@ -32,6 +32,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
@ -45,7 +46,6 @@ import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
@ -229,11 +229,11 @@ public class CopyToMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper docMapperAfter = parser.parse(mappingAfter);
DocumentMapper.MergeResult mergeResult = docMapperBefore.merge(docMapperAfter.mapping(), mergeFlags().simulate(true));
MergeResult mergeResult = docMapperBefore.merge(docMapperAfter.mapping(), true);
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapperBefore.merge(docMapperAfter.mapping(), mergeFlags().simulate(false));
docMapperBefore.merge(docMapperAfter.mapping(), false);
fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields();

View File

@ -25,6 +25,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
@ -32,7 +33,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
import static org.hamcrest.Matchers.equalTo;
/**
@ -64,12 +64,12 @@ public class TokenCountFieldMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(true));
MergeResult mergeResult = stage1.merge(stage2.mapping(), true);
assertThat(mergeResult.hasConflicts(), equalTo(false));
// Just simulated so merge hasn't happened yet
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword"));
mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
mergeResult = stage1.merge(stage2.mapping(), false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
// Just simulated so merge hasn't happened yet
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard"));

View File

@ -38,6 +38,7 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
@ -365,9 +366,9 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
Map<String, String> config = getConfigurationViaXContent(initialDateFieldMapper);
assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy"));
DocumentMapper.MergeResult mergeResult = defaultMapper.merge(mergeMapper.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
MergeResult mergeResult = defaultMapper.merge(mergeMapper.mapping(), false);
assertThat("Merging resulting in conflicts: " + Arrays.asList(mergeResult.conflicts()), mergeResult.hasConflicts(), is(false));
assertThat("Merging resulting in conflicts: " + Arrays.asList(mergeResult.buildConflicts()), mergeResult.hasConflicts(), is(false));
assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(DateFieldMapper.class)));
DateFieldMapper mergedFieldMapper = (DateFieldMapper) defaultMapper.mappers().getMapper("field");

View File

@ -33,7 +33,7 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ObjectMapperListener;
import org.elasticsearch.index.mapper.ParseContext;
@ -219,7 +219,7 @@ public class ExternalMapper extends AbstractFieldMapper<Object> {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
// ignore this for now
}

View File

@ -44,9 +44,9 @@ public class ExternalRootMapper implements RootMapper {
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
if (!(mergeWith instanceof ExternalRootMapper)) {
mergeContext.addConflict("Trying to merge " + mergeWith + " with " + this);
mergeResult.addConflict("Trying to merge " + mergeWith + " with " + this);
}
}

View File

@ -23,6 +23,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
@ -30,7 +31,6 @@ import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
@ -486,11 +486,11 @@ public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
MergeResult mergeResult = stage1.merge(stage2.mapping(), false);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.conflicts().length, equalTo(2));
assertThat(mergeResult.buildConflicts().length, equalTo(2));
// todo better way of checking conflict?
assertThat("mapper [point] has different validate_lat", isIn(new ArrayList<>(Arrays.asList(mergeResult.conflicts()))));
assertThat("mapper [point] has different validate_lat", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
// correct mapping and ensure no failures
stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
@ -498,7 +498,7 @@ public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {
.field("validate", true).field("normalize", true).endObject().endObject()
.endObject().endObject().string();
stage2 = parser.parse(stage2Mapping);
mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
mergeResult = stage1.merge(stage2.mapping(), false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
}
}

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
@ -35,7 +36,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.isIn;
@ -337,11 +337,11 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest {
.field("orientation", "cw").endObject().endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
MergeResult mergeResult = stage1.merge(stage2.mapping(), false);
// check correct conflicts
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.conflicts().length, equalTo(3));
ArrayList conflicts = new ArrayList<>(Arrays.asList(mergeResult.conflicts()));
assertThat(mergeResult.buildConflicts().length, equalTo(3));
ArrayList conflicts = new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()));
assertThat("mapper [shape] has different strategy", isIn(conflicts));
assertThat("mapper [shape] has different tree", isIn(conflicts));
assertThat("mapper [shape] has different tree_levels or precision", isIn(conflicts));
@ -364,7 +364,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m")
.field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string();
stage2 = parser.parse(stage2Mapping);
mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
mergeResult = stage1.merge(stage2.mapping(), false);
// verify mapping changes, and ensure no failures
assertThat(mergeResult.hasConflicts(), equalTo(false));

View File

@ -102,7 +102,7 @@ public class IndexTypeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper mapperDisabled = parser.parse(mappingWithIndexDisabled);
mapperEnabled.merge(mapperDisabled.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
mapperEnabled.merge(mapperDisabled.mapping(), false);
assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(false));
}
@ -118,7 +118,7 @@ public class IndexTypeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
enabledMapper.merge(disabledMapper.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
enabledMapper.merge(disabledMapper.mapping(), false);
assertThat(enabledMapper.indexMapper().enabled(), is(false));
}

View File

@ -162,11 +162,11 @@ public class FieldNamesFieldMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper mapperEnabled = parser.parse(enabledMapping);
DocumentMapper mapperDisabled = parser.parse(disabledMapping);
mapperEnabled.merge(mapperDisabled.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
mapperEnabled.merge(mapperDisabled.mapping(), false);
assertFalse(mapperEnabled.rootMapper(FieldNamesFieldMapper.class).enabled());
mapperEnabled = parser.parse(enabledMapping);
mapperDisabled.merge(mapperEnabled.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
mapperDisabled.merge(mapperEnabled.mapping(), false);
assertTrue(mapperEnabled.rootMapper(FieldNamesFieldMapper.class).enabled());
}
}

View File

@ -23,12 +23,12 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
import static org.hamcrest.Matchers.*;
/**
@ -51,13 +51,13 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(true));
MergeResult mergeResult = stage1.merge(stage2.mapping(), true);
assertThat(mergeResult.hasConflicts(), equalTo(false));
// since we are simulating, we should not have the age mapping
assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue());
assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue());
// now merge, don't simulate
mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
mergeResult = stage1.merge(stage2.mapping(), false);
// there is still merge failures
assertThat(mergeResult.hasConflicts(), equalTo(false));
// but we have the age in
@ -76,7 +76,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper withDynamicMapper = parser.parse(withDynamicMapping);
assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
DocumentMapper.MergeResult mergeResult = mapper.merge(withDynamicMapper.mapping(), mergeFlags().simulate(false));
MergeResult mergeResult = mapper.merge(withDynamicMapper.mapping(), false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(mapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
}
@ -93,14 +93,14 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().endObject().string();
DocumentMapper nestedMapper = parser.parse(nestedMapping);
DocumentMapper.MergeResult mergeResult = objectMapper.merge(nestedMapper.mapping(), mergeFlags().simulate(true));
MergeResult mergeResult = objectMapper.merge(nestedMapper.mapping(), true);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.conflicts().length, equalTo(1));
assertThat(mergeResult.conflicts()[0], equalTo("object mapping [obj] can't be changed from non-nested to nested"));
assertThat(mergeResult.buildConflicts().length, equalTo(1));
assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from non-nested to nested"));
mergeResult = nestedMapper.merge(objectMapper.mapping(), mergeFlags().simulate(true));
assertThat(mergeResult.conflicts().length, equalTo(1));
assertThat(mergeResult.conflicts()[0], equalTo("object mapping [obj] can't be changed from nested to non-nested"));
mergeResult = nestedMapper.merge(objectMapper.mapping(), true);
assertThat(mergeResult.buildConflicts().length, equalTo(1));
assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from nested to non-nested"));
}
@Test
@ -117,7 +117,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper changed = parser.parse(mapping2);
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("whitespace"));
DocumentMapper.MergeResult mergeResult = existing.merge(changed.mapping(), mergeFlags().simulate(false));
MergeResult mergeResult = existing.merge(changed.mapping(), false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("keyword"));
@ -137,7 +137,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper changed = parser.parse(mapping2);
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("whitespace"));
DocumentMapper.MergeResult mergeResult = existing.merge(changed.mapping(), mergeFlags().simulate(false));
MergeResult mergeResult = existing.merge(changed.mapping(), false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("standard"));

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
@ -33,7 +34,6 @@ import java.util.Arrays;
import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
import static org.hamcrest.Matchers.*;
/**
@ -62,10 +62,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json");
DocumentMapper docMapper2 = parser.parse(mapping);
DocumentMapper.MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper2.mapping(), mergeFlags().simulate(false));
docMapper.merge(docMapper2.mapping(), false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -85,10 +85,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json");
DocumentMapper docMapper3 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper3.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
mergeResult = docMapper.merge(docMapper3.mapping(), true);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper3.mapping(), mergeFlags().simulate(false));
docMapper.merge(docMapper3.mapping(), false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -103,10 +103,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
DocumentMapper docMapper4 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper4.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
mergeResult = docMapper.merge(docMapper4.mapping(), true);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper4.mapping(), mergeFlags().simulate(false));
docMapper.merge(docMapper4.mapping(), false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -138,10 +138,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json");
DocumentMapper docMapper2 = parser.parse(mapping);
DocumentMapper.MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper2.mapping(), mergeFlags().simulate(false));
docMapper.merge(docMapper2.mapping(), false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -161,10 +161,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json");
DocumentMapper docMapper3 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper3.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
mergeResult = docMapper.merge(docMapper3.mapping(), true);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper3.mapping(), mergeFlags().simulate(false));
docMapper.merge(docMapper3.mapping(), false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -177,17 +177,17 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json");
DocumentMapper docMapper4 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper4.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.conflicts()[0], equalTo("mapper [name] has different index values"));
assertThat(mergeResult.conflicts()[1], equalTo("mapper [name] has different store values"));
mergeResult = docMapper.merge(docMapper4.mapping(), true);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different index values"));
assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different store values"));
mergeResult = docMapper.merge(docMapper4.mapping(), mergeFlags().simulate(false));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(true));
mergeResult = docMapper.merge(docMapper4.mapping(), false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
assertThat(mergeResult.conflicts()[0], equalTo("mapper [name] has different index values"));
assertThat(mergeResult.conflicts()[1], equalTo("mapper [name] has different store values"));
assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different index values"));
assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different store values"));
// There are conflicts, but the `name.not_indexed3` has been added, b/c that field has no conflicts
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());

View File

@ -114,7 +114,7 @@ public class SizeMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
enabledMapper.merge(disabledMapper.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
enabledMapper.merge(disabledMapper.mapping(), false);
assertThat(enabledMapper.SizeFieldMapper().enabled(), is(false));
}
}

View File

@ -41,8 +41,7 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapper.MergeFlags;
import org.elasticsearch.index.mapper.DocumentMapper.MergeResult;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
@ -500,8 +499,8 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest {
String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject()
.endObject().endObject().endObject().endObject().string();
MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), MergeFlags.mergeFlags().simulate(false));
assertFalse(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts());
MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), false);
assertFalse(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts());
doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -515,10 +514,10 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest {
updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject()
.endObject().endObject().endObject().endObject().string();
mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), MergeFlags.mergeFlags());
mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), true);
assertTrue(mergeResult.hasConflicts());
assertEquals(1, mergeResult.conflicts().length);
assertTrue(mergeResult.conflicts()[0].contains("cannot enable norms"));
assertEquals(1, mergeResult.buildConflicts().length);
assertTrue(mergeResult.buildConflicts()[0].contains("cannot enable norms"));
}
public void testTermsFilter() throws Exception {

View File

@ -141,7 +141,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
enabledMapper.merge(disabledMapper.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
enabledMapper.merge(disabledMapper.mapping(), false);
assertThat(enabledMapper.timestampFieldMapper().enabled(), is(false));
}
@ -502,8 +502,8 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject()
.endObject().endObject().string();
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertThat(mergeResult.conflicts().length, equalTo(0));
MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), false);
assertThat(mergeResult.buildConflicts().length, equalTo(0));
assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.EAGER));
assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(indexSettings), equalTo("array"));
}
@ -576,13 +576,13 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.endObject()
.endObject().endObject().string();
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true);
String[] expectedConflicts = {"mapper [_timestamp] has different index values", "mapper [_timestamp] has different store values", "Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02", "Cannot update path in _timestamp value. Value is foo path in merged mapping is bar", "mapper [_timestamp] has different tokenize values"};
for (String conflict : mergeResult.conflicts()) {
for (String conflict : mergeResult.buildConflicts()) {
assertThat(conflict, isIn(expectedConflicts));
}
assertThat(mergeResult.conflicts().length, equalTo(expectedConflicts.length));
assertThat(mergeResult.buildConflicts().length, equalTo(expectedConflicts.length));
assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.LAZY));
assertTrue(docMapper.timestampFieldMapper().enabled());
assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(indexSettings), equalTo("doc_values"));
@ -610,7 +610,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.endObject()
.endObject().endObject().string();
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true);
List<String> expectedConflicts = new ArrayList<>();
expectedConflicts.add("mapper [_timestamp] has different index values");
expectedConflicts.add("mapper [_timestamp] has different tokenize values");
@ -620,7 +620,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
expectedConflicts.add("mapper [_timestamp] has different doc_values values");
}
for (String conflict : mergeResult.conflicts()) {
for (String conflict : mergeResult.buildConflicts()) {
assertThat(conflict, isIn(expectedConflicts));
}
}
@ -671,10 +671,10 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper docMapper = parser.parse(mapping1);
docMapper.refreshSource();
docMapper = parser.parse(docMapper.mappingSource().string());
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
assertThat(mergeResult.conflicts().length, equalTo(conflict == null ? 0:1));
MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true);
assertThat(mergeResult.buildConflicts().length, equalTo(conflict == null ? 0:1));
if (conflict != null) {
assertThat(mergeResult.conflicts()[0], containsString(conflict));
assertThat(mergeResult.buildConflicts()[0], containsString(conflict));
}
}

View File

@ -116,8 +116,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper mapperWithoutTtl = parser.parse(mappingWithoutTtl);
DocumentMapper mapperWithTtl = parser.parse(mappingWithTtl);
DocumentMapper.MergeFlags mergeFlags = DocumentMapper.MergeFlags.mergeFlags().simulate(false);
DocumentMapper.MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl.mapping(), mergeFlags);
MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl.mapping(), false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(true));
@ -143,8 +142,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper initialMapper = parser.parse(mappingWithTtl);
DocumentMapper updatedMapper = parser.parse(updatedMapping);
DocumentMapper.MergeFlags mergeFlags = DocumentMapper.MergeFlags.mergeFlags().simulate(false);
DocumentMapper.MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), mergeFlags);
MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
@ -158,8 +156,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper initialMapper = parser.parse(mappingWithTtl);
DocumentMapper updatedMapper = parser.parse(mappingWithTtlDisabled);
DocumentMapper.MergeFlags mergeFlags = DocumentMapper.MergeFlags.mergeFlags().simulate(true);
DocumentMapper.MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), mergeFlags);
MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
@ -197,7 +194,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
public void testNoConflictIfNothingSetAndDisabledLater() throws Exception {
IndexService indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type");
XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d");
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDisabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(randomBoolean()));
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean());
assertFalse(mergeResult.hasConflicts());
}
@ -205,7 +202,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
public void testNoConflictIfNothingSetAndEnabledLater() throws Exception {
IndexService indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type");
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(randomBoolean()));
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean());
assertFalse(mergeResult.hasConflicts());
}
@ -214,7 +211,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
IndexService indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type", mappingWithTtlEnabled);
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), false);
assertFalse(mergeResult.hasConflicts());
CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
@ -227,7 +224,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
CompressedString mappingAfterCreation = indexService.mapperService().documentMapper("type").refreshSource();
assertThat(mappingAfterCreation, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), false);
assertFalse(mergeResult.hasConflicts());
CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
@ -241,7 +238,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
IndexService indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type", mappingWithTtl);
CompressedString mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d");
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDifferentDefault.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDifferentDefault.string()), true).mapping(), true);
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - no mappings applied
CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
@ -253,7 +250,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled();
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), true);
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - no mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
@ -265,7 +262,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), true);
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - no mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
@ -276,7 +273,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
mappingWithoutTtl = getMappingWithTtlDisabled("6d");
indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), false);
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
@ -286,7 +283,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
// check if switching simulate flag off works if nothing was applied in the beginning
indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type");
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), false);
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();

View File

@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
@ -79,9 +80,9 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException {
IndexService indexService = createIndex("test", ImmutableSettings.settingsBuilder().build(), "type", mapping);
// simulate like in MetaDataMappingService#putMapping
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), false);
// assure we have no conflicts
assertThat(mergeResult.conflicts().length, equalTo(0));
assertThat(mergeResult.buildConflicts().length, equalTo(0));
// make sure mappings applied
CompressedString mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string()));
@ -103,9 +104,9 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
IndexService indexService = createIndex("test", ImmutableSettings.settingsBuilder().build(), "type", mapping);
CompressedString mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource();
// simulate like in MetaDataMappingService#putMapping
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), true);
// assure we have conflicts
assertThat(mergeResult.conflicts().length, equalTo(1));
assertThat(mergeResult.buildConflicts().length, equalTo(1));
// make sure simulate flag actually worked - no mappings applied
CompressedString mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate));