Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-02-17 10:25:15 +00:00)
Merge pull request #11783 from rjernst/refactor/field-type-merge
Move merge simulation of fieldtype settings to fieldtype method
Commit: fa8a300f71
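
This refactor moves the per-setting conflict checks out of each mapper's merge() override and into a single MappedFieldType.validateCompatible(other, conflicts) method that subclasses extend; merge() now just collects the reported conflicts and forwards them to MergeResult. A minimal, hypothetical sketch of that shape (the classes below are simplified stand-ins, not the real org.elasticsearch.index.mapper types):

import java.util.ArrayList;
import java.util.List;

// Minimal sketch of the pattern this commit introduces; simplified stand-ins,
// not the real Elasticsearch MappedFieldType/AbstractFieldMapper classes.
class SketchFieldType {
    final String fullName;
    final boolean stored;

    SketchFieldType(String fullName, boolean stored) {
        this.fullName = fullName;
        this.stored = stored;
    }

    // The field type reports its own incompatibilities into a caller-supplied list,
    // mirroring MappedFieldType.validateCompatible(other, conflicts) in the diff below.
    void validateCompatible(SketchFieldType other, List<String> conflicts) {
        if (stored != other.stored) {
            conflicts.add("mapper [" + fullName + "] has different store values");
        }
    }
}

class SketchMapper {
    final SketchFieldType fieldType;

    SketchMapper(SketchFieldType fieldType) {
        this.fieldType = fieldType;
    }

    // merge() no longer hard-codes the checks; it delegates to the field type and
    // forwards each conflict, the way AbstractFieldMapper.merge() now feeds MergeResult.
    List<String> merge(SketchMapper mergeWith) {
        List<String> conflicts = new ArrayList<>();
        fieldType.validateCompatible(mergeWith.fieldType, conflicts);
        return conflicts;
    }

    public static void main(String[] args) {
        SketchMapper current = new SketchMapper(new SketchFieldType("title", true));
        SketchMapper incoming = new SketchMapper(new SketchFieldType("title", false));
        System.out.println(current.merge(incoming)); // [mapper [title] has different store values]
    }
}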
@@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import com.google.common.base.Strings;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.queries.TermsQuery;
@@ -225,6 +226,63 @@ public class MappedFieldType extends FieldType {

    // norelease: we need to override freeze() and add safety checks that all settings are actually set

    /**
     * Checks for any conflicts between this field type and other.
     */
    public void validateCompatible(MappedFieldType other, List<String> conflicts) {
        boolean indexed = indexOptions() != IndexOptions.NONE;
        boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE;
        // TODO: should be validating if index options go "up" (but "down" is ok)
        if (indexed != mergeWithIndexed || tokenized() != other.tokenized()) {
            conflicts.add("mapper [" + names().fullName() + "] has different index values");
        }
        if (stored() != other.stored()) {
            conflicts.add("mapper [" + names().fullName() + "] has different store values");
        }
        if (hasDocValues() == false && other.hasDocValues()) {
            // don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitely set
            // when the doc_values field data format is configured
            conflicts.add("mapper [" + names().fullName() + "] has different doc_values values");
        }
        if (omitNorms() && !other.omitNorms()) {
            conflicts.add("mapper [" + names().fullName() + "] cannot enable norms (`norms.enabled`)");
        }
        if (tokenized() != other.tokenized()) {
            conflicts.add("mapper [" + names().fullName() + "] has different tokenize values");
        }
        if (storeTermVectors() != other.storeTermVectors()) {
            conflicts.add("mapper [" + names().fullName() + "] has different store_term_vector values");
        }
        if (storeTermVectorOffsets() != other.storeTermVectorOffsets()) {
            conflicts.add("mapper [" + names().fullName() + "] has different store_term_vector_offsets values");
        }
        if (storeTermVectorPositions() != other.storeTermVectorPositions()) {
            conflicts.add("mapper [" + names().fullName() + "] has different store_term_vector_positions values");
        }
        if (storeTermVectorPayloads() != other.storeTermVectorPayloads()) {
            conflicts.add("mapper [" + names().fullName() + "] has different store_term_vector_payloads values");
        }

        // null and "default"-named index analyzers both mean the default is used
        if (indexAnalyzer() == null || "default".equals(indexAnalyzer().name())) {
            if (other.indexAnalyzer() != null && "default".equals(other.indexAnalyzer().name()) == false) {
                conflicts.add("mapper [" + names().fullName() + "] has different analyzer");
            }
        } else if (other.indexAnalyzer() == null || "default".equals(other.indexAnalyzer().name())) {
            conflicts.add("mapper [" + names().fullName() + "] has different analyzer");
        } else if (indexAnalyzer().name().equals(other.indexAnalyzer().name()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] has different analyzer");
        }

        if (!names().equals(other.names())) {
            conflicts.add("mapper [" + names().fullName() + "] has different index_name");
        }

        if (Objects.equals(similarity(), other.similarity()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] has different similarity");
        }
    }

    public boolean isNumeric() {
        return false;
    }
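
Field-type subclasses extend these base checks by overriding validateCompatible, calling super first and then adding their own comparisons, as the CompletionFieldType, GeoPointFieldType, and GeoShapeFieldType hunks further below do. Continuing the sketch from the top of this page (same file/package; SketchGeoFieldType and its lat_lon flag are illustrative only):

import java.util.List;

// Hypothetical subclass continuing the earlier sketch; not real Elasticsearch API.
class SketchGeoFieldType extends SketchFieldType {
    final boolean latLonEnabled;

    SketchGeoFieldType(String fullName, boolean stored, boolean latLonEnabled) {
        super(fullName, stored);
        this.latLonEnabled = latLonEnabled;
    }

    @Override
    void validateCompatible(SketchFieldType other, List<String> conflicts) {
        super.validateCompatible(other, conflicts);        // base checks first
        SketchGeoFieldType o = (SketchGeoFieldType) other; // caller has already matched the mapper classes
        if (latLonEnabled != o.latLonEnabled) {
            conflicts.add("mapper [" + fullName + "] has different lat_lon");
        }
    }
}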
@@ -392,83 +392,22 @@ public abstract class AbstractFieldMapper implements FieldMapper {
            return;
        }
        AbstractFieldMapper fieldMergeWith = (AbstractFieldMapper) mergeWith;
        boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
        boolean mergeWithIndexed = fieldMergeWith.fieldType().indexOptions() != IndexOptions.NONE;
        if (indexed != mergeWithIndexed || this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different index values");
        }
        if (this.fieldType().stored() != fieldMergeWith.fieldType().stored()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different store values");
        }
        if (!this.fieldType().hasDocValues() && fieldMergeWith.fieldType().hasDocValues()) {
            // don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitely set
            // when the doc_values field data format is configured
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different " + TypeParsers.DOC_VALUES + " values");
        }
        if (this.fieldType().omitNorms() && !fieldMergeWith.fieldType().omitNorms()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] cannot enable norms (`norms.enabled`)");
        }
        if (this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different tokenize values");
        }
        if (this.fieldType().storeTermVectors() != fieldMergeWith.fieldType().storeTermVectors()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different store_term_vector values");
        }
        if (this.fieldType().storeTermVectorOffsets() != fieldMergeWith.fieldType().storeTermVectorOffsets()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different store_term_vector_offsets values");
        }
        if (this.fieldType().storeTermVectorPositions() != fieldMergeWith.fieldType().storeTermVectorPositions()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different store_term_vector_positions values");
        }
        if (this.fieldType().storeTermVectorPayloads() != fieldMergeWith.fieldType().storeTermVectorPayloads()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different store_term_vector_payloads values");
        }

        // null and "default"-named index analyzers both mean the default is used
        if (this.fieldType().indexAnalyzer() == null || "default".equals(this.fieldType().indexAnalyzer().name())) {
            if (fieldMergeWith.fieldType().indexAnalyzer() != null && "default".equals(fieldMergeWith.fieldType().indexAnalyzer().name()) == false) {
                mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different analyzer");
            }
        } else if (fieldMergeWith.fieldType().indexAnalyzer() == null || "default".equals(fieldMergeWith.fieldType().indexAnalyzer().name())) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different analyzer");
        } else if (this.fieldType().indexAnalyzer().name().equals(fieldMergeWith.fieldType().indexAnalyzer().name()) == false) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different analyzer");
        }

        if (!this.fieldType().names().equals(fieldMergeWith.fieldType().names())) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different index_name");
        }

        if (this.fieldType().similarity() == null) {
            if (fieldMergeWith.fieldType().similarity() != null) {
                mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different similarity");
            }
        } else if (fieldMergeWith.fieldType().similarity() == null) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different similarity");
        } else if (!this.fieldType().similarity().equals(fieldMergeWith.fieldType().similarity())) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different similarity");
        List<String> subConflicts = new ArrayList<>(); // TODO: just expose list from MergeResult?
        fieldType().validateCompatible(fieldMergeWith.fieldType(), subConflicts);
        for (String conflict : subConflicts) {
            mergeResult.addConflict(conflict);
        }
        multiFields.merge(mergeWith, mergeResult);

        if (!mergeResult.simulate()) {
        if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
            // apply changeable values
            this.fieldType = this.fieldType().clone();
            this.fieldType().setOmitNorms(fieldMergeWith.fieldType().omitNorms());
            this.fieldType().setBoost(fieldMergeWith.fieldType().boost());
            this.fieldType().setNormsLoading(fieldMergeWith.fieldType().normsLoading());
            if (fieldMergeWith.fieldType().searchAnalyzer() != null) {
                this.fieldType().setSearchAnalyzer(fieldMergeWith.fieldType().searchAnalyzer());
            }
            this.fieldType = fieldMergeWith.fieldType().clone();
            this.fieldType().freeze();
            if (fieldMergeWith.customFieldDataSettings != null) {
                if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
                    this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
                    this.fieldType().setFieldDataType(new FieldDataType(defaultFieldDataType().getType(),
                            Settings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
                    ));
                }
            }
            this.fieldType().setNullValue(fieldMergeWith.fieldType().nullValue());
            this.fieldType().freeze();
            this.copyTo = fieldMergeWith.copyTo;
        }
    }
@@ -242,6 +242,24 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
        return new CompletionFieldType(this);
    }

    @Override
    public void validateCompatible(MappedFieldType fieldType, List<String> conflicts) {
        super.validateCompatible(fieldType, conflicts);
        CompletionFieldType other = (CompletionFieldType)fieldType;
        if (analyzingSuggestLookupProvider.hasPayloads() != other.analyzingSuggestLookupProvider.hasPayloads()) {
            conflicts.add("mapper [" + names().fullName() + "] has different payload values");
        }
        if (analyzingSuggestLookupProvider.getPreservePositionsIncrements() != other.analyzingSuggestLookupProvider.getPreservePositionsIncrements()) {
            conflicts.add("mapper [" + names().fullName() + "] has different 'preserve_position_increments' values");
        }
        if (analyzingSuggestLookupProvider.getPreserveSep() != other.analyzingSuggestLookupProvider.getPreserveSep()) {
            conflicts.add("mapper [" + names().fullName() + "] has different 'preserve_separators' values");
        }
        if(!ContextMapping.mappingsAreEqual(getContextMapping(), other.getContextMapping())) {
            conflicts.add("mapper [" + names().fullName() + "] has different 'context_mapping' values");
        }
    }

    public void setProvider(AnalyzingCompletionLookupProvider provider) {
        checkIfFrozen();
        this.analyzingSuggestLookupProvider = provider;
@@ -535,18 +553,6 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
    public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
        super.merge(mergeWith, mergeResult);
        CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith;
        if (fieldType().analyzingSuggestLookupProvider.hasPayloads() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.hasPayloads()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different payload values");
        }
        if (fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different 'preserve_position_increments' values");
        }
        if (fieldType().analyzingSuggestLookupProvider.getPreserveSep() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.getPreserveSep()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different 'preserve_separators' values");
        }
        if(!ContextMapping.mappingsAreEqual(fieldType().getContextMapping(), fieldMergeWith.fieldType().getContextMapping())) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different 'context_mapping' values");
        }
        if (!mergeResult.simulate()) {
            this.maxInputLength = fieldMergeWith.maxInputLength;
        }
|
@ -491,19 +491,6 @@ public class DateFieldMapper extends NumberFieldMapper {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
return;
|
||||
}
|
||||
if (!mergeResult.simulate()) {
|
||||
this.fieldType = this.fieldType.clone();
|
||||
fieldType().setDateTimeFormatter(((DateFieldMapper) mergeWith).fieldType().dateTimeFormatter());
|
||||
this.fieldType.freeze();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
|
@@ -319,9 +319,6 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
        }
        if (!mergeResult.simulate()) {
            NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
            this.fieldType = this.fieldType().clone();
            this.fieldType().setNumericPrecisionStep(nfmMergeWith.fieldType().numericPrecisionStep());
            this.fieldType().freeze();
            this.includeInAll = nfmMergeWith.includeInAll;
            if (nfmMergeWith.ignoreMalformed.explicit()) {
                this.ignoreMalformed = nfmMergeWith.ignoreMalformed;
@@ -330,6 +330,40 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
    public int hashCode() {
        return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType, lonFieldType, validateLon, validateLat, normalizeLon, normalizeLat);
    }

    @Override
    public void validateCompatible(MappedFieldType fieldType, List<String> conflicts) {
        super.validateCompatible(fieldType, conflicts);
        GeoPointFieldType other = (GeoPointFieldType)fieldType;
        if (isLatLonEnabled() != other.isLatLonEnabled()) {
            conflicts.add("mapper [" + names().fullName() + "] has different lat_lon");
        }
        if (isGeohashEnabled() != other.isGeohashEnabled()) {
            conflicts.add("mapper [" + names().fullName() + "] has different geohash");
        }
        if (geohashPrecision() != other.geohashPrecision()) {
            conflicts.add("mapper [" + names().fullName() + "] has different geohash_precision");
        }
        if (isGeohashPrefixEnabled() != other.isGeohashPrefixEnabled()) {
            conflicts.add("mapper [" + names().fullName() + "] has different geohash_prefix");
        }
        if (normalizeLat() != other.normalizeLat()) {
            conflicts.add("mapper [" + names().fullName() + "] has different normalize_lat");
        }
        if (normalizeLon() != other.normalizeLon()) {
            conflicts.add("mapper [" + names().fullName() + "] has different normalize_lon");
        }
        if (isLatLonEnabled() &&
                latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
            conflicts.add("mapper [" + names().fullName() + "] has different precision_step");
        }
        if (validateLat() != other.validateLat()) {
            conflicts.add("mapper [" + names().fullName() + "] has different validate_lat");
        }
        if (validateLon() != other.validateLon()) {
            conflicts.add("mapper [" + names().fullName() + "] has different validate_lon");
        }
    }

    public boolean isGeohashEnabled() {
        return geohashFieldType != null;
@@ -718,44 +752,6 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
        }
    }

    @Override
    public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
        super.merge(mergeWith, mergeResult);
        if (!this.getClass().equals(mergeWith.getClass())) {
            return;
        }
        GeoPointFieldMapper fieldMergeWith = (GeoPointFieldMapper) mergeWith;

        if (this.fieldType().isLatLonEnabled() != fieldMergeWith.fieldType().isLatLonEnabled()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different lat_lon");
        }
        if (this.fieldType().isGeohashEnabled() != fieldMergeWith.fieldType().isGeohashEnabled()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different geohash");
        }
        if (this.fieldType().geohashPrecision() != fieldMergeWith.fieldType().geohashPrecision()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different geohash_precision");
        }
        if (this.fieldType().isGeohashPrefixEnabled() != fieldMergeWith.fieldType().isGeohashPrefixEnabled()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different geohash_prefix");
        }
        if (this.fieldType().normalizeLat() != fieldMergeWith.fieldType().normalizeLat()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different normalize_lat");
        }
        if (this.fieldType().normalizeLon() != fieldMergeWith.fieldType().normalizeLon()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different normalize_lon");
        }
        if (fieldType().isLatLonEnabled() &&
                this.fieldType().latFieldType().numericPrecisionStep() != fieldMergeWith.fieldType().latFieldType().numericPrecisionStep()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different precision_step");
        }
        if (this.fieldType().validateLat() != fieldMergeWith.fieldType().validateLat()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different validate_lat");
        }
        if (this.fieldType().validateLon() != fieldMergeWith.fieldType().validateLon()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different validate_lon");
        }
    }

    @Override
    public Iterator<Mapper> iterator() {
        List<Mapper> extras = new ArrayList<>();
@@ -246,6 +246,30 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
        termStrategy.setDistErrPct(distanceErrorPct());
        defaultStrategy = resolveStrategy(strategyName);
    }

    @Override
    public void validateCompatible(MappedFieldType fieldType, List<String> conflicts) {
        super.validateCompatible(fieldType, conflicts);
        GeoShapeFieldType other = (GeoShapeFieldType)fieldType;
        // prevent user from changing strategies
        if (strategyName().equals(other.strategyName()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] has different strategy");
        }

        // prevent user from changing trees (changes encoding)
        if (tree().equals(other.tree()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] has different tree");
        }

        // TODO we should allow this, but at the moment levels is used to build bookkeeping variables
        // in lucene's SpatialPrefixTree implementations, need a patch to correct that first
        if (treeLevels() != other.treeLevels()) {
            conflicts.add("mapper [" + names().fullName() + "] has different tree_levels");
        }
        if (precisionInMeters() != other.precisionInMeters()) {
            conflicts.add("mapper [" + names().fullName() + "] has different precision");
        }
    }

    private static int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) {
        if (treeLevels > 0 || precisionInMeters >= 0) {
@@ -379,48 +403,6 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
        return null;
    }

    @Override
    public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
        super.merge(mergeWith, mergeResult);
        if (!this.getClass().equals(mergeWith.getClass())) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different field type");
            return;
        }
        final GeoShapeFieldMapper fieldMergeWith = (GeoShapeFieldMapper) mergeWith;

        // prevent user from changing strategies
        if (fieldType().strategyName().equals(fieldMergeWith.fieldType().strategyName()) == false) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different strategy");
        }

        // prevent user from changing trees (changes encoding)
        if (fieldType().tree().equals(fieldMergeWith.fieldType().tree()) == false) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different tree");
        }

        // TODO we should allow this, but at the moment levels is used to build bookkeeping variables
        // in lucene's SpatialPrefixTree implementations, need a patch to correct that first
        if (fieldType().treeLevels() != fieldMergeWith.fieldType().treeLevels()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different tree_levels");
        }
        if (fieldType().precisionInMeters() != fieldMergeWith.fieldType().precisionInMeters()) {
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different precision");
        }

        // bail if there were merge conflicts
        if (mergeResult.hasConflicts() || mergeResult.simulate()) {
            return;
        }

        // change distance error percent
        this.fieldType = fieldType().clone();
        this.fieldType().setDistanceErrorPct(fieldMergeWith.fieldType().distanceErrorPct());
        // change orientation - this is allowed because existing dateline spanning shapes
        // have already been unwound and segmented
        this.fieldType().setOrientation(fieldMergeWith.fieldType().orientation());
        fieldType().freeze();
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
    }
@@ -315,18 +315,6 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
        return builder;
    }

    @Override
    public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
        FieldNamesFieldMapper fieldNamesMapperMergeWith = (FieldNamesFieldMapper)mergeWith;
        if (!mergeResult.simulate()) {
            if (fieldNamesMapperMergeWith.fieldType().isEnabled() != fieldType().isEnabled()) {
                this.fieldType = fieldType().clone();
                fieldType().setEnabled(fieldNamesMapperMergeWith.fieldType().isEnabled());
                fieldType().freeze();
            }
        }
    }

    @Override
    public boolean isGenerated() {
        return true;
@@ -346,15 +346,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper

        if (!mergeResult.simulate()) {
            ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
            this.fieldType = this.fieldType().clone();
            if (fieldMergeWith.customFieldDataSettings != null) {
                if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
                    this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
                    this.fieldType().setFieldDataType(new FieldDataType(defaultFieldDataType().getType(),
                            builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
                    ));
                }
            }
            this.fieldType = fieldMergeWith.fieldType().clone();
            this.fieldType().freeze();
        }
    }
@@ -499,6 +499,6 @@ public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {
                .endObject().endObject().string();
        stage2 = parser.parse(stage2Mapping);
        mergeResult = stage1.merge(stage2.mapping(), false);
        assertThat(mergeResult.hasConflicts(), equalTo(false));
        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
    }
}
@@ -19,6 +19,7 @@

package org.elasticsearch.index.mapper.update;

import com.carrotsearch.randomizedtesting.annotations.Seed;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
@@ -182,9 +183,11 @@ public class UpdateMappingOnClusterTests extends ElasticsearchIntegrationTest {
    }

    @Test
    public void testUpdateTimestamp() throws IOException {
    @Seed(value = "12345678")
    public void testUpdateTimestamp() throws Exception {
        boolean enabled = randomBoolean();
        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "lazy").field("format", "doc_values").endObject().field("store", "no").endObject()
                .startObject("_timestamp").field("enabled", enabled).startObject("fielddata").field("loading", "lazy").field("format", "doc_values").endObject().field("store", "no").endObject()
                .endObject().endObject();
        client().admin().indices().prepareCreate("test").addMapping("type", mapping).get();
        GetMappingsResponse appliedMappings = client().admin().indices().prepareGetMappings("test").get();
@@ -193,7 +196,7 @@ public class UpdateMappingOnClusterTests extends ElasticsearchIntegrationTest {
        assertThat((String)((LinkedHashMap) timestampMapping.get("fielddata")).get("loading"), equalTo("lazy"));
        assertThat((String)((LinkedHashMap) timestampMapping.get("fielddata")).get("format"), equalTo("doc_values"));
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "no").endObject()
                .startObject("_timestamp").field("enabled", enabled).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "no").endObject()
                .endObject().endObject();
        PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
        appliedMappings = client().admin().indices().prepareGetMappings("test").get();