Merge pull request #13206 from rjernst/mappings_broken

Fix numerous checks for equality and compatibility in mapper field types

commit 768bfb7d1d
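For orientation before the hunks below: the recurring pattern in this commit is to compare mapping-visible identity rather than object identity, so NamedAnalyzer gains equals/hashCode based on its registered name and similarities are compared by provider name. A minimal, hypothetical sketch of that name-based equality idea (the NamedExample class is illustrative only, not part of the change):

    import java.util.Objects;

    final class NamedExample {
        private final String name;

        NamedExample(String name) {
            this.name = name;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof NamedExample)) return false;
            // identity is the registered name, not the wrapped object
            return Objects.equals(name, ((NamedExample) o).name);
        }

        @Override
        public int hashCode() {
            return Objects.hash(name); // kept consistent with equals
        }
    }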
@@ -22,6 +22,8 @@ package org.elasticsearch.index.analysis;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;

import java.util.Objects;

/**
 * Named analyzer is an analyzer wrapper around an actual analyzer ({@link #analyzer} that is associated
 * with a name ({@link #name()}.

@@ -104,4 +106,17 @@ public class NamedAnalyzer extends DelegatingAnalyzerWrapper {
                throw new IllegalStateException("NamedAnalyzer cannot be wrapped with a wrapper, only a delegator");
            }
        };

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof NamedAnalyzer)) return false;
        NamedAnalyzer that = (NamedAnalyzer) o;
        return Objects.equals(name, that.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name);
    }
}
@@ -192,13 +192,24 @@ public abstract class MappedFieldType extends FieldType {
    public boolean equals(Object o) {
        if (!super.equals(o)) return false;
        MappedFieldType fieldType = (MappedFieldType) o;
        // check similarity first because we need to check the name, and it might be null
        // TODO: SimilarityProvider should have equals?
        if (similarity == null || fieldType.similarity == null) {
            if (similarity != fieldType.similarity) {
                return false;
            }
        } else {
            if (Objects.equals(similarity.name(), fieldType.similarity.name()) == false) {
                return false;
            }
        }

        return boost == fieldType.boost &&
            docValues == fieldType.docValues &&
            Objects.equals(names, fieldType.names) &&
            Objects.equals(indexAnalyzer, fieldType.indexAnalyzer) &&
            Objects.equals(searchAnalyzer, fieldType.searchAnalyzer) &&
            Objects.equals(searchQuoteAnalyzer(), fieldType.searchQuoteAnalyzer()) &&
            Objects.equals(similarity, fieldType.similarity) &&
            Objects.equals(normsLoading, fieldType.normsLoading) &&
            Objects.equals(fieldDataType, fieldType.fieldDataType) &&
            Objects.equals(nullValue, fieldType.nullValue) &&

@@ -207,10 +218,11 @@ public abstract class MappedFieldType extends FieldType {

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), names, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer, similarity, normsLoading, fieldDataType, nullValue, nullValueAsString);
        return Objects.hash(super.hashCode(), names, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
            similarity == null ? null : similarity.name(), normsLoading, fieldDataType, nullValue, nullValueAsString);
    }

    // norelease: we need to override freeze() and add safety checks that all settings are actually set
    // norelease: we need to override freeze() and add safety checks that all settings are actually set

    /** Returns the name of this type, as would be specified in mapping properties */
    public abstract String typeName();

@@ -234,51 +246,48 @@ public abstract class MappedFieldType extends FieldType {
        boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE;
        // TODO: should be validating if index options go "up" (but "down" is ok)
        if (indexed != mergeWithIndexed || tokenized() != other.tokenized()) {
            conflicts.add("mapper [" + names().fullName() + "] has different index values");
            conflicts.add("mapper [" + names().fullName() + "] has different [index] values");
        }
        if (stored() != other.stored()) {
            conflicts.add("mapper [" + names().fullName() + "] has different store values");
            conflicts.add("mapper [" + names().fullName() + "] has different [store] values");
        }
        if (hasDocValues() == false && other.hasDocValues()) {
            // don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set
            // when the doc_values field data format is configured
            conflicts.add("mapper [" + names().fullName() + "] has different doc_values values");
            conflicts.add("mapper [" + names().fullName() + "] has different [doc_values] values, cannot change from disabled to enabled");
        }
        if (omitNorms() && !other.omitNorms()) {
            conflicts.add("mapper [" + names().fullName() + "] cannot enable norms (`norms.enabled`)");
        }
        if (tokenized() != other.tokenized()) {
            conflicts.add("mapper [" + names().fullName() + "] has different tokenize values");
            conflicts.add("mapper [" + names().fullName() + "] has different [omit_norms] values, cannot change from disable to enabled");
        }
        if (storeTermVectors() != other.storeTermVectors()) {
            conflicts.add("mapper [" + names().fullName() + "] has different store_term_vector values");
            conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector] values");
        }
        if (storeTermVectorOffsets() != other.storeTermVectorOffsets()) {
            conflicts.add("mapper [" + names().fullName() + "] has different store_term_vector_offsets values");
            conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_offsets] values");
        }
        if (storeTermVectorPositions() != other.storeTermVectorPositions()) {
            conflicts.add("mapper [" + names().fullName() + "] has different store_term_vector_positions values");
            conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_positions] values");
        }
        if (storeTermVectorPayloads() != other.storeTermVectorPayloads()) {
            conflicts.add("mapper [" + names().fullName() + "] has different store_term_vector_payloads values");
            conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_payloads] values");
        }

        // null and "default"-named index analyzers both mean the default is used
        if (indexAnalyzer() == null || "default".equals(indexAnalyzer().name())) {
            if (other.indexAnalyzer() != null && "default".equals(other.indexAnalyzer().name()) == false) {
                conflicts.add("mapper [" + names().fullName() + "] has different analyzer");
                conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]");
            }
        } else if (other.indexAnalyzer() == null || "default".equals(other.indexAnalyzer().name())) {
            conflicts.add("mapper [" + names().fullName() + "] has different analyzer");
            conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]");
        } else if (indexAnalyzer().name().equals(other.indexAnalyzer().name()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] has different analyzer");
            conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]");
        }

        if (!names().indexName().equals(other.names().indexName())) {
            conflicts.add("mapper [" + names().fullName() + "] has different index_name");
            conflicts.add("mapper [" + names().fullName() + "] has different [index_name]");
        }
        if (Objects.equals(similarity(), other.similarity()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] has different similarity");
            conflicts.add("mapper [" + names().fullName() + "] has different [similarity]");
        }

        if (strict) {

@@ -289,11 +298,14 @@ public abstract class MappedFieldType extends FieldType {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types.");
        }
        if (normsLoading() != other.normsLoading()) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [norms].loading across all types.");
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [norms.loading] across all types.");
        }
        if (Objects.equals(searchAnalyzer(), other.searchAnalyzer()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types.");
        }
        if (Objects.equals(searchQuoteAnalyzer(), other.searchQuoteAnalyzer()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_quote_analyzer] across all types.");
        }
        if (Objects.equals(fieldDataType(), other.fieldDataType()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types.");
        }
@@ -134,6 +134,15 @@ public class BinaryFieldMapper extends FieldMapper {
            return CONTENT_TYPE;
        }

        @Override
        public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
            super.checkCompatibility(fieldType, conflicts, strict);
            BinaryFieldType other = (BinaryFieldType)fieldType;
            if (tryUncompressing() != other.tryUncompressing()) {
                conflicts.add("mapper [" + names().fullName() + "] has different [try_uncompressing] (IMPOSSIBLE)");
            }
        }

        public boolean tryUncompressing() {
            return tryUncompressing;
        }
@@ -57,6 +57,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.SortedMap;

@@ -237,6 +238,27 @@ public class CompletionFieldMapper extends FieldMapper {
            this.contextMapping = ref.contextMapping;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof CompletionFieldType)) return false;
            if (!super.equals(o)) return false;
            CompletionFieldType fieldType = (CompletionFieldType) o;
            return analyzingSuggestLookupProvider.getPreserveSep() == fieldType.analyzingSuggestLookupProvider.getPreserveSep() &&
                analyzingSuggestLookupProvider.getPreservePositionsIncrements() == fieldType.analyzingSuggestLookupProvider.getPreservePositionsIncrements() &&
                analyzingSuggestLookupProvider.hasPayloads() == fieldType.analyzingSuggestLookupProvider.hasPayloads() &&
                Objects.equals(getContextMapping(), fieldType.getContextMapping());
        }

        @Override
        public int hashCode() {
            return Objects.hash(super.hashCode(),
                analyzingSuggestLookupProvider.getPreserveSep(),
                analyzingSuggestLookupProvider.getPreservePositionsIncrements(),
                analyzingSuggestLookupProvider.hasPayloads(),
                getContextMapping());
        }

        @Override
        public CompletionFieldType clone() {
            return new CompletionFieldType(this);

@@ -252,16 +274,16 @@ public class CompletionFieldMapper extends FieldMapper {
            super.checkCompatibility(fieldType, conflicts, strict);
            CompletionFieldType other = (CompletionFieldType)fieldType;
            if (analyzingSuggestLookupProvider.hasPayloads() != other.analyzingSuggestLookupProvider.hasPayloads()) {
                conflicts.add("mapper [" + names().fullName() + "] has different payload values");
                conflicts.add("mapper [" + names().fullName() + "] has different [payload] values");
            }
            if (analyzingSuggestLookupProvider.getPreservePositionsIncrements() != other.analyzingSuggestLookupProvider.getPreservePositionsIncrements()) {
                conflicts.add("mapper [" + names().fullName() + "] has different 'preserve_position_increments' values");
                conflicts.add("mapper [" + names().fullName() + "] has different [preserve_position_increments] values");
            }
            if (analyzingSuggestLookupProvider.getPreserveSep() != other.analyzingSuggestLookupProvider.getPreserveSep()) {
                conflicts.add("mapper [" + names().fullName() + "] has different 'preserve_separators' values");
                conflicts.add("mapper [" + names().fullName() + "] has different [preserve_separators] values");
            }
            if(!ContextMapping.mappingsAreEqual(getContextMapping(), other.getContextMapping())) {
                conflicts.add("mapper [" + names().fullName() + "] has different 'context_mapping' values");
                conflicts.add("mapper [" + names().fullName() + "] has different [context_mapping] values");
            }
        }
@@ -350,20 +350,26 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
            super.checkCompatibility(fieldType, conflicts, strict);
            GeoPointFieldType other = (GeoPointFieldType)fieldType;
            if (isLatLonEnabled() != other.isLatLonEnabled()) {
                conflicts.add("mapper [" + names().fullName() + "] has different lat_lon");
                conflicts.add("mapper [" + names().fullName() + "] has different [lat_lon]");
            }
            if (isGeohashEnabled() != other.isGeohashEnabled()) {
                conflicts.add("mapper [" + names().fullName() + "] has different geohash");
                conflicts.add("mapper [" + names().fullName() + "] has different [geohash]");
            }
            if (geohashPrecision() != other.geohashPrecision()) {
                conflicts.add("mapper [" + names().fullName() + "] has different geohash_precision");
                conflicts.add("mapper [" + names().fullName() + "] has different [geohash_precision]");
            }
            if (isGeohashPrefixEnabled() != other.isGeohashPrefixEnabled()) {
                conflicts.add("mapper [" + names().fullName() + "] has different geohash_prefix");
                conflicts.add("mapper [" + names().fullName() + "] has different [geohash_prefix]");
            }
            if (isLatLonEnabled() && other.isLatLonEnabled() &&
                latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
                conflicts.add("mapper [" + names().fullName() + "] has different precision_step");
                conflicts.add("mapper [" + names().fullName() + "] has different [precision_step]");
            }
            if (ignoreMalformed() != other.ignoreMalformed()) {
                conflicts.add("mapper [" + names().fullName() + "] has different [ignore_malformed]");
            }
            if (coerce() != other.coerce()) {
                conflicts.add("mapper [" + names().fullName() + "] has different [coerce]");
            }
        }
@@ -280,21 +280,30 @@ public class GeoShapeFieldMapper extends FieldMapper {
            GeoShapeFieldType other = (GeoShapeFieldType)fieldType;
            // prevent user from changing strategies
            if (strategyName().equals(other.strategyName()) == false) {
                conflicts.add("mapper [" + names().fullName() + "] has different strategy");
                conflicts.add("mapper [" + names().fullName() + "] has different [strategy]");
            }

            // prevent user from changing trees (changes encoding)
            if (tree().equals(other.tree()) == false) {
                conflicts.add("mapper [" + names().fullName() + "] has different tree");
                conflicts.add("mapper [" + names().fullName() + "] has different [tree]");
            }

            // TODO we should allow this, but at the moment levels is used to build bookkeeping variables
            // in lucene's SpatialPrefixTree implementations, need a patch to correct that first
            if (treeLevels() != other.treeLevels()) {
                conflicts.add("mapper [" + names().fullName() + "] has different tree_levels");
                conflicts.add("mapper [" + names().fullName() + "] has different [tree_levels]");
            }
            if (precisionInMeters() != other.precisionInMeters()) {
                conflicts.add("mapper [" + names().fullName() + "] has different precision");
                conflicts.add("mapper [" + names().fullName() + "] has different [precision]");
            }

            if (strict) {
                if (orientation() != other.orientation()) {
                    conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [orientation] across all types.");
                }
                if (distanceErrorPct() != other.distanceErrorPct()) {
                    conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [distance_error_pct] across all types.");
                }
            }
        }
@@ -167,6 +167,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {

        @Override
        public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
            super.checkCompatibility(fieldType, conflicts, strict);
            if (strict) {
                FieldNamesFieldType other = (FieldNamesFieldType)fieldType;
                if (isEnabled() != other.isEnabled()) {
@@ -182,14 +182,14 @@ public class FieldTypeLookupTests extends ESTestCase {
            lookup.checkCompatibility(newList(f3), false);
            fail("expected conflict");
        } catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("has different store values"));
            assertTrue(e.getMessage().contains("has different [store] values"));
        }
        // even with updateAllTypes == true, incompatible
        try {
            lookup.checkCompatibility(newList(f3), true);
            fail("expected conflict");
        } catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("has different store values"));
            assertTrue(e.getMessage().contains("has different [store] values"));
        }
    }
@@ -18,57 +18,197 @@
 */
package org.elasticsearch.index.mapper;

import org.elasticsearch.common.lucene.Lucene;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.similarity.BM25SimilarityProvider;
import org.elasticsearch.test.ESTestCase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/** Base test case for subclasses of MappedFieldType */
public abstract class FieldTypeTestCase extends ESTestCase {

    /** Abstraction for mutating a property of a MappedFieldType */
    public static abstract class Modifier {
        /** The name of the property that is being modified. Used in test failure messages. */
        public final String property;
        /** true if this modifier only makes types incompatible in strict mode, false otherwise */
        public final boolean strictOnly;
        /** true if reversing the order of checkCompatibility arguments should result in the same conflicts, false otherwise **/
        public final boolean symmetric;

        public Modifier(String property, boolean strictOnly, boolean symmetric) {
            this.property = property;
            this.strictOnly = strictOnly;
            this.symmetric = symmetric;
        }

        /** Modifies the property */
        public abstract void modify(MappedFieldType ft);
        /**
         * Optional method to implement that allows the field type that will be compared to be modified,
         * so that it does not have the default value for the property being modified.
         */
        public void normalizeOther(MappedFieldType other) {}
    }

    private final List<Modifier> modifiers = new ArrayList<>(Arrays.asList(
        new Modifier("boost", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setBoost(1.1f);
            }
        },
        new Modifier("doc_values", false, false) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setHasDocValues(ft.hasDocValues() == false);
            }
        },
        new Modifier("analyzer", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setIndexAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
            }
        },
        new Modifier("analyzer", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setIndexAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
            }
            @Override
            public void normalizeOther(MappedFieldType other) {
                other.setIndexAnalyzer(new NamedAnalyzer("foo", new StandardAnalyzer()));
            }
        },
        new Modifier("search_analyzer", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setSearchAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
            }
        },
        new Modifier("search_analyzer", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setSearchAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
            }
            @Override
            public void normalizeOther(MappedFieldType other) {
                other.setSearchAnalyzer(new NamedAnalyzer("foo", new StandardAnalyzer()));
            }
        },
        new Modifier("search_quote_analyzer", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setSearchQuoteAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
            }
        },
        new Modifier("search_quote_analyzer", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setSearchQuoteAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
            }
            @Override
            public void normalizeOther(MappedFieldType other) {
                other.setSearchQuoteAnalyzer(new NamedAnalyzer("foo", new StandardAnalyzer()));
            }
        },
        new Modifier("similarity", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setSimilarity(new BM25SimilarityProvider("foo", Settings.EMPTY));
            }
        },
        new Modifier("similarity", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setSimilarity(new BM25SimilarityProvider("foo", Settings.EMPTY));
            }
            @Override
            public void normalizeOther(MappedFieldType other) {
                other.setSimilarity(new BM25SimilarityProvider("bar", Settings.EMPTY));
            }
        },
        new Modifier("norms.loading", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setNormsLoading(MappedFieldType.Loading.LAZY);
            }
        },
        new Modifier("fielddata", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setFieldDataType(new FieldDataType("foo", Settings.builder().put("loading", "eager").build()));
            }
        },
        new Modifier("null_value", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ft.setNullValue(dummyNullValue);
            }
        }
    ));

    /**
     * Add a mutation that will be tested for all expected semantics of equality and compatibility.
     * These should be added in an @Before method.
     */
    protected void addModifier(Modifier modifier) {
        modifiers.add(modifier);
    }

    private Object dummyNullValue = "dummyvalue";

    /** Sets the null value used by the modifier for null value testing. This should be set in an @Before method. */
    protected void setDummyNullValue(Object value) {
        dummyNullValue = value;
    }

    /** Create a default constructed fieldtype */
    protected abstract MappedFieldType createDefaultFieldType();

    MappedFieldType createNamedDefaultFieldType(String name) {
    MappedFieldType createNamedDefaultFieldType() {
        MappedFieldType fieldType = createDefaultFieldType();
        fieldType.setNames(new MappedFieldType.Names(name));
        fieldType.setNames(new MappedFieldType.Names("foo"));
        return fieldType;
    }

    /** A dummy null value to use when modifying null value */
    protected Object dummyNullValue() {
        return "dummyvalue";
    }

    /** Returns the number of properties that can be modified for the fieldtype */
    protected int numProperties() {
        return 10;
    }

    /** Modifies a property, identified by propNum, on the given fieldtype */
    protected void modifyProperty(MappedFieldType ft, int propNum) {
        switch (propNum) {
            case 0: ft.setNames(new MappedFieldType.Names("dummy")); break;
            case 1: ft.setBoost(1.1f); break;
            case 2: ft.setHasDocValues(!ft.hasDocValues()); break;
            case 3: ft.setIndexAnalyzer(Lucene.STANDARD_ANALYZER); break;
            case 4: ft.setSearchAnalyzer(Lucene.STANDARD_ANALYZER); break;
            case 5: ft.setSearchQuoteAnalyzer(Lucene.STANDARD_ANALYZER); break;
            case 6: ft.setSimilarity(new BM25SimilarityProvider("foo", Settings.EMPTY)); break;
            case 7: ft.setNormsLoading(MappedFieldType.Loading.LAZY); break;
            case 8: ft.setFieldDataType(new FieldDataType("foo", Settings.builder().put("loading", "eager").build())); break;
            case 9: ft.setNullValue(dummyNullValue()); break;
            default: fail("unknown fieldtype property number " + propNum);
    // TODO: remove this once toString is no longer final on FieldType...
    protected void assertFieldTypeEquals(String property, MappedFieldType ft1, MappedFieldType ft2) {
        if (ft1.equals(ft2) == false) {
            fail("Expected equality, testing property " + property + "\nexpected: " + toString(ft1) + "; \nactual: " + toString(ft2) + "\n");
        }
    }

    // TODO: remove this once toString is no longer final on FieldType...
    protected void assertEquals(int i, MappedFieldType ft1, MappedFieldType ft2) {
        assertEquals("prop " + i + "\nexpected: " + toString(ft1) + "; \nactual: " + toString(ft2), ft1, ft2);
    protected void assertFieldTypeNotEquals(String property, MappedFieldType ft1, MappedFieldType ft2) {
        if (ft1.equals(ft2)) {
            fail("Expected inequality, testing property " + property + "\nfirst: " + toString(ft1) + "; \nsecond: " + toString(ft2) + "\n");
        }
    }

    protected void assertCompatible(String msg, MappedFieldType ft1, MappedFieldType ft2, boolean strict) {
        List<String> conflicts = new ArrayList<>();
        ft1.checkCompatibility(ft2, conflicts, strict);
        assertTrue("Found conflicts for " + msg + ": " + conflicts, conflicts.isEmpty());
    }

    protected void assertNotCompatible(String msg, MappedFieldType ft1, MappedFieldType ft2, boolean strict, String... messages) {
        assert messages.length != 0;
        List<String> conflicts = new ArrayList<>();
        ft1.checkCompatibility(ft2, conflicts, strict);
        for (String message : messages) {
            boolean found = false;
            for (String conflict : conflicts) {
                if (conflict.contains(message)) {
                    found = true;
                }
            }
            assertTrue("Missing conflict for " + msg + ": [" + message + "] in conflicts " + conflicts, found);
        }
    }

    protected String toString(MappedFieldType ft) {
@@ -88,45 +228,50 @@ public abstract class FieldTypeTestCase extends ESTestCase {
    }

    public void testClone() {
        MappedFieldType fieldType = createNamedDefaultFieldType("foo");
        MappedFieldType fieldType = createNamedDefaultFieldType();
        MappedFieldType clone = fieldType.clone();
        assertNotSame(clone, fieldType);
        assertEquals(clone.getClass(), fieldType.getClass());
        assertEquals(clone, fieldType);
        assertEquals(clone, clone.clone()); // transitivity

        for (int i = 0; i < numProperties(); ++i) {
            fieldType = createNamedDefaultFieldType("foo");
            modifyProperty(fieldType, i);
        for (Modifier modifier : modifiers) {
            fieldType = createNamedDefaultFieldType();
            modifier.modify(fieldType);
            clone = fieldType.clone();
            assertNotSame(clone, fieldType);
            assertEquals(i, clone, fieldType);
            assertFieldTypeEquals(modifier.property, clone, fieldType);
        }
    }

    public void testEquals() {
        MappedFieldType ft1 = createNamedDefaultFieldType("foo");
        MappedFieldType ft2 = createNamedDefaultFieldType("foo");
        MappedFieldType ft1 = createNamedDefaultFieldType();
        MappedFieldType ft2 = createNamedDefaultFieldType();
        assertEquals(ft1, ft1); // reflexive
        assertEquals(ft1, ft2); // symmetric
        assertEquals(ft2, ft1);
        assertEquals(ft1.hashCode(), ft2.hashCode());

        for (int i = 0; i < numProperties(); ++i) {
            ft2 = createNamedDefaultFieldType("foo");
            modifyProperty(ft2, i);
            assertNotEquals(ft1, ft2);
            assertNotEquals(ft1.hashCode(), ft2.hashCode());
        for (Modifier modifier : modifiers) {
            ft1 = createNamedDefaultFieldType();
            ft2 = createNamedDefaultFieldType();
            modifier.modify(ft2);
            assertFieldTypeNotEquals(modifier.property, ft1, ft2);
            assertNotEquals("hash code for modified property " + modifier.property, ft1.hashCode(), ft2.hashCode());
            // modify the same property and they are equal again
            modifier.modify(ft1);
            assertFieldTypeEquals(modifier.property, ft1, ft2);
            assertEquals("hash code for modified property " + modifier.property, ft1.hashCode(), ft2.hashCode());
        }
    }

    public void testFreeze() {
        for (int i = 0; i < numProperties(); ++i) {
            MappedFieldType fieldType = createNamedDefaultFieldType("foo");
        for (Modifier modifier : modifiers) {
            MappedFieldType fieldType = createNamedDefaultFieldType();
            fieldType.freeze();
            try {
                modifyProperty(fieldType, i);
                fail("expected already frozen exception for property " + i);
                modifier.modify(fieldType);
                fail("expected already frozen exception for property " + modifier.property);
            } catch (IllegalStateException e) {
                assertTrue(e.getMessage().contains("already frozen"));
            }
@@ -134,7 +279,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
    }

    public void testCheckTypeName() {
        final MappedFieldType fieldType = createNamedDefaultFieldType("foo");
        final MappedFieldType fieldType = createNamedDefaultFieldType();
        List<String> conflicts = new ArrayList<>();
        fieldType.checkTypeName(fieldType, conflicts);
        assertTrue(conflicts.toString(), conflicts.isEmpty());

@@ -164,4 +309,46 @@ public abstract class FieldTypeTestCase extends ESTestCase {
        assertTrue(conflicts.get(0).contains("cannot be changed from type"));
        assertEquals(1, conflicts.size());
    }

    public void testCheckCompatibility() {
        MappedFieldType ft1 = createNamedDefaultFieldType();
        MappedFieldType ft2 = createNamedDefaultFieldType();
        assertCompatible("default", ft1, ft2, true);
        assertCompatible("default", ft1, ft2, false);
        assertCompatible("default", ft2, ft1, true);
        assertCompatible("default", ft2, ft1, false);

        for (Modifier modifier : modifiers) {
            ft1 = createNamedDefaultFieldType();
            ft2 = createNamedDefaultFieldType();
            modifier.normalizeOther(ft1);
            modifier.modify(ft2);
            if (modifier.strictOnly) {
                String[] conflicts = {
                    "mapper [foo] is used by multiple types",
                    "update [" + modifier.property + "]"
                };
                assertCompatible(modifier.property, ft1, ft2, false);
                assertNotCompatible(modifier.property, ft1, ft2, true, conflicts);
                assertCompatible(modifier.property, ft2, ft1, false); // always symmetric when not strict
                if (modifier.symmetric) {
                    assertNotCompatible(modifier.property, ft2, ft1, true, conflicts);
                } else {
                    assertCompatible(modifier.property, ft2, ft1, true);
                }
            } else {
                // not compatible whether strict or not
                String conflict = "different [" + modifier.property + "]";
                assertNotCompatible(modifier.property, ft1, ft2, true, conflict);
                assertNotCompatible(modifier.property, ft1, ft2, false, conflict);
                if (modifier.symmetric) {
                    assertNotCompatible(modifier.property, ft2, ft1, true, conflict);
                    assertNotCompatible(modifier.property, ft2, ft1, false, conflict);
                } else {
                    assertCompatible(modifier.property, ft2, ft1, true);
                    assertCompatible(modifier.property, ft2, ft1, false);
                }
            }
        }
    }
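The Modifier harness above is exercised by the concrete field type tests in the hunks that follow: each subclass registers its extra properties with addModifier(...) in an @Before method. A condensed, hypothetical example of that shape (ExampleFieldTypeTests and the "example_boost" label are illustrative only; the real subclasses appear below):

    public class ExampleFieldTypeTests extends FieldTypeTestCase {
        @Override
        protected MappedFieldType createDefaultFieldType() {
            return new StringFieldMapper.StringFieldType(); // any concrete MappedFieldType
        }

        @Before
        public void setupProperties() {
            // strictOnly = true: only a conflict when update_all_types is not set;
            // symmetric = true: swapping the checkCompatibility arguments reports the same conflict.
            addModifier(new Modifier("example_boost", true, true) {
                @Override
                public void modify(MappedFieldType ft) {
                    ft.setBoost(2.0f); // flip the property away from its default
                }
            });
        }
    }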
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

public class BinaryFieldTypeTests extends FieldTypeTestCase {

@@ -28,17 +29,14 @@ public class BinaryFieldTypeTests extends FieldTypeTestCase {
        return new BinaryFieldMapper.BinaryFieldType();
    }

    @Override
    protected int numProperties() {
        return 1 + super.numProperties();
    }

    @Override
    protected void modifyProperty(MappedFieldType ft, int propNum) {
        BinaryFieldMapper.BinaryFieldType bft = (BinaryFieldMapper.BinaryFieldType)ft;
        switch (propNum) {
            case 0: bft.setTryUncompressing(!bft.tryUncompressing()); break;
            default: super.modifyProperty(ft, propNum - 1);
        }
    @Before
    public void setupProperties() {
        addModifier(new Modifier("try_uncompressing", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                BinaryFieldMapper.BinaryFieldType bft = (BinaryFieldMapper.BinaryFieldType)ft;
                bft.setTryUncompressing(!bft.tryUncompressing());
            }
        });
    }
}
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

public class BooleanFieldTypeTests extends FieldTypeTestCase {
    @Override

@@ -27,8 +28,8 @@ public class BooleanFieldTypeTests extends FieldTypeTestCase {
        return new BooleanFieldMapper.BooleanFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return true;
    @Before
    public void setupProperties() {
        setDummyNullValue(true);
    }
}
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

public class ByteFieldTypeTests extends FieldTypeTestCase {
    @Override

@@ -27,8 +28,8 @@ public class ByteFieldTypeTests extends FieldTypeTestCase {
        return new ByteFieldMapper.ByteFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return (byte)10;
    @Before
    public void setupProperties() {
        setDummyNullValue((byte)10);
    }
}
@@ -20,10 +20,53 @@ package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.suggest.completion.AnalyzingCompletionLookupProvider;
import org.elasticsearch.search.suggest.context.ContextBuilder;
import org.elasticsearch.search.suggest.context.ContextMapping;
import org.junit.Before;

import java.util.SortedMap;
import java.util.TreeMap;

public class CompletionFieldTypeTests extends FieldTypeTestCase {
    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new CompletionFieldMapper.CompletionFieldType();
        CompletionFieldMapper.CompletionFieldType ft = new CompletionFieldMapper.CompletionFieldType();
        ft.setProvider(new AnalyzingCompletionLookupProvider(true, false, true, false));
        return ft;
    }

    @Before
    public void setupProperties() {
        addModifier(new Modifier("preserve_separators", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
                cft.setProvider(new AnalyzingCompletionLookupProvider(false, false, true, false));
            }
        });
        addModifier(new Modifier("preserve_position_increments", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
                cft.setProvider(new AnalyzingCompletionLookupProvider(true, false, false, false));
            }
        });
        addModifier(new Modifier("payload", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
                cft.setProvider(new AnalyzingCompletionLookupProvider(true, false, true, true));
            }
        });
        addModifier(new Modifier("context_mapping", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
                SortedMap<String, ContextMapping> contextMapping = new TreeMap<>();
                contextMapping.put("foo", ContextBuilder.location("foo").build());
                cft.setContextMapping(contextMapping);
            }
        });
    }
}
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.core;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

import java.util.Locale;
import java.util.concurrent.TimeUnit;

@@ -31,23 +32,26 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
        return new DateFieldMapper.DateFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return 10;
    }

    @Override
    protected int numProperties() {
        return 2 + super.numProperties();
    }

    @Override
    protected void modifyProperty(MappedFieldType ft, int propNum) {
        DateFieldMapper.DateFieldType dft = (DateFieldMapper.DateFieldType)ft;
        switch (propNum) {
            case 0: dft.setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT)); break;
            case 1: dft.setTimeUnit(TimeUnit.HOURS); break;
            default: super.modifyProperty(ft, propNum - 2);
        }
    @Before
    public void setupProperties() {
        setDummyNullValue(10);
        addModifier(new Modifier("format", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((DateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT));
            }
        });
        addModifier(new Modifier("locale", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((DateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA));
            }
        });
        addModifier(new Modifier("numeric_resolution", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((DateFieldMapper.DateFieldType)ft).setTimeUnit(TimeUnit.HOURS);
            }
        });
    }
}
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

public class DoubleFieldTypeTests extends FieldTypeTestCase {
    @Override

@@ -27,8 +28,8 @@ public class DoubleFieldTypeTests extends FieldTypeTestCase {
        return new DoubleFieldMapper.DoubleFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return 10.0D;
    @Before
    public void setupProperties() {
        setDummyNullValue(10.0D);
    }
}
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

public class FloatFieldTypeTests extends FieldTypeTestCase {
    @Override

@@ -27,8 +28,8 @@ public class FloatFieldTypeTests extends FieldTypeTestCase {
        return new DoubleFieldMapper.DoubleFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return 10.0;
    @Before
    public void setupProperties() {
        setDummyNullValue(10.0);
    }
}
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

public class IntegerFieldTypeTests extends FieldTypeTestCase {
    @Override

@@ -27,8 +28,8 @@ public class IntegerFieldTypeTests extends FieldTypeTestCase {
        return new IntegerFieldMapper.IntegerFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return 10;
    @Before
    public void setupProperties() {
        setDummyNullValue(10);
    }
}
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

public class LongFieldTypeTests extends FieldTypeTestCase {
    @Override

@@ -27,8 +28,8 @@ public class LongFieldTypeTests extends FieldTypeTestCase {
        return new LongFieldMapper.LongFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return (long)10;
    @Before
    public void setupProperties() {
        setDummyNullValue((long)10);
    }
}
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

public class ShortFieldTypeTests extends FieldTypeTestCase {
    @Override

@@ -27,8 +28,8 @@ public class ShortFieldTypeTests extends FieldTypeTestCase {
        return new ShortFieldMapper.ShortFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return (short)10;
    @Before
    public void setupProperties() {
        setDummyNullValue((short)10);
    }
}
@@ -626,9 +626,10 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {

        MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
        assertThat(mergeResult.hasConflicts(), equalTo(true));
        assertThat(mergeResult.buildConflicts().length, equalTo(1));
        assertThat(mergeResult.buildConflicts().length, equalTo(2));
        // todo better way of checking conflict?
        assertThat("mapper [point] has different lat_lon", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
        assertThat("mapper [point] has different [lat_lon]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
        assertThat("mapper [point] has different [ignore_malformed]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));

        // correct mapping and ensure no failures
        stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
@@ -22,6 +22,7 @@ import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.junit.Before;

public class GeoPointFieldTypeTests extends FieldTypeTestCase {
    @Override

@@ -29,20 +30,33 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {
        return new GeoPointFieldMapper.GeoPointFieldType();
    }

    @Override
    protected int numProperties() {
        return 4 + super.numProperties();
    }

    @Override
    protected void modifyProperty(MappedFieldType ft, int propNum) {
        GeoPointFieldMapper.GeoPointFieldType gft = (GeoPointFieldMapper.GeoPointFieldType)ft;
        switch (propNum) {
            case 0: gft.setGeohashEnabled(new StringFieldMapper.StringFieldType(), 1, true); break;
            case 1: gft.setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType()); break;
            case 2: gft.setIgnoreMalformed(!gft.ignoreMalformed()); break;
            case 3: gft.setCoerce(!gft.coerce()); break;
            default: super.modifyProperty(ft, propNum - 4);
        }
    @Before
    public void setupProperties() {
        addModifier(new Modifier("geohash", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((GeoPointFieldMapper.GeoPointFieldType)ft).setGeohashEnabled(new StringFieldMapper.StringFieldType(), 1, true);
            }
        });
        addModifier(new Modifier("lat_lon", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((GeoPointFieldMapper.GeoPointFieldType)ft).setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType());
            }
        });
        addModifier(new Modifier("ignore_malformed", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                GeoPointFieldMapper.GeoPointFieldType gft = (GeoPointFieldMapper.GeoPointFieldType)ft;
                gft.setIgnoreMalformed(!gft.ignoreMalformed());
            }
        });
        addModifier(new Modifier("coerce", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                GeoPointFieldMapper.GeoPointFieldType gft = (GeoPointFieldMapper.GeoPointFieldType)ft;
                gft.setCoerce(!gft.coerce());
            }
        });
    }
}
@@ -377,10 +377,10 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
        assertThat(mergeResult.hasConflicts(), equalTo(true));
        assertThat(mergeResult.buildConflicts().length, equalTo(4));
        ArrayList conflicts = new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()));
        assertThat("mapper [shape] has different strategy", isIn(conflicts));
        assertThat("mapper [shape] has different tree", isIn(conflicts));
        assertThat("mapper [shape] has different tree_levels", isIn(conflicts));
        assertThat("mapper [shape] has different precision", isIn(conflicts));
        assertThat("mapper [shape] has different [strategy]", isIn(conflicts));
        assertThat("mapper [shape] has different [tree]", isIn(conflicts));
        assertThat("mapper [shape] has different [tree_levels]", isIn(conflicts));
        assertThat("mapper [shape] has different [precision]", isIn(conflicts));

        // verify nothing changed
        FieldMapper fieldMapper = stage1.mappers().getMapper("shape");
@@ -21,31 +21,51 @@ package org.elasticsearch.index.mapper.geo;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

public class GeoShapeFieldTypeTests extends FieldTypeTestCase {
    @Override
    protected MappedFieldType createDefaultFieldType() {
        GeoShapeFieldMapper.GeoShapeFieldType gft = new GeoShapeFieldMapper.GeoShapeFieldType();
        gft.setNames(new MappedFieldType.Names("testgeoshape"));
        return gft;
        return new GeoShapeFieldMapper.GeoShapeFieldType();
    }

    @Override
    protected int numProperties() {
        return 6 + super.numProperties();
    }

    @Override
    protected void modifyProperty(MappedFieldType ft, int propNum) {
        GeoShapeFieldMapper.GeoShapeFieldType gft = (GeoShapeFieldMapper.GeoShapeFieldType)ft;
        switch (propNum) {
            case 0: gft.setTree("quadtree"); break;
            case 1: gft.setStrategyName("term"); break;
            case 2: gft.setTreeLevels(10); break;
            case 3: gft.setPrecisionInMeters(20); break;
            case 4: gft.setDefaultDistanceErrorPct(0.5); break;
            case 5: gft.setOrientation(ShapeBuilder.Orientation.LEFT); break;
            default: super.modifyProperty(ft, propNum - 6);
        }
    @Before
    public void setupProperties() {
        addModifier(new Modifier("tree", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setTree("quadtree");
            }
        });
        addModifier(new Modifier("strategy", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setStrategyName("term");
            }
        });
        addModifier(new Modifier("tree_levels", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setTreeLevels(10);
            }
        });
        addModifier(new Modifier("precision", false, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setPrecisionInMeters(20);
            }
        });
        addModifier(new Modifier("distance_error_pct", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setDefaultDistanceErrorPct(0.5);
            }
        });
        addModifier(new Modifier("orientation", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT);
            }
        });
    }
}
@@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.internal;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;

public class FieldNamesFieldTypeTests extends FieldTypeTestCase {
    @Override

@@ -27,17 +28,14 @@ public class FieldNamesFieldTypeTests extends FieldTypeTestCase {
        return new FieldNamesFieldMapper.FieldNamesFieldType();
    }

    @Override
    protected int numProperties() {
        return 1 + super.numProperties();
    }

    @Override
    protected void modifyProperty(MappedFieldType ft, int propNum) {
        FieldNamesFieldMapper.FieldNamesFieldType fnft = (FieldNamesFieldMapper.FieldNamesFieldType)ft;
        switch (propNum) {
            case 0: fnft.setEnabled(!fnft.isEnabled()); break;
            default: super.modifyProperty(ft, propNum - 1);
        }
    @Before
    public void setupProperties() {
        addModifier(new Modifier("enabled", true, true) {
            @Override
            public void modify(MappedFieldType ft) {
                FieldNamesFieldMapper.FieldNamesFieldType fnft = (FieldNamesFieldMapper.FieldNamesFieldType)ft;
                fnft.setEnabled(!fnft.isEnabled());
            }
        });
    }
}
@@ -173,15 +173,15 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
        DocumentMapper docMapper4 = parser.parse(mapping);
        mergeResult = docMapper.merge(docMapper4.mapping(), true, false);
        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
        assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different index values"));
        assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different store values"));
        assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different [index] values"));
        assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different [store] values"));

        mergeResult = docMapper.merge(docMapper4.mapping(), false, false);
        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));

        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
        assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different index values"));
        assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different store values"));
        assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different [index] values"));
        assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different [store] values"));

        // There are conflicts, but the `name.not_indexed3` has been added, b/c that field has no conflicts
        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@@ -515,7 +515,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
        mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false);
        assertTrue(mergeResult.hasConflicts());
        assertEquals(1, mergeResult.buildConflicts().length);
        assertTrue(mergeResult.buildConflicts()[0].contains("cannot enable norms"));
        assertTrue(mergeResult.buildConflicts()[0].contains("different [omit_norms]"));
    }

    /**
@@ -579,11 +579,10 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {

        MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false);
        List<String> expectedConflicts = new ArrayList<>(Arrays.asList(
            "mapper [_timestamp] has different index values",
            "mapper [_timestamp] has different store values",
            "mapper [_timestamp] has different [index] values",
            "mapper [_timestamp] has different [store] values",
            "Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02",
            "Cannot update path in _timestamp value. Value is foo path in merged mapping is bar",
            "mapper [_timestamp] has different tokenize values"));
            "Cannot update path in _timestamp value. Value is foo path in merged mapping is bar"));

        for (String conflict : mergeResult.buildConflicts()) {
            assertTrue("found unexpected conflict [" + conflict + "]", expectedConflicts.remove(conflict));

@@ -618,12 +617,12 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {

        MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false);
        List<String> expectedConflicts = new ArrayList<>();
        expectedConflicts.add("mapper [_timestamp] has different index values");
        expectedConflicts.add("mapper [_timestamp] has different tokenize values");
        expectedConflicts.add("mapper [_timestamp] has different [index] values");
        expectedConflicts.add("mapper [_timestamp] has different [tokenize] values");
        if (indexValues.get(0).equals("not_analyzed") == false) {
            // if the only index value left is not_analyzed, then the doc values setting will be the same, but in the
            // other two cases, it will change
            expectedConflicts.add("mapper [_timestamp] has different doc_values values");
            expectedConflicts.add("mapper [_timestamp] has different [doc_values] values");
        }

        for (String conflict : mergeResult.buildConflicts()) {
@@ -55,14 +55,14 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_create_index.json");
        String mappingUpdate = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json");
        String[] errorMessage = {"[_all] enabled is true now encountering false",
                "[_all] cannot enable norms (`norms.enabled`)",
                "[_all] has different store values",
                "[_all] has different store_term_vector values",
                "[_all] has different store_term_vector_offsets values",
                "[_all] has different store_term_vector_positions values",
                "[_all] has different store_term_vector_payloads values",
                "[_all] has different analyzer",
                "[_all] has different similarity"};
                "[_all] has different [omit_norms] values",
                "[_all] has different [store] values",
                "[_all] has different [store_term_vector] values",
                "[_all] has different [store_term_vector_offsets] values",
                "[_all] has different [store_term_vector_positions] values",
                "[_all] has different [store_term_vector_payloads] values",
                "[_all] has different [analyzer]",
                "[_all] has different [similarity]"};
        // fielddata and search_analyzer should not report conflict
        testConflict(mapping, mappingUpdate, errorMessage);
    }