Move merge compatibility logic from MappedFieldType to FieldMapper (#56915)
Merging logic is currently split between FieldMapper, with its merge() method, and MappedFieldType, which checks for merging compatibility. The compatibility checks are called from a third class, MappingMergeValidator. This makes it difficult to reason about what is or is not compatible in updates, and even about what is in fact updateable - we have a number of tests that check compatibility on changes in mapping configuration that are not in fact possible. This commit refactors the compatibility logic so that it all sits on FieldMapper and is called at merge time. It adds a new FieldMapperTestCase base class that FieldMapper tests can extend, and moves the compatibility testing machinery from FieldTypeTestCase into it. Relates to #56814
This commit is contained in:
parent 8b9c4eb3e0
commit 18bfbeda29
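The snippet below is a minimal, self-contained sketch of the merge pattern this commit introduces, modeled on the FieldMapper and ScaledFloatFieldMapper hunks further down. It is not the actual Elasticsearch code; all class and field names here are illustrative. The idea it shows: the base class owns a final merge() entry point that collects conflicts and rejects the whole update, while each concrete mapper only overrides mergeOptions() to report its type-specific incompatibilities.

```java
import java.util.ArrayList;
import java.util.List;

// Toy analogue of the refactored merge flow (illustrative names only).
abstract class ToyFieldMapper {
    protected final String name;

    ToyFieldMapper(String name) {
        this.name = name;
    }

    // Analogue of FieldMapper#merge in the diff: gather conflicts from the
    // subclass, then fail the mapping update as a whole if any were reported.
    public final ToyFieldMapper merge(ToyFieldMapper mergeWith) {
        if (!getClass().equals(mergeWith.getClass())) {
            throw new IllegalArgumentException(
                "mapper [" + name + "] cannot be changed to [" + mergeWith.getClass().getSimpleName() + "]");
        }
        List<String> conflicts = new ArrayList<>();
        mergeOptions(mergeWith, conflicts);
        if (!conflicts.isEmpty()) {
            throw new IllegalArgumentException(
                "Mapper for [" + name + "] conflicts with existing mapping:\n" + conflicts);
        }
        return mergeWith; // simplified: the real code clones and copies updateable settings
    }

    // Subclasses report incompatible settings here instead of overriding merge() itself.
    protected abstract void mergeOptions(ToyFieldMapper other, List<String> conflicts);
}

class ToyScaledFloatMapper extends ToyFieldMapper {
    final double scalingFactor;

    ToyScaledFloatMapper(String name, double scalingFactor) {
        super(name);
        this.scalingFactor = scalingFactor;
    }

    @Override
    protected void mergeOptions(ToyFieldMapper other, List<String> conflicts) {
        ToyScaledFloatMapper o = (ToyScaledFloatMapper) other;
        if (scalingFactor != o.scalingFactor) {
            conflicts.add("mapper [" + name + "] has different [scaling_factor] values");
        }
    }
}

public class MergeDemo {
    public static void main(String[] args) {
        ToyFieldMapper existing = new ToyScaledFloatMapper("price", 100);
        ToyFieldMapper update = new ToyScaledFloatMapper("price", 10);
        existing.merge(update); // throws: different [scaling_factor] values
    }
}
```

With this shape, adding a new mapper type means implementing mergeOptions() (and, in tests, registering incompatible-setting modifiers via addModifier in the new FieldMapperTestCase) rather than overriding both doMerge() and checkCompatibility().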
@@ -134,14 +134,6 @@ public class RankFeatureFieldMapper extends FieldMapper {
return h;
}
@Override
public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
super.checkCompatibility(other, conflicts);
if (positiveScoreImpact != ((RankFeatureFieldType) other).positiveScoreImpact()) {
conflicts.add("mapper [" + name() + "] has different [positive_score_impact] values");
}
}
@Override
public String typeName() {
return CONTENT_TYPE;

@@ -230,4 +222,12 @@ public class RankFeatureFieldMapper extends FieldMapper {
builder.field("positive_score_impact", fieldType().positiveScoreImpact());
}
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
RankFeatureFieldType ft = (RankFeatureFieldType) other.fieldType();
if (fieldType().positiveScoreImpact != ft.positiveScoreImpact()) {
conflicts.add("mapper [" + name() + "] has different [positive_score_impact] values");
}
}
}

@@ -29,6 +29,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**

@@ -127,6 +128,11 @@ public class RankFeaturesFieldMapper extends FieldMapper {
return (RankFeaturesFieldMapper) super.clone();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}
@Override
public RankFeaturesFieldType fieldType() {
return (RankFeaturesFieldType) super.fieldType();

@@ -213,14 +213,6 @@ public class ScaledFloatFieldMapper extends FieldMapper {
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
super.checkCompatibility(other, conflicts);
if (scalingFactor != ((ScaledFloatFieldType) other).getScalingFactor()) {
conflicts.add("mapper [" + name() + "] has different [scaling_factor] values");
}
}
@Override
public Query existsQuery(QueryShardContext context) {
if (hasDocValues()) {

@@ -450,14 +442,17 @@ public class ScaledFloatFieldMapper extends FieldMapper {
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
ScaledFloatFieldMapper other = (ScaledFloatFieldMapper) mergeWith;
if (other.ignoreMalformed.explicit()) {
this.ignoreMalformed = other.ignoreMalformed;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
ScaledFloatFieldMapper mergeWith = (ScaledFloatFieldMapper) other;
ScaledFloatFieldType ft = (ScaledFloatFieldType) other.fieldType();
if (fieldType().scalingFactor != ft.getScalingFactor()) {
conflicts.add("mapper [" + name() + "] has different [scaling_factor] values");
}
if (other.coerce.explicit()) {
this.coerce = other.coerce;
if (mergeWith.ignoreMalformed.explicit()) {
this.ignoreMalformed = mergeWith.ignoreMalformed;
}
if (mergeWith.coerce.explicit()) {
this.coerce = mergeWith.coerce;
}
}
@@ -343,21 +343,6 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
}
}
@Override
public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
super.checkCompatibility(other, conflicts);
final SearchAsYouTypeFieldType otherFieldType = (SearchAsYouTypeFieldType) other;
if (this.shingleFields.length != otherFieldType.shingleFields.length) {
conflicts.add("mapper [" + name() + "] has a different [max_shingle_size]");
} else if (Arrays.equals(this.shingleFields, otherFieldType.shingleFields) == false) {
conflicts.add("mapper [" + name() + "] has shingle subfields that are configured differently");
}
if (Objects.equals(this.prefixField, otherFieldType.prefixField) == false) {
conflicts.add("mapper [" + name() + "] has different [index_prefixes] settings");
}
}
@Override
public boolean equals(Object otherObject) {
if (this == otherObject) {

@@ -488,6 +473,11 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
throw new UnsupportedOperationException();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}
@Override
protected String contentType() {
return "prefix";

@@ -515,6 +505,11 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
throw new UnsupportedOperationException();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}
@Override
protected String contentType() {
return "shingle";

@@ -613,18 +608,6 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
}
}
@Override
public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
super.checkCompatibility(other, conflicts);
ShingleFieldType ft = (ShingleFieldType) other;
if (ft.shingleSize != this.shingleSize) {
conflicts.add("mapper [" + name() + "] has different [shingle_size] values");
}
if (Objects.equals(this.prefixFieldType, ft.prefixFieldType) == false) {
conflicts.add("mapper [" + name() + "] has different [index_prefixes] settings");
}
}
@Override
public boolean equals(Object otherObject) {
if (this == otherObject) {

@@ -698,18 +681,15 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
SearchAsYouTypeFieldMapper mw = (SearchAsYouTypeFieldMapper) mergeWith;
if (mw.maxShingleSize != maxShingleSize) {
throw new IllegalArgumentException("mapper [" + name() + "] has different [max_shingle_size] setting, current ["
+ this.maxShingleSize + "], merged [" + mw.maxShingleSize + "]");
}
this.prefixField = (PrefixFieldMapper) this.prefixField.merge(mw.prefixField);
ShingleFieldMapper[] shingleFieldMappers = new ShingleFieldMapper[mw.shingleFields.length];
for (int i = 0; i < shingleFieldMappers.length; i++) {
this.shingleFields[i] = (ShingleFieldMapper) this.shingleFields[i].merge(mw.shingleFields[i]);
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
final SearchAsYouTypeFieldMapper m = (SearchAsYouTypeFieldMapper) other;
if (this.shingleFields.length != m.shingleFields.length) {
conflicts.add("mapper [" + name() + "] has a different [max_shingle_size]");
} else {
this.prefixField = (PrefixFieldMapper) this.prefixField.merge(m.prefixField);
for (int i = 0; i < m.shingleFields.length; i++) {
this.shingleFields[i] = (ShingleFieldMapper) this.shingleFields[i].merge(m.shingleFields[i]);
}
}
}

@@ -29,6 +29,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;

@@ -200,10 +201,9 @@ public class TokenCountFieldMapper extends FieldMapper {
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
this.analyzer = ((TokenCountFieldMapper) mergeWith).analyzer;
this.enablePositionIncrements = ((TokenCountFieldMapper) mergeWith).enablePositionIncrements;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
this.analyzer = ((TokenCountFieldMapper) other).analyzer;
this.enablePositionIncrements = ((TokenCountFieldMapper) other).enablePositionIncrements;
}
@Override
@@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.hamcrest.Matchers;
import org.junit.Before;

@@ -38,7 +37,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
public class RankFeatureFieldMapperTests extends ESSingleNodeTestCase {
public class RankFeatureFieldMapperTests extends FieldMapperTestCase<RankFeatureFieldMapper.Builder> {
IndexService indexService;
DocumentMapperParser parser;

@@ -47,6 +46,10 @@ public class RankFeatureFieldMapperTests extends ESSingleNodeTestCase {
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
addModifier("positive_score_impact", false, (a, b) -> {
a.fieldType().setPositiveScoreImpact(true);
b.fieldType().setPositiveScoreImpact(false);
});
}
@Override

@@ -63,6 +66,11 @@ public class RankFeatureFieldMapperTests extends ESSingleNodeTestCase {
return freq;
}
@Override
protected RankFeatureFieldMapper.Builder newBuilder() {
return new RankFeatureFieldMapper.Builder("rank-feature");
}
public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_feature").endObject().endObject()

@@ -171,4 +179,5 @@ public class RankFeatureFieldMapperTests extends ESSingleNodeTestCase {
assertEquals("[rank_feature] fields do not support indexing multiple values for the same field [foo.field] in the same document",
e.getCause().getMessage());
}
}

@@ -19,31 +19,13 @@
package org.elasticsearch.index.mapper;
import org.junit.Before;
public class RankFeatureFieldTypeTests extends FieldTypeTestCase {
public class RankFeatureFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
@Override
protected MappedFieldType createDefaultFieldType() {
return new RankFeatureFieldMapper.RankFeatureFieldType();
}
@Before
public void setupProperties() {
addModifier(new Modifier("positive_score_impact", false) {
@Override
public void modify(MappedFieldType ft) {
RankFeatureFieldMapper.RankFeatureFieldType tft = (RankFeatureFieldMapper.RankFeatureFieldType)ft;
tft.setPositiveScoreImpact(tft.positiveScoreImpact() == false);
}
@Override
public void normalizeOther(MappedFieldType other) {
super.normalizeOther(other);
((RankFeatureFieldMapper.RankFeatureFieldType) other).setPositiveScoreImpact(true);
}
});
}
public void testIsAggregatable() {
MappedFieldType fieldType = createDefaultFieldType();
assertFalse(fieldType.isAggregatable());

@@ -19,7 +19,7 @@
package org.elasticsearch.index.mapper;
public class RankFeatureMetaFieldTypeTests extends FieldTypeTestCase {
public class RankFeatureMetaFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
@Override
protected MappedFieldType createDefaultFieldType() {

@@ -19,7 +19,7 @@
package org.elasticsearch.index.mapper;
public class RankFeaturesFieldTypeTests extends FieldTypeTestCase {
public class RankFeaturesFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
@Override
protected MappedFieldType createDefaultFieldType() {

@@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;

@@ -41,7 +40,7 @@ import java.util.List;
import static org.hamcrest.Matchers.containsString;
public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloatFieldMapper.Builder> {
IndexService indexService;
DocumentMapperParser parser;

@@ -50,6 +49,10 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
addModifier("scaling_factor", false, (a, b) -> {
a.scalingFactor(10);
b.scalingFactor(100);
});
}
@Override

@@ -57,6 +60,11 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
return pluginList(InternalSettingsPlugin.class, MapperExtrasPlugin.class);
}
@Override
protected ScaledFloatFieldMapper.Builder newBuilder() {
return new ScaledFloatFieldMapper.Builder("scaled-float").scalingFactor(1);
}
public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
|
@ -29,20 +29,19 @@ import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.elasticsearch.core.internal.io.IOUtils;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.core.internal.io.IOUtils;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.fielddata.LeafNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.LeafNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class ScaledFloatFieldTypeTests extends FieldTypeTestCase {
|
||||
public class ScaledFloatFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
@ -51,22 +50,6 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase {
|
||||
return ft;
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
addModifier(new Modifier("scaling_factor", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
ScaledFloatFieldMapper.ScaledFloatFieldType tft = (ScaledFloatFieldMapper.ScaledFloatFieldType)ft;
|
||||
tft.setScalingFactor(10);
|
||||
}
|
||||
@Override
|
||||
public void normalizeOther(MappedFieldType other) {
|
||||
super.normalizeOther(other);
|
||||
((ScaledFloatFieldMapper.ScaledFloatFieldType) other).setScalingFactor(100);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void testTermQuery() {
|
||||
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType();
|
||||
ft.setName("scaled_float");
|
||||
|
@ -18,6 +18,7 @@
|
||||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.apache.lucene.analysis.standard.StandardAnalyzer;
|
||||
import org.apache.lucene.document.FieldType;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
@ -43,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.analysis.AnalyzerScope;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.PrefixFieldMapper;
|
||||
import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.PrefixFieldType;
|
||||
@ -55,9 +57,9 @@ import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
|
||||
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.hamcrest.Matcher;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
@ -78,13 +80,28 @@ import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.core.IsInstanceOf.instanceOf;
|
||||
|
||||
public class SearchAsYouTypeFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase<SearchAsYouTypeFieldMapper.Builder> {
|
||||
|
||||
@Before
|
||||
public void addModifiers() {
|
||||
addModifier("max_shingle_size", false, (a, b) -> {
|
||||
a.maxShingleSize(3);
|
||||
b.maxShingleSize(2);
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
return pluginList(MapperExtrasPlugin.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SearchAsYouTypeFieldMapper.Builder newBuilder() {
|
||||
return new SearchAsYouTypeFieldMapper.Builder("sayt")
|
||||
.indexAnalyzer(new NamedAnalyzer("analyzer", AnalyzerScope.INDEX, new StandardAnalyzer()))
|
||||
.searchAnalyzer(new NamedAnalyzer("analyzer", AnalyzerScope.INDEX, new StandardAnalyzer()));
|
||||
}
|
||||
|
||||
public void testIndexing() throws IOException {
|
||||
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -31,38 +31,16 @@ import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.Defaults;
|
||||
import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.PrefixFieldType;
|
||||
import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.SearchAsYouTypeFieldType;
|
||||
import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.ShingleFieldType;
|
||||
import org.junit.Before;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import static org.apache.lucene.search.MultiTermQuery.CONSTANT_SCORE_REWRITE;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class SearchAsYouTypeFieldTypeTests extends FieldTypeTestCase {
|
||||
public class SearchAsYouTypeFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
|
||||
private static final String NAME = "a_field";
|
||||
private static final String PREFIX_NAME = NAME + "._index_prefix";
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
addModifier(new Modifier("max_shingle_size", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
SearchAsYouTypeFieldType fieldType = (SearchAsYouTypeFieldType) ft;
|
||||
fieldType.setShingleFields(new ShingleFieldType[] {
|
||||
new ShingleFieldType(fieldType, 2),
|
||||
new ShingleFieldType(fieldType, 3)
|
||||
});
|
||||
}
|
||||
});
|
||||
addModifier(new Modifier("index_prefixes", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
SearchAsYouTypeFieldType fieldType = (SearchAsYouTypeFieldType) ft;
|
||||
fieldType.setPrefixField(new PrefixFieldType(NAME, PREFIX_NAME, 1, 10));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SearchAsYouTypeFieldType createDefaultFieldType() {
|
||||
final SearchAsYouTypeFieldType fieldType = new SearchAsYouTypeFieldType();
|
||||
|
@@ -34,6 +34,7 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.io.IOException;
import java.util.List;
/**
* Simple field mapper hack to ensure that there is a one and only {@link ParentJoinFieldMapper} per mapping.

@@ -137,6 +138,11 @@ public class MetaJoinFieldMapper extends FieldMapper {
return (MetaJoinFieldMapper) super.clone();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
throw new IllegalStateException("Should never be called");

@@ -36,7 +36,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.StringFieldType;
import org.elasticsearch.index.query.QueryShardContext;

@@ -45,6 +44,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**

@@ -197,11 +197,9 @@ public final class ParentIdFieldMapper extends FieldMapper {
context.doc().add(new SortedDocValuesField(fieldType().name(), binaryValue));
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
ParentIdFieldMapper parentMergeWith = (ParentIdFieldMapper) mergeWith;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
ParentIdFieldMapper parentMergeWith = (ParentIdFieldMapper) other;
this.children = parentMergeWith.children;
}

@@ -318,17 +318,14 @@ public final class ParentJoinFieldMapper extends FieldMapper {
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
ParentJoinFieldMapper joinMergeWith = (ParentJoinFieldMapper) mergeWith;
List<String> conflicts = new ArrayList<>();
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
ParentJoinFieldMapper joinMergeWith = (ParentJoinFieldMapper) other;
final List<ParentIdFieldMapper> newParentIdFields = new ArrayList<>();
for (ParentIdFieldMapper mapper : parentIdFields) {
if (joinMergeWith.getParentIdFieldMapper(mapper.getParentName(), true) == null) {
conflicts.add("cannot remove parent [" + mapper.getParentName() + "] in join field [" + name() + "]");
}
}
final List<ParentIdFieldMapper> newParentIdFields = new ArrayList<>();
for (ParentIdFieldMapper mergeWithMapper : joinMergeWith.parentIdFields) {
ParentIdFieldMapper self = getParentIdFieldMapper(mergeWithMapper.getParentName(), true);
if (self == null) {

@@ -353,9 +350,6 @@ public final class ParentJoinFieldMapper extends FieldMapper {
newParentIdFields.add(merged);
}
}
if (conflicts.isEmpty() == false) {
throw new IllegalStateException("invalid update for join field [" + name() + "]:\n" + conflicts.toString());
}
this.eagerGlobalOrdinals = joinMergeWith.eagerGlobalOrdinals;
this.parentIdFields = Collections.unmodifiableList(newParentIdFields);
this.uniqueFieldMapper = (MetaJoinFieldMapper) uniqueFieldMapper.merge(joinMergeWith.uniqueFieldMapper);

@@ -235,7 +235,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class,
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("cannot remove parent [parent] in join field [join_field]"));

@@ -251,7 +251,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class,
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("cannot remove child [grand_child2] in join field [join_field]"));

@@ -268,7 +268,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class,
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("cannot create child [parent] from an existing parent"));

@@ -285,7 +285,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class,
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("cannot create parent [grand_child2] from an existing child]"));

@@ -511,6 +511,11 @@ public class PercolatorFieldMapper extends FieldMapper {
).iterator();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
throw new UnsupportedOperationException("should not be invoked");
@@ -23,7 +23,6 @@ import com.ibm.icu.text.Collator;
import com.ibm.icu.text.RawCollationKey;
import com.ibm.icu.text.RuleBasedCollator;
import com.ibm.icu.util.ULocale;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;

@@ -51,7 +50,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.io.IOException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

@@ -98,15 +96,6 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
return super.equals(o) && Objects.equals(collator, ((CollationFieldType) o).collator);
}
@Override
public void checkCompatibility(MappedFieldType otherFT, List<String> conflicts) {
super.checkCompatibility(otherFT, conflicts);
CollationFieldType other = (CollationFieldType) otherFT;
if (!Objects.equals(collator, other.collator)) {
conflicts.add("mapper [" + name() + "] has different [collator]");
}
}
@Override
public int hashCode() {
return 31 * super.hashCode() + Objects.hashCode(collator);

@@ -612,12 +601,11 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
List<String> conflicts = new ArrayList<>();
ICUCollationKeywordFieldMapper icuMergeWith = (ICUCollationKeywordFieldMapper) mergeWith;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
ICUCollationKeywordFieldMapper icuMergeWith = (ICUCollationKeywordFieldMapper) other;
if (!Objects.equals(collator, icuMergeWith.collator)) {
conflicts.add("mapper [" + name() + "] has different [collator]");
}
if (!Objects.equals(rules, icuMergeWith.rules)) {
conflicts.add("Cannot update rules setting for [" + CONTENT_TYPE + "]");
}

@@ -667,10 +655,6 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
}
this.ignoreAbove = icuMergeWith.ignoreAbove;
if (!conflicts.isEmpty()) {
throw new IllegalArgumentException("Can't merge because of conflicts: " + conflicts);
}
}
@Override

@@ -38,7 +38,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class CollationFieldTypeTests extends FieldTypeTestCase {
public class CollationFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
@Override
protected MappedFieldType createDefaultFieldType() {
return new CollationFieldType();

@@ -37,7 +37,6 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;

@@ -45,7 +44,9 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
import static org.hamcrest.Matchers.containsString;
public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase<ICUCollationKeywordFieldMapper.Builder> {
private static final String FIELD_TYPE = "icu_collation_keyword";

@@ -54,6 +55,11 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
return Arrays.asList(AnalysisICUPlugin.class, InternalSettingsPlugin.class);
}
@Override
protected ICUCollationKeywordFieldMapper.Builder newBuilder() {
return new ICUCollationKeywordFieldMapper.Builder("icu");
}
IndexService indexService;
DocumentMapperParser parser;

@@ -61,6 +67,29 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
addModifier("strength", false, (a, b) -> {
a.strength("primary");
b.strength("secondary");
});
addModifier("decomposition", false, (a, b) -> {
a.decomposition("no");
b.decomposition("canonical");
});
addModifier("alternate", false, (a, b) -> {
a.alternate("shifted");
b.alternate("non-ignorable");
});
addBooleanModifier("case_level", false, ICUCollationKeywordFieldMapper.Builder::caseLevel);
addModifier("case_first", false, (a, b) -> {
a.caseFirst("upper");
a.caseFirst("lower");
});
addBooleanModifier("numeric", false, ICUCollationKeywordFieldMapper.Builder::numeric);
addModifier("variable_top", false, (a, b) -> {
a.variableTop(";");
b.variableTop(":");
});
addBooleanModifier("hiragana_quaternary_mode", false, ICUCollationKeywordFieldMapper.Builder::hiraganaQuaternaryMode);
}
public void testDefaults() throws Exception {

@@ -400,8 +429,7 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("type",
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE));
assertEquals("Can't merge because of conflicts: [Cannot update language setting for [" + FIELD_TYPE
+ "], Cannot update strength setting for [" + FIELD_TYPE + "]]", e.getMessage());
assertThat(e.getMessage(), containsString("mapper [field] has different [collator]"));
}
|
@ -568,6 +568,11 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
|
||||
return (AnnotatedTextFieldMapper) super.clone();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
|
||||
}
|
||||
|
||||
public int getPositionIncrementGap() {
|
||||
return this.positionIncrementGap;
|
||||
}
|
||||
|
@ -29,7 +29,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class AnnotatedTextFieldTypeTests extends FieldTypeTestCase {
|
||||
public class AnnotatedTextFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new AnnotatedTextFieldMapper.AnnotatedTextFieldType();
|
||||
|
@ -42,6 +42,7 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class Murmur3FieldMapper extends FieldMapper {
|
||||
@ -169,4 +170,9 @@ public class Murmur3FieldMapper extends FieldMapper {
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -26,8 +26,8 @@ import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.index.mapper.EnabledAttributeMapper;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
||||
@ -35,6 +35,7 @@ import org.elasticsearch.index.mapper.ParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class SizeFieldMapper extends MetadataFieldMapper {
|
||||
@ -176,8 +177,8 @@ public class SizeFieldMapper extends MetadataFieldMapper {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
SizeFieldMapper sizeFieldMapperMergeWith = (SizeFieldMapper) mergeWith;
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
SizeFieldMapper sizeFieldMapperMergeWith = (SizeFieldMapper) other;
|
||||
if (sizeFieldMapperMergeWith.enabledState != enabledState && !sizeFieldMapperMergeWith.enabledState.unset()) {
|
||||
this.enabledState = sizeFieldMapperMergeWith.enabledState;
|
||||
}
|
||||
|
@ -168,7 +168,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
|
||||
.setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}", XContentType.JSON).execute().actionGet();
|
||||
fail("Expected MergeMappingException");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("mapper [body] of different type, current_type [text], merged_type [integer]"));
|
||||
assertThat(e.getMessage(), containsString("mapper [body] cannot be changed from type [text] to [integer]"));
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -282,9 +282,8 @@ public abstract class AbstractGeometryFieldMapper<Parsed, Processed> extends Fie
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
AbstractGeometryFieldMapper gsfm = (AbstractGeometryFieldMapper)mergeWith;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
AbstractGeometryFieldMapper gsfm = (AbstractGeometryFieldMapper)other;
if (gsfm.ignoreMalformed.explicit()) {
this.ignoreMalformed = gsfm.ignoreMalformed;

@@ -122,9 +122,8 @@ public abstract class AbstractPointGeometryFieldMapper<Parsed, Processed> extend
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
AbstractPointGeometryFieldMapper gpfm = (AbstractPointGeometryFieldMapper)mergeWith;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
AbstractPointGeometryFieldMapper gpfm = (AbstractPointGeometryFieldMapper)other;
if (gpfm.fieldType().nullValue() != null) {
this.fieldType().setNullValue(gpfm.fieldType().nullValue());
}

@@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper.DeprecatedParame
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;

@@ -212,17 +213,19 @@ public abstract class AbstractShapeGeometryFieldMapper<Parsed, Processed> extend
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
AbstractShapeGeometryFieldMapper gsfm = (AbstractShapeGeometryFieldMapper)mergeWith;
protected final void mergeOptions(FieldMapper other, List<String> conflicts) {
AbstractShapeGeometryFieldMapper gsfm = (AbstractShapeGeometryFieldMapper)other;
if (gsfm.coerce.explicit()) {
this.coerce = gsfm.coerce;
}
if (gsfm.orientation.explicit()) {
this.orientation = gsfm.orientation;
}
mergeGeoOptions(gsfm, conflicts);
}
protected abstract void mergeGeoOptions(AbstractShapeGeometryFieldMapper<?,?> mergeWith, List<String> conflicts);
@Override
public void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);

@@ -45,6 +45,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.io.IOException;
import java.time.ZoneId;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.mapper.TypeParsers.parseField;

@@ -198,6 +199,11 @@ public class BinaryFieldMapper extends FieldMapper {
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}
@Override
protected String contentType() {
return CONTENT_TYPE;

@@ -45,6 +45,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.io.IOException;
import java.time.ZoneId;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.mapper.TypeParsers.parseField;

@@ -257,6 +258,11 @@ public class BooleanFieldMapper extends FieldMapper {
}
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}
@Override
protected String contentType() {
return CONTENT_TYPE;

@@ -330,24 +330,6 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
CompletionFieldType other = (CompletionFieldType)fieldType;
if (preservePositionIncrements != other.preservePositionIncrements) {
conflicts.add("mapper [" + name() + "] has different [preserve_position_increments] values");
}
if (preserveSep != other.preserveSep) {
conflicts.add("mapper [" + name() + "] has different [preserve_separators] values");
}
if (hasContextMappings() != other.hasContextMappings()) {
conflicts.add("mapper [" + name() + "] has different [context_mappings] values");
} else if (hasContextMappings() && contextMappings.equals(other.contextMappings) == false) {
conflicts.add("mapper [" + name() + "] has different [context_mappings] values");
}
}
}
/**

@@ -665,9 +647,22 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith;
this.maxInputLength = fieldMergeWith.maxInputLength;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
CompletionFieldType c = (CompletionFieldType)other.fieldType();
if (fieldType().preservePositionIncrements != c.preservePositionIncrements) {
conflicts.add("mapper [" + name() + "] has different [preserve_position_increments] values");
}
if (fieldType().preserveSep != c.preserveSep) {
conflicts.add("mapper [" + name() + "] has different [preserve_separators] values");
}
if (fieldType().hasContextMappings() != c.hasContextMappings()) {
conflicts.add("mapper [" + name() + "] has different [context_mappings] values");
} else if (fieldType().hasContextMappings() && fieldType().contextMappings.equals(c.contextMappings) == false) {
conflicts.add("mapper [" + name() + "] has different [context_mappings] values");
}
this.maxInputLength = ((CompletionFieldMapper)other).maxInputLength;
}
}

@@ -345,21 +345,6 @@ public final class DateFieldMapper extends FieldMapper {
return resolution.type();
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
DateFieldType other = (DateFieldType) fieldType;
if (Objects.equals(dateTimeFormatter.pattern(), other.dateTimeFormatter.pattern()) == false) {
conflicts.add("mapper [" + name() + "] has different [format] values");
}
if (Objects.equals(dateTimeFormatter.locale(), other.dateTimeFormatter.locale()) == false) {
conflicts.add("mapper [" + name() + "] has different [locale] values");
}
if (Objects.equals(resolution.type(), other.resolution.type()) == false) {
conflicts.add("mapper [" + name() + "] cannot change between milliseconds and nanoseconds");
}
}
public DateFormatter dateTimeFormatter() {
return dateTimeFormatter;
}

@@ -655,11 +640,19 @@ public final class DateFieldMapper extends FieldMapper {
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
final DateFieldMapper other = (DateFieldMapper) mergeWith;
if (other.ignoreMalformed.explicit()) {
this.ignoreMalformed = other.ignoreMalformed;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
final DateFieldMapper d = (DateFieldMapper) other;
if (Objects.equals(fieldType().dateTimeFormatter.pattern(), d.fieldType().dateTimeFormatter.pattern()) == false) {
conflicts.add("mapper [" + name() + "] has different [format] values");
}
if (Objects.equals(fieldType().dateTimeFormatter.locale(), d.fieldType().dateTimeFormatter.locale()) == false) {
conflicts.add("mapper [" + name() + "] has different [locale] values");
}
if (Objects.equals(fieldType().resolution.type(), d.fieldType().resolution.type()) == false) {
conflicts.add("mapper [" + name() + "] cannot change between milliseconds and nanoseconds");
}
if (d.ignoreMalformed.explicit()) {
this.ignoreMalformed = d.ignoreMalformed;
}
}
@@ -334,32 +334,85 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
}
@Override
public FieldMapper merge(Mapper mergeWith) {
public final FieldMapper merge(Mapper mergeWith) {
FieldMapper merged = clone();
merged.doMerge(mergeWith);
List<String> conflicts = new ArrayList<>();
if (mergeWith instanceof FieldMapper == false) {
throw new IllegalArgumentException("mapper [" + fieldType.name() + "] cannot be changed from type ["
+ contentType() + "] to [" + mergeWith.getClass().getSimpleName() + "]");
}
FieldMapper toMerge = (FieldMapper) mergeWith;
merged.mergeSharedOptions(toMerge, conflicts);
merged.mergeOptions(toMerge, conflicts);
if (conflicts.isEmpty() == false) {
throw new IllegalArgumentException("Mapper for [" + name() +
"] conflicts with existing mapping:\n" + conflicts.toString());
}
merged.multiFields = multiFields.merge(toMerge.multiFields);
// apply changeable values
merged.fieldType = toMerge.fieldType;
merged.copyTo = toMerge.copyTo;
return merged;
}
/**
* Merge changes coming from {@code mergeWith} in place.
*/
protected void doMerge(Mapper mergeWith) {
if (!this.getClass().equals(mergeWith.getClass())) {
String mergedType = mergeWith.getClass().getSimpleName();
if (mergeWith instanceof FieldMapper) {
mergedType = ((FieldMapper) mergeWith).contentType();
}
throw new IllegalArgumentException("mapper [" + fieldType().name() + "] of different type, current_type [" + contentType()
+ "], merged_type [" + mergedType + "]");
}
FieldMapper fieldMergeWith = (FieldMapper) mergeWith;
multiFields = multiFields.merge(fieldMergeWith.multiFields);
private void mergeSharedOptions(FieldMapper mergeWith, List<String> conflicts) {
// apply changeable values
this.fieldType = fieldMergeWith.fieldType;
this.copyTo = fieldMergeWith.copyTo;
if (Objects.equals(this.contentType(), mergeWith.contentType()) == false) {
throw new IllegalArgumentException("mapper [" + fieldType().name() + "] cannot be changed from type [" + contentType()
+ "] to [" + mergeWith.contentType() + "]");
}
MappedFieldType other = mergeWith.fieldType;
boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE;
// TODO: should be validating if index options go "up" (but "down" is ok)
if (indexed != mergeWithIndexed) {
conflicts.add("mapper [" + name() + "] has different [index] values");
}
if (fieldType.stored() != other.stored()) {
conflicts.add("mapper [" + name() + "] has different [store] values");
}
if (fieldType.hasDocValues() != other.hasDocValues()) {
conflicts.add("mapper [" + name() + "] has different [doc_values] values");
}
if (fieldType.omitNorms() && !other.omitNorms()) {
conflicts.add("mapper [" + name() + "] has different [norms] values, cannot change from disable to enabled");
}
if (fieldType.storeTermVectors() != other.storeTermVectors()) {
conflicts.add("mapper [" + name() + "] has different [store_term_vector] values");
}
if (fieldType.storeTermVectorOffsets() != other.storeTermVectorOffsets()) {
conflicts.add("mapper [" + name() + "] has different [store_term_vector_offsets] values");
}
if (fieldType.storeTermVectorPositions() != other.storeTermVectorPositions()) {
conflicts.add("mapper [" + name() + "] has different [store_term_vector_positions] values");
}
if (fieldType.storeTermVectorPayloads() != other.storeTermVectorPayloads()) {
conflicts.add("mapper [" + name() + "] has different [store_term_vector_payloads] values");
}
// null and "default"-named index analyzers both mean the default is used
if (fieldType.indexAnalyzer() == null || "default".equals(fieldType.indexAnalyzer().name())) {
if (other.indexAnalyzer() != null && "default".equals(other.indexAnalyzer().name()) == false) {
conflicts.add("mapper [" + name() + "] has different [analyzer]");
}
} else if (other.indexAnalyzer() == null || "default".equals(other.indexAnalyzer().name())) {
conflicts.add("mapper [" + name() + "] has different [analyzer]");
} else if (fieldType.indexAnalyzer().name().equals(other.indexAnalyzer().name()) == false) {
conflicts.add("mapper [" + name() + "] has different [analyzer]");
}
if (Objects.equals(fieldType.similarity(), other.similarity()) == false) {
conflicts.add("mapper [" + name() + "] has different [similarity]");
}
}
/**
* Merge type-specific options and check for incompatible settings in mappings to be merged
*/
protected abstract void mergeOptions(FieldMapper other, List<String> conflicts);
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.name());
@@ -73,14 +73,14 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
}
}
private static class Builder extends MetadataFieldMapper.Builder<Builder> {
static class Builder extends MetadataFieldMapper.Builder<Builder> {
private boolean enabled = Defaults.ENABLED;
private Builder(MappedFieldType existing) {
Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
}
private Builder enabled(boolean enabled) {
Builder enabled(boolean enabled) {
this.enabled = enabled;
return this;
}

@@ -143,14 +143,13 @@ public class GeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<Geomet
}
@Override
protected void doMerge(Mapper mergeWith) {
protected void mergeGeoOptions(AbstractShapeGeometryFieldMapper<?,?> mergeWith, List<String> conflicts) {
if (mergeWith instanceof LegacyGeoShapeFieldMapper) {
LegacyGeoShapeFieldMapper legacy = (LegacyGeoShapeFieldMapper) mergeWith;
throw new IllegalArgumentException("[" + fieldType().name() + "] with field mapper [" + fieldType().typeName() + "] " +
throw new IllegalArgumentException("[" + fieldType.name() + "] with field mapper [" + fieldType.typeName() + "] " +
"using [BKD] strategy cannot be merged with " + "[" + legacy.fieldType().typeName() + "] with [" +
legacy.fieldType().strategy() + "] strategy");
}
super.doMerge(mergeWith);
}
@Override

@@ -316,9 +316,4 @@ public class IdFieldMapper extends MetadataFieldMapper {
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder;
}
@Override
protected void doMerge(Mapper mergeWith) {
// do nothing here, no merging, but also no exception
}
}

@@ -147,9 +147,4 @@ public class IndexFieldMapper extends MetadataFieldMapper {
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder;
}
@Override
protected void doMerge(Mapper mergeWith) {
// nothing to do
}
}

@@ -398,11 +398,10 @@ public class IpFieldMapper extends FieldMapper {
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
IpFieldMapper other = (IpFieldMapper) mergeWith;
if (other.ignoreMalformed.explicit()) {
this.ignoreMalformed = other.ignoreMalformed;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
IpFieldMapper mergeWith = (IpFieldMapper) other;
if (mergeWith.ignoreMalformed.explicit()) {
this.ignoreMalformed = mergeWith.ignoreMalformed;
}
}

@@ -218,15 +218,6 @@ public final class KeywordFieldMapper extends FieldMapper {
splitQueriesOnWhitespace == other.splitQueriesOnWhitespace;
}
@Override
public void checkCompatibility(MappedFieldType otherFT, List<String> conflicts) {
super.checkCompatibility(otherFT, conflicts);
KeywordFieldType other = (KeywordFieldType) otherFT;
if (Objects.equals(normalizer, other.normalizer) == false) {
conflicts.add("mapper [" + name() + "] has different [normalizer]");
}
}
@Override
public int hashCode() {
return 31 * super.hashCode() + Objects.hash(normalizer, splitQueriesOnWhitespace);

@@ -237,7 +228,7 @@ public final class KeywordFieldMapper extends FieldMapper {
return CONTENT_TYPE;
}
private NamedAnalyzer normalizer() {
NamedAnalyzer normalizer() {
return normalizer;
}

@@ -395,9 +386,12 @@ public final class KeywordFieldMapper extends FieldMapper {
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
this.ignoreAbove = ((KeywordFieldMapper) mergeWith).ignoreAbove;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
KeywordFieldMapper k = (KeywordFieldMapper) other;
if (Objects.equals(fieldType().normalizer, k.fieldType().normalizer) == false) {
conflicts.add("mapper [" + name() + "] has different [normalizer]");
}
this.ignoreAbove = k.ignoreAbove;
}
@Override

@@ -363,34 +363,6 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
GeoShapeFieldType other = (GeoShapeFieldType)fieldType;
// prevent user from changing strategies
if (strategy() != other.strategy()) {
conflicts.add("mapper [" + name() + "] has different [strategy]");
}
// prevent user from changing trees (changes encoding)
if (tree().equals(other.tree()) == false) {
conflicts.add("mapper [" + name() + "] has different [tree]");
}
if ((pointsOnly() != other.pointsOnly())) {
conflicts.add("mapper [" + name() + "] has different points_only");
}
// TODO we should allow this, but at the moment levels is used to build bookkeeping variables
// in lucene's SpatialPrefixTree implementations, need a patch to correct that first
if (treeLevels() != other.treeLevels()) {
conflicts.add("mapper [" + name() + "] has different [tree_levels]");
}
if (precisionInMeters() != other.precisionInMeters()) {
conflicts.add("mapper [" + name() + "] has different [precision]");
}
}
public String tree() {
return tree;
}

@@ -553,14 +525,38 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
}
@Override
protected void doMerge(Mapper mergeWith) {
protected void mergeGeoOptions(AbstractShapeGeometryFieldMapper<?,?> mergeWith, List<String> conflicts) {
if (mergeWith instanceof GeoShapeFieldMapper) {
GeoShapeFieldMapper fieldMapper = (GeoShapeFieldMapper) mergeWith;
throw new IllegalArgumentException("[" + fieldType().name() + "] with field mapper [" + fieldType().typeName() + "] " +
"using [" + fieldType().strategy() + "] strategy cannot be merged with " + "[" + fieldMapper.typeName() +
"] with [BKD] strategy");
}
super.doMerge(mergeWith);
GeoShapeFieldType g = (GeoShapeFieldType)mergeWith.fieldType();
// prevent user from changing strategies
if (fieldType().strategy() != g.strategy()) {
conflicts.add("mapper [" + name() + "] has different [strategy]");
}
// prevent user from changing trees (changes encoding)
if (fieldType().tree().equals(g.tree()) == false) {
conflicts.add("mapper [" + name() + "] has different [tree]");
}
if (fieldType().pointsOnly() != g.pointsOnly()) {
conflicts.add("mapper [" + name() + "] has different points_only");
}
// TODO we should allow this, but at the moment levels is used to build bookkeeping variables
// in lucene's SpatialPrefixTree implementations, need a patch to correct that first
if (fieldType().treeLevels() != g.treeLevels()) {
conflicts.add("mapper [" + name() + "] has different [tree_levels]");
}
if (fieldType().precisionInMeters() != g.precisionInMeters()) {
conflicts.add("mapper [" + name() + "] has different [precision]");
}
}
@Override

@@ -85,7 +85,7 @@ public abstract class MappedFieldType extends FieldType {
this.docValues = ref.hasDocValues();
this.indexAnalyzer = ref.indexAnalyzer();
this.searchAnalyzer = ref.searchAnalyzer();
this.searchQuoteAnalyzer = ref.searchQuoteAnalyzer();
this.searchQuoteAnalyzer = ref.searchQuoteAnalyzer;
this.similarity = ref.similarity();
this.nullValue = ref.nullValue();
this.nullValueAsString = ref.nullValueAsString();
|
||||
@ -149,8 +149,9 @@ public abstract class MappedFieldType extends FieldType {
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(super.hashCode(), name, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
|
||||
int hash = Objects.hash(super.hashCode(), name, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
|
||||
eagerGlobalOrdinals, similarity == null ? null : similarity.name(), nullValue, nullValueAsString, meta);
|
||||
return hash;
|
||||
}
|
||||
|
||||
// TODO: we need to override freeze() and add safety checks that all settings are actually set
|
||||
@ -158,69 +159,6 @@ public abstract class MappedFieldType extends FieldType {
|
||||
/** Returns the name of this type, as would be specified in mapping properties */
|
||||
public abstract String typeName();
|
||||
|
||||
/** Checks this type is the same type as other. Adds a conflict if they are different. */
|
||||
private void checkTypeName(MappedFieldType other) {
|
||||
if (typeName().equals(other.typeName()) == false) {
|
||||
throw new IllegalArgumentException("mapper [" + name + "] cannot be changed from type [" + typeName()
|
||||
+ "] to [" + other.typeName() + "]");
|
||||
} else if (getClass() != other.getClass()) {
|
||||
throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and "
|
||||
+ other.getClass().getSimpleName());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks for any conflicts between this field type and other.
|
||||
* If strict is true, all properties must be equal.
|
||||
* Otherwise, only properties which must never change in an index are checked.
|
||||
*/
|
||||
public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
|
||||
checkTypeName(other);
|
||||
|
||||
boolean indexed = indexOptions() != IndexOptions.NONE;
|
||||
boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE;
|
||||
// TODO: should be validating if index options go "up" (but "down" is ok)
|
||||
if (indexed != mergeWithIndexed) {
|
||||
conflicts.add("mapper [" + name() + "] has different [index] values");
|
||||
}
|
||||
if (stored() != other.stored()) {
|
||||
conflicts.add("mapper [" + name() + "] has different [store] values");
|
||||
}
|
||||
if (hasDocValues() != other.hasDocValues()) {
|
||||
conflicts.add("mapper [" + name() + "] has different [doc_values] values");
|
||||
}
|
||||
if (omitNorms() && !other.omitNorms()) {
|
||||
conflicts.add("mapper [" + name() + "] has different [norms] values, cannot change from disable to enabled");
|
||||
}
|
||||
if (storeTermVectors() != other.storeTermVectors()) {
|
||||
conflicts.add("mapper [" + name() + "] has different [store_term_vector] values");
|
||||
}
|
||||
if (storeTermVectorOffsets() != other.storeTermVectorOffsets()) {
|
||||
conflicts.add("mapper [" + name() + "] has different [store_term_vector_offsets] values");
|
||||
}
|
||||
if (storeTermVectorPositions() != other.storeTermVectorPositions()) {
|
||||
conflicts.add("mapper [" + name() + "] has different [store_term_vector_positions] values");
|
||||
}
|
||||
if (storeTermVectorPayloads() != other.storeTermVectorPayloads()) {
|
||||
conflicts.add("mapper [" + name() + "] has different [store_term_vector_payloads] values");
|
||||
}
|
||||
|
||||
// null and "default"-named index analyzers both mean the default is used
|
||||
if (indexAnalyzer() == null || "default".equals(indexAnalyzer().name())) {
|
||||
if (other.indexAnalyzer() != null && "default".equals(other.indexAnalyzer().name()) == false) {
|
||||
conflicts.add("mapper [" + name() + "] has different [analyzer]");
|
||||
}
|
||||
} else if (other.indexAnalyzer() == null || "default".equals(other.indexAnalyzer().name())) {
|
||||
conflicts.add("mapper [" + name() + "] has different [analyzer]");
|
||||
} else if (indexAnalyzer().name().equals(other.indexAnalyzer().name()) == false) {
|
||||
conflicts.add("mapper [" + name() + "] has different [analyzer]");
|
||||
}
|
||||
|
||||
if (Objects.equals(similarity(), other.similarity()) == false) {
|
||||
conflicts.add("mapper [" + name() + "] has different [similarity]");
|
||||
}
|
||||
}
|
||||
|
||||
public String name() {
|
||||
return name;
|
||||
}
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
@ -61,8 +60,6 @@ class MapperMergeValidator {
|
||||
} else if (fieldNames.add(name) == false) {
|
||||
throw new IllegalArgumentException("Field [" + name + "] is defined twice.");
|
||||
}
|
||||
|
||||
validateFieldMapper(fieldMapper, fieldTypes);
|
||||
}
|
||||
|
||||
Set<String> fieldAliasNames = new HashSet<>();
|
||||
@ -80,24 +77,6 @@ class MapperMergeValidator {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks that the new field mapper does not conflict with existing mappings.
|
||||
*/
|
||||
private static void validateFieldMapper(FieldMapper fieldMapper,
|
||||
FieldTypeLookup fieldTypes) {
|
||||
MappedFieldType newFieldType = fieldMapper.fieldType();
|
||||
MappedFieldType existingFieldType = fieldTypes.get(newFieldType.name());
|
||||
|
||||
if (existingFieldType != null && Objects.equals(newFieldType, existingFieldType) == false) {
|
||||
List<String> conflicts = new ArrayList<>();
|
||||
existingFieldType.checkCompatibility(newFieldType, conflicts);
|
||||
if (conflicts.isEmpty() == false) {
|
||||
throw new IllegalArgumentException("Mapper for [" + newFieldType.name() +
|
||||
"] conflicts with existing mapping:\n" + conflicts.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks that the new field alias is valid.
|
||||
*
|
||||
|
@ -96,7 +96,7 @@ public final class Mapping implements ToXContentFragment {
|
||||
if (mergeInto == null) {
|
||||
merged = metaMergeWith;
|
||||
} else {
|
||||
merged = mergeInto.merge(metaMergeWith);
|
||||
merged = (MetadataFieldMapper) mergeInto.merge(metaMergeWith);
|
||||
}
|
||||
mergedMetadataMappers.put(merged.getClass(), merged);
|
||||
}
|
||||
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
|
||||
@ -73,7 +74,5 @@ public abstract class MetadataFieldMapper extends FieldMapper {
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetadataFieldMapper merge(Mapper mergeWith) {
|
||||
return (MetadataFieldMapper) super.merge(mergeWith);
|
||||
}
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) { }
|
||||
}
|
||||
|
@ -1094,14 +1094,18 @@ public class NumberFieldMapper extends FieldMapper {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
super.doMerge(mergeWith);
|
||||
NumberFieldMapper other = (NumberFieldMapper) mergeWith;
|
||||
if (other.ignoreMalformed.explicit()) {
|
||||
this.ignoreMalformed = other.ignoreMalformed;
|
||||
}
|
||||
if (other.coerce.explicit()) {
|
||||
this.coerce = other.coerce;
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
NumberFieldMapper m = (NumberFieldMapper) other;
|
||||
if (fieldType().type != m.fieldType().type) {
|
||||
conflicts.add("mapper [" + name() + "] cannot be changed from type [" + fieldType().type.name +
|
||||
"] to [" + m.fieldType().type.name + "]");
|
||||
} else {
|
||||
if (m.ignoreMalformed.explicit()) {
|
||||
this.ignoreMalformed = m.ignoreMalformed;
|
||||
}
|
||||
if (m.coerce.explicit()) {
|
||||
this.coerce = m.coerce;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -55,6 +55,7 @@ import java.time.ZoneId;
|
||||
import java.time.ZoneOffset;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
@ -216,7 +217,7 @@ public class RangeFieldMapper extends FieldMapper {
|
||||
public RangeType rangeType() { return rangeType; }
|
||||
|
||||
@Override
|
||||
public MappedFieldType clone() {
|
||||
public RangeFieldType clone() {
|
||||
return new RangeFieldType(this);
|
||||
}
|
||||
|
||||
@ -409,11 +410,10 @@ public class RangeFieldMapper extends FieldMapper {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
super.doMerge(mergeWith);
|
||||
RangeFieldMapper other = (RangeFieldMapper) mergeWith;
|
||||
if (other.coerce.explicit()) {
|
||||
this.coerce = other.coerce;
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
RangeFieldMapper mergeWith = (RangeFieldMapper) other;
|
||||
if (mergeWith.coerce.explicit()) {
|
||||
this.coerce = mergeWith.coerce;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -194,9 +194,4 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
// do nothing here, no merging, but also no exception
|
||||
}
|
||||
}
|
||||
|
@ -282,9 +282,4 @@ public class SeqNoFieldMapper extends MetadataFieldMapper {
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -41,7 +41,6 @@ import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
@ -301,9 +300,8 @@ public class SourceFieldMapper extends MetadataFieldMapper {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith;
|
||||
List<String> conflicts = new ArrayList<>();
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
SourceFieldMapper sourceMergeWith = (SourceFieldMapper) other;
|
||||
if (this.enabled != sourceMergeWith.enabled) {
|
||||
conflicts.add("Cannot update enabled setting for [_source]");
|
||||
}
|
||||
@ -313,8 +311,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
|
||||
if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) {
|
||||
conflicts.add("Cannot update excludes setting for [_source]");
|
||||
}
|
||||
if (conflicts.isEmpty() == false) {
|
||||
throw new IllegalArgumentException("Can't merge because of conflicts: " + conflicts);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -487,6 +487,11 @@ public class TextFieldMapper extends FieldMapper {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return "phrase";
|
||||
@ -508,6 +513,11 @@ public class TextFieldMapper extends FieldMapper {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return "prefix";
|
||||
@ -769,28 +779,6 @@ public class TextFieldMapper extends FieldMapper {
|
||||
public ValuesSourceType getValuesSourceType() {
|
||||
return CoreValuesSourceType.BYTES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
|
||||
super.checkCompatibility(other, conflicts);
|
||||
TextFieldType tft = (TextFieldType) other;
|
||||
if (tft.indexPhrases != this.indexPhrases) {
|
||||
conflicts.add("mapper [" + name() + "] has different [index_phrases] values");
|
||||
}
|
||||
if (Objects.equals(this.prefixFieldType, tft.prefixFieldType) == false) {
|
||||
if (this.prefixFieldType == null) {
|
||||
conflicts.add("mapper [" + name()
|
||||
+ "] has different [index_prefixes] settings, cannot change from disabled to enabled");
|
||||
}
|
||||
else if (tft.prefixFieldType == null) {
|
||||
conflicts.add("mapper [" + name()
|
||||
+ "] has different [index_prefixes] settings, cannot change from enabled to disabled");
|
||||
}
|
||||
else {
|
||||
conflicts.add("mapper [" + name() + "] has different [index_prefixes] settings");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private int positionIncrementGap;
|
||||
@ -881,22 +869,20 @@ public class TextFieldMapper extends FieldMapper {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
super.doMerge(mergeWith);
|
||||
TextFieldMapper mw = (TextFieldMapper) mergeWith;
|
||||
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
TextFieldMapper mw = (TextFieldMapper) other;
|
||||
if (mw.fieldType().indexPhrases != this.fieldType().indexPhrases) {
|
||||
conflicts.add("mapper [" + name() + "] has different [index_phrases] settings");
|
||||
}
|
||||
if (Objects.equals(mw.fieldType().prefixFieldType, this.fieldType().prefixFieldType) == false) {
|
||||
conflicts.add("mapper [" + name() + "] has different [index_prefixes] settings");
|
||||
}
|
||||
if (this.prefixFieldMapper != null && mw.prefixFieldMapper != null) {
|
||||
this.prefixFieldMapper = (PrefixFieldMapper) this.prefixFieldMapper.merge(mw.prefixFieldMapper);
|
||||
} else if (this.prefixFieldMapper != null || mw.prefixFieldMapper != null) {
|
||||
throw new IllegalArgumentException("mapper [" + name() + "] has different index_prefix settings, current ["
|
||||
+ this.prefixFieldMapper + "], merged [" + mw.prefixFieldMapper + "]");
|
||||
}
|
||||
|
||||
if (this.phraseFieldMapper != null && mw.phraseFieldMapper != null) {
|
||||
this.phraseFieldMapper = (PhraseFieldMapper) this.phraseFieldMapper.merge(mw.phraseFieldMapper);
|
||||
} else if (this.fieldType().indexPhrases != mw.fieldType().indexPhrases) {
|
||||
throw new IllegalArgumentException("mapper [" + name() + "] has different index_phrases settings, current ["
|
||||
+ this.fieldType().indexPhrases + "], merged [" + mw.fieldType().indexPhrases + "]");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -332,8 +332,4 @@ public class TypeFieldMapper extends MetadataFieldMapper {
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
// do nothing here, no merging, but also no exception
|
||||
}
|
||||
}
|
||||
|
@ -141,8 +141,4 @@ public class VersionFieldMapper extends MetadataFieldMapper {
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
// nothing to do
|
||||
}
|
||||
}
|
||||
|
@ -18,10 +18,7 @@
|
||||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.index.mapper.BinaryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
||||
public class BinaryFieldTypeTests extends FieldTypeTestCase {
|
||||
public class BinaryFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
|
@ -41,7 +41,6 @@ import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.MapperService.MergeReason;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.junit.Before;
|
||||
|
||||
@ -51,7 +50,7 @@ import java.util.Collections;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public class BooleanFieldMapperTests extends FieldMapperTestCase<BooleanFieldMapper.Builder> {
|
||||
private IndexService indexService;
|
||||
private DocumentMapperParser parser;
|
||||
|
||||
@ -59,6 +58,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public void setup() {
|
||||
indexService = createIndex("test");
|
||||
parser = indexService.mapperService().documentMapperParser();
|
||||
setDummyNullValue(true);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -279,4 +279,10 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
|
||||
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping3, mapper.mappingSource().toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected BooleanFieldMapper.Builder newBuilder() {
|
||||
return new BooleanFieldMapper.Builder("boolean");
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -21,19 +21,13 @@ package org.elasticsearch.index.mapper;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.junit.Before;
|
||||
|
||||
public class BooleanFieldTypeTests extends FieldTypeTestCase {
|
||||
public class BooleanFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new BooleanFieldMapper.BooleanFieldType();
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
setDummyNullValue(true);
|
||||
}
|
||||
|
||||
public void testValueFormat() {
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
assertEquals(false, ft.docValueFormat(null, null).format(0));
|
||||
|
@ -43,13 +43,16 @@ import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.search.suggest.completion.context.ContextBuilder;
|
||||
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
|
||||
import org.hamcrest.FeatureMatcher;
|
||||
import org.hamcrest.Matcher;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.hamcrest.core.CombinableMatcher;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
@ -62,7 +65,24 @@ import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public class CompletionFieldMapperTests extends FieldMapperTestCase<CompletionFieldMapper.Builder> {
|
||||
|
||||
@Before
|
||||
public void addModifiers() {
|
||||
addBooleanModifier("preserve_separators", false, CompletionFieldMapper.Builder::preserveSeparators);
|
||||
addBooleanModifier("preserve_position_increments", false, CompletionFieldMapper.Builder::preservePositionIncrements);
|
||||
addModifier("context_mappings", false, (a, b) -> {
|
||||
ContextMappings contextMappings = new ContextMappings(Arrays.asList(ContextBuilder.category("foo").build(),
|
||||
ContextBuilder.geo("geo").build()));
|
||||
a.contextMappings(contextMappings);
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CompletionFieldMapper.Builder newBuilder() {
|
||||
return new CompletionFieldMapper.Builder("completion");
|
||||
}
|
||||
|
||||
public void testDefaultConfiguration() throws IOException {
|
||||
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("completion")
|
||||
@ -958,4 +978,5 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,59 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.search.suggest.completion.context.ContextBuilder;
|
||||
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
public class CompletionFieldTypeTests extends FieldTypeTestCase {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new CompletionFieldMapper.CompletionFieldType();
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
addModifier(new Modifier("preserve_separators", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
|
||||
cft.setPreserveSep(false);
|
||||
}
|
||||
});
|
||||
addModifier(new Modifier("preserve_position_increments", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
|
||||
cft.setPreservePositionIncrements(false);
|
||||
}
|
||||
});
|
||||
addModifier(new Modifier("context_mappings", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
|
||||
ContextMappings contextMappings = new ContextMappings(Arrays.asList(ContextBuilder.category("foo").build(),
|
||||
ContextBuilder.geo("geo").build()));
|
||||
cft.setContextMappings(contextMappings);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -32,7 +32,6 @@ import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.MapperService.MergeReason;
|
||||
import org.elasticsearch.index.termvectors.TermVectorsService;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.junit.Before;
|
||||
|
||||
@ -42,11 +41,12 @@ import java.time.ZoneOffset;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Locale;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class DateFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public class DateFieldMapperTests extends FieldMapperTestCase<DateFieldMapper.Builder> {
|
||||
|
||||
IndexService indexService;
|
||||
DocumentMapperParser parser;
|
||||
@ -55,6 +55,13 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public void setup() {
|
||||
indexService = createIndex("test");
|
||||
parser = indexService.mapperService().documentMapperParser();
|
||||
addModifier("format", false, (a, b) -> {
|
||||
a.format("basic_week_date");
|
||||
});
|
||||
addModifier("locale", false, (a, b) -> {
|
||||
a.locale(Locale.CANADA);
|
||||
b.locale(Locale.JAPAN);
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -62,6 +69,11 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
|
||||
return pluginList(InternalSettingsPlugin.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DateFieldMapper.Builder newBuilder() {
|
||||
return new DateFieldMapper.Builder("date");
|
||||
}
|
||||
|
||||
public void testDefaults() throws Exception {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "date").endObject().endObject()
|
||||
@ -398,7 +410,7 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> mapper.merge(update.mapping()));
|
||||
assertEquals("mapper [date] of different type, current_type [date], merged_type [text]", e.getMessage());
|
||||
assertEquals("mapper [date] cannot be changed from type [date] to [text]", e.getMessage());
|
||||
}
|
||||
|
||||
public void testIllegalFormatField() throws Exception {
|
||||
@ -444,4 +456,5 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
|
||||
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping3, mapper.mappingSource().toString());
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -58,33 +58,37 @@ import org.junit.Before;
|
||||
import java.io.IOException;
|
||||
import java.time.Instant;
|
||||
import java.time.ZoneOffset;
|
||||
import java.util.Locale;
|
||||
|
||||
public class DateFieldTypeTests extends FieldTypeTestCase {
|
||||
public class DateFieldTypeTests extends FieldTypeTestCase<DateFieldType> {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new DateFieldMapper.DateFieldType();
|
||||
protected DateFieldType createDefaultFieldType() {
|
||||
return new DateFieldType();
|
||||
}
|
||||
|
||||
@Before
|
||||
public void addModifiers() {
|
||||
addModifier(t -> {
|
||||
DateFieldType copy = (DateFieldType) t.clone();
|
||||
if (copy.dateTimeFormatter == DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER) {
|
||||
copy.setDateTimeFormatter(DateFormatter.forPattern("epoch_millis"));
|
||||
} else {
|
||||
copy.setDateTimeFormatter(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER);
|
||||
}
|
||||
return copy;
|
||||
});
|
||||
addModifier(t -> {
|
||||
DateFieldType copy = (DateFieldType) t.clone();
|
||||
if (copy.resolution() == Resolution.MILLISECONDS) {
|
||||
copy.setResolution(Resolution.NANOSECONDS);
|
||||
} else {
|
||||
copy.setResolution(Resolution.MILLISECONDS);
|
||||
}
|
||||
return copy;
|
||||
});
|
||||
}
|
||||
|
||||
private static long nowInMillis;
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
setDummyNullValue(10);
|
||||
addModifier(new Modifier("format", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("basic_week_date"));
|
||||
}
|
||||
});
|
||||
addModifier(new Modifier("locale", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("strict_date_optional_time").withLocale(Locale.CANADA));
|
||||
}
|
||||
});
|
||||
nowInMillis = randomNonNegativeLong();
|
||||
}
|
||||
|
||||
public void testIsFieldWithinRangeEmptyReader() throws IOException {
|
||||
QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> nowInMillis);
|
||||
|
@ -39,6 +39,7 @@ import java.io.IOException;
|
||||
import java.io.StringReader;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
public class DocumentFieldMapperTests extends LuceneTestCase {
|
||||
|
||||
@ -114,6 +115,11 @@ public class DocumentFieldMapperTests extends LuceneTestCase {
|
||||
protected void parseCreateField(ParseContext context) throws IOException {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return null;
|
||||
|
@ -73,7 +73,7 @@ public class DocumentMapperParserTests extends ESSingleNodeTestCase {
|
||||
.endObject().endObject().endObject());
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
|
||||
mapperParser.parse("type", new CompressedXContent(mapping)));
|
||||
assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] of different type"));
|
||||
assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [text] to [ObjectMapper]"));
|
||||
}
|
||||
|
||||
public void testMultiFieldsWithFieldAlias() throws Exception {
|
||||
|
@ -37,6 +37,7 @@ import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.index.mapper.TypeParsers.parseField;
|
||||
@ -206,11 +207,6 @@ public class ExternalMapper extends FieldMapper {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
// ignore this for now
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
|
||||
ExternalMapper update = (ExternalMapper) super.updateFieldType(fullNameToFieldType);
|
||||
@ -247,6 +243,11 @@ public class ExternalMapper extends FieldMapper {
|
||||
return Iterators.concat(super.iterator(), Arrays.asList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(simpleName());
|
||||
|
@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.index.mapper.TypeParsers.parseTextField;
|
||||
@ -141,13 +142,13 @@ public class FakeStringFieldMapper extends FieldMapper {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return CONTENT_TYPE;
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith) {
|
||||
super.doMerge(mergeWith);
|
||||
protected String contentType() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -26,7 +26,7 @@ import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
@ -36,7 +36,22 @@ import java.util.Set;
|
||||
import java.util.SortedSet;
|
||||
import java.util.TreeSet;
|
||||
|
||||
public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public class FieldNamesFieldMapperTests extends FieldMapperTestCase<FieldNamesFieldMapper.Builder> {
|
||||
|
||||
@Override
|
||||
protected FieldNamesFieldMapper.Builder newBuilder() {
|
||||
return new FieldNamesFieldMapper.Builder(null);
|
||||
}
|
||||
|
||||
@Before
|
||||
public void addModifiers() {
|
||||
addBooleanModifier("enabled", true, FieldNamesFieldMapper.Builder::enabled);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsDocValues() {
|
||||
return false;
|
||||
}
|
||||
|
||||
private static SortedSet<String> extract(String path) {
|
||||
SortedSet<String> set = new TreeSet<>();
|
||||
|
@ -27,30 +27,18 @@ import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class FieldNamesFieldTypeTests extends FieldTypeTestCase {
|
||||
public class FieldNamesFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new FieldNamesFieldMapper.FieldNamesFieldType();
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
addModifier(new Modifier("enabled", true) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
FieldNamesFieldMapper.FieldNamesFieldType fnft = (FieldNamesFieldMapper.FieldNamesFieldType)ft;
|
||||
fnft.setEnabled(!fnft.isEnabled());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void testTermQuery() {
|
||||
|
||||
FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = new FieldNamesFieldMapper.FieldNamesFieldType();
|
||||
|
@ -20,7 +20,7 @@ package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType;
|
||||
|
||||
public class GeoPointFieldTypeTests extends FieldTypeTestCase {
|
||||
public class GeoPointFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new GeoPointFieldType();
|
||||
|
@ -26,9 +26,9 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
@ -39,7 +39,20 @@ import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
||||
public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldMapper.Builder> {
|
||||
|
||||
@Override
|
||||
protected GeoShapeFieldMapper.Builder newBuilder() {
|
||||
return new GeoShapeFieldMapper.Builder("geoshape");
|
||||
}
|
||||
|
||||
@Before
|
||||
public void addModifiers() {
|
||||
addModifier("orientation", true, (a, b) -> {
|
||||
a.orientation(ShapeBuilder.Orientation.LEFT);
|
||||
b.orientation(ShapeBuilder.Orientation.RIGHT);
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
@ -302,5 +315,4 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public String toXContentString(GeoShapeFieldMapper mapper) throws IOException {
|
||||
return toXContentString(mapper, true);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,40 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.index.mapper.GeoShapeFieldMapper.GeoShapeFieldType;
|
||||
import org.junit.Before;
|
||||
|
||||
public class GeoShapeFieldTypeTests extends FieldTypeTestCase {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new GeoShapeFieldType();
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
addModifier(new FieldTypeTestCase.Modifier("orientation", true) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
((GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -31,7 +31,7 @@ import org.mockito.Mockito;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
public class IdFieldTypeTests extends FieldTypeTestCase {
|
||||
public class IdFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new IdFieldMapper.IdFieldType();
|
||||
|
@ -28,7 +28,7 @@ import org.apache.lucene.search.WildcardQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
|
||||
public class IgnoredFieldTypeTests extends FieldTypeTestCase {
|
||||
public class IgnoredFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
|
@ -33,7 +33,7 @@ import java.util.function.Predicate;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class IndexFieldTypeTests extends FieldTypeTestCase {
|
||||
public class IndexFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
|
@ -18,19 +18,19 @@
|
||||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.apache.lucene.document.InetAddressPoint;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.search.BooleanClause.Occur;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.ConstantScoreQuery;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.BooleanClause.Occur;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.network.InetAddresses;
|
||||
|
||||
public class IpFieldTypeTests extends FieldTypeTestCase {
|
||||
import java.net.InetAddress;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class IpFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new IpFieldMapper.IpFieldType();
|
||||
|
@ -40,7 +40,6 @@ import org.elasticsearch.index.termvectors.TermVectorsService;
|
||||
import org.elasticsearch.indices.analysis.AnalysisModule;
|
||||
import org.elasticsearch.plugins.AnalysisPlugin;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.junit.Before;
|
||||
|
||||
@ -58,7 +57,13 @@ import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
||||
public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public class KeywordFieldMapperTests extends FieldMapperTestCase<KeywordFieldMapper.Builder> {
|
||||
|
||||
@Override
|
||||
protected KeywordFieldMapper.Builder newBuilder() {
|
||||
return new KeywordFieldMapper.Builder("keyword");
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a copy of the lowercase token filter which we use for testing merge errors.
|
||||
*/
|
||||
@ -92,6 +97,10 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
|
||||
.put("index.analysis.normalizer.my_other_lowercase.type", "custom")
|
||||
.putList("index.analysis.normalizer.my_other_lowercase.filter", "mock_other_lowercase").build());
|
||||
parser = indexService.mapperService().documentMapperParser();
|
||||
addModifier("normalizer", false, (a, b) -> {
|
||||
a.normalizer(indexService.getIndexAnalyzers(), "my_lowercase");
|
||||
});
|
||||
addBooleanModifier("split_queries_on_whitespace", true, KeywordFieldMapper.Builder::splitQueriesOnWhitespace);
|
||||
}
|
||||
|
||||
public void testDefaults() throws Exception {
|
||||
@ -347,11 +356,11 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public void testCustomNormalizer() throws IOException {
|
||||
checkLowercaseNormalizer("my_lowercase");
|
||||
}
|
||||
|
||||
|
||||
public void testInBuiltNormalizer() throws IOException {
|
||||
checkLowercaseNormalizer("lowercase");
|
||||
}
|
||||
|
||||
checkLowercaseNormalizer("lowercase");
|
||||
}
|
||||
|
||||
public void checkLowercaseNormalizer(String normalizerName) throws IOException {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
|
@ -24,6 +24,7 @@ import org.apache.lucene.analysis.LowerCaseFilter;
|
||||
import org.apache.lucene.analysis.TokenFilter;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.Tokenizer;
|
||||
import org.apache.lucene.analysis.core.KeywordAnalyzer;
|
||||
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.Term;
|
||||
@ -50,27 +51,28 @@ import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
public class KeywordFieldTypeTests extends FieldTypeTestCase {
|
||||
public class KeywordFieldTypeTests extends FieldTypeTestCase<KeywordFieldType> {
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
addModifier(new Modifier("normalizer", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType type) {
|
||||
((KeywordFieldType) type).setNormalizer(Lucene.KEYWORD_ANALYZER);
|
||||
public void addModifiers() {
|
||||
addModifier(t -> {
|
||||
KeywordFieldType copy = t.clone();
|
||||
if (copy.normalizer() == null) {
|
||||
copy.setNormalizer(new NamedAnalyzer("keyword", AnalyzerScope.INDEX, new KeywordAnalyzer()));
|
||||
} else {
|
||||
copy.setNormalizer(null);
|
||||
}
|
||||
return copy;
|
||||
});
|
||||
addModifier(new Modifier("split_queries_on_whitespace", true) {
|
||||
@Override
|
||||
public void modify(MappedFieldType type) {
|
||||
KeywordFieldType keywordType = (KeywordFieldType) type;
|
||||
keywordType.setSplitQueriesOnWhitespace(!keywordType.splitQueriesOnWhitespace());
|
||||
}
|
||||
addModifier(t -> {
|
||||
KeywordFieldType copy = t.clone();
|
||||
copy.setSplitQueriesOnWhitespace(t.splitQueriesOnWhitespace() == false);
|
||||
return copy;
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
protected KeywordFieldType createDefaultFieldType() {
|
||||
return new KeywordFieldMapper.KeywordFieldType();
|
||||
}
|
||||
|
||||
|
@ -37,9 +37,9 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.geometry.Point;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
@ -53,7 +53,40 @@ import static org.hamcrest.Matchers.not;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class LegacyGeoShapeFieldMapperTests extends ESSingleNodeTestCase {
|
||||
public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGeoShapeFieldMapper.Builder> {
|
||||
|
||||
@Override
|
||||
protected LegacyGeoShapeFieldMapper.Builder newBuilder() {
|
||||
return new LegacyGeoShapeFieldMapper.Builder("geoshape");
|
||||
}
|
||||
|
||||
@Before
|
||||
public void addModifiers() {
|
||||
addModifier("tree", false, (a, b) -> {
|
||||
a.fieldType().setTree("geohash");
|
||||
b.fieldType().setTree("quadtree");
|
||||
});
|
||||
addModifier("strategy", false, (a, b) -> {
|
||||
a.fieldType().setStrategy(SpatialStrategy.TERM);
|
||||
b.fieldType().setStrategy(SpatialStrategy.RECURSIVE);
|
||||
});
|
||||
addModifier("tree_levels", false, (a, b) -> {
|
||||
a.fieldType().setTreeLevels(2);
|
||||
b.fieldType().setTreeLevels(3);
|
||||
});
|
||||
addModifier("precision", false, (a, b) -> {
|
||||
a.fieldType().setPrecisionInMeters(10);
|
||||
b.fieldType().setPrecisionInMeters(20);
|
||||
});
|
||||
addModifier("distance_error_pct", true, (a, b) -> {
|
||||
a.fieldType().setDistanceErrorPct(0.5);
|
||||
b.fieldType().setDistanceErrorPct(0.6);
|
||||
});
|
||||
addModifier("orientation", true, (a, b) -> {
|
||||
a.fieldType().setOrientation(ShapeBuilder.Orientation.RIGHT);
|
||||
b.fieldType().setOrientation(ShapeBuilder.Orientation.LEFT);
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
|
@ -19,58 +19,16 @@
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.common.geo.SpatialStrategy;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper.GeoShapeFieldType;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class LegacyGeoShapeFieldTypeTests extends FieldTypeTestCase {
|
||||
public class LegacyGeoShapeFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new GeoShapeFieldType();
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
addModifier(new Modifier("tree", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
((GeoShapeFieldType)ft).setTree("geohash");
|
||||
}
|
||||
});
|
||||
addModifier(new Modifier("strategy", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
((GeoShapeFieldType)ft).setStrategy(SpatialStrategy.TERM);
|
||||
}
|
||||
});
|
||||
addModifier(new Modifier("tree_levels", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
((GeoShapeFieldType)ft).setTreeLevels(10);
|
||||
}
|
||||
});
|
||||
addModifier(new Modifier("precision", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
((GeoShapeFieldType)ft).setPrecisionInMeters(20);
|
||||
}
|
||||
});
|
||||
addModifier(new Modifier("distance_error_pct", true) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
((GeoShapeFieldType)ft).setDefaultDistanceErrorPct(0.5);
|
||||
}
|
||||
});
|
||||
addModifier(new Modifier("orientation", true) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
((GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Test for {@link LegacyGeoShapeFieldMapper.GeoShapeFieldType#setStrategy(SpatialStrategy)} that checks
|
||||
* that {@link LegacyGeoShapeFieldMapper.GeoShapeFieldType#pointsOnly()} gets set as a side effect when using SpatialStrategy.TERM
|
||||
|
@ -32,56 +32,6 @@ import static java.util.Collections.singletonList;
|
||||
|
||||
public class MapperMergeValidatorTests extends ESTestCase {
|
||||
|
||||
public void testMismatchedFieldTypes() {
|
||||
FieldMapper existingField = new MockFieldMapper("foo");
|
||||
FieldTypeLookup lookup = new FieldTypeLookup()
|
||||
.copyAndAddAll("type", singletonList(existingField), emptyList());
|
||||
|
||||
FieldTypeLookupTests.OtherFakeFieldType newFieldType = new FieldTypeLookupTests.OtherFakeFieldType();
|
||||
newFieldType.setName("foo");
|
||||
FieldMapper invalidField = new MockFieldMapper("foo", newFieldType);
|
||||
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
|
||||
MapperMergeValidator.validateNewMappers(
|
||||
emptyList(),
|
||||
singletonList(invalidField),
|
||||
emptyList(),
|
||||
lookup));
|
||||
assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]"));
|
||||
}
|
||||
|
||||
public void testConflictingFieldTypes() {
|
||||
FieldMapper existingField = new MockFieldMapper("foo");
|
||||
FieldTypeLookup lookup = new FieldTypeLookup()
|
||||
.copyAndAddAll("type", singletonList(existingField), emptyList());
|
||||
|
||||
MappedFieldType newFieldType = new MockFieldMapper.FakeFieldType();
|
||||
newFieldType.setName("foo");
|
||||
newFieldType.setBoost(2.0f);
|
||||
FieldMapper validField = new MockFieldMapper("foo", newFieldType);
|
||||
|
||||
// Boost is updateable, so no exception should be thrown.
MapperMergeValidator.validateNewMappers(
emptyList(),
singletonList(validField),
emptyList(),
lookup);

MappedFieldType invalidFieldType = new MockFieldMapper.FakeFieldType();
invalidFieldType.setName("foo");
invalidFieldType.setStored(true);
FieldMapper invalidField = new MockFieldMapper("foo", invalidFieldType);

// Store is not updateable, so we expect an exception.
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
MapperMergeValidator.validateNewMappers(
emptyList(),
singletonList(invalidField),
emptyList(),
lookup));
assertTrue(e.getMessage().contains("has different [store] values"));
}

public void testDuplicateFieldAliasAndObject() {
ObjectMapper objectMapper = createObjectMapper("some.path");
FieldAliasMapper aliasMapper = new FieldAliasMapper("path", "some.path", "field");

@ -43,7 +43,7 @@ import java.util.List;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.containsString;

public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase<NumberFieldMapper.Builder> {

@Override
protected void setTypeList() {
@ -483,4 +483,9 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
return BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject());
}
}

@Override
protected NumberFieldMapper.Builder newBuilder() {
return new NumberFieldMapper.Builder("number", NumberType.LONG);
}
}

@ -67,7 +67,7 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

public class NumberFieldTypeTests extends FieldTypeTestCase {
public class NumberFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {

NumberType type;

@ -58,7 +58,7 @@ public class ObjectMapperTests extends ESSingleNodeTestCase {
" }"),
XContentType.JSON));
});
assertTrue(e.getMessage(), e.getMessage().contains("different type"));
assertTrue(e.getMessage(), e.getMessage().contains("cannot be changed from type"));
}

public void testEmptyArrayProperties() throws Exception {

@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;

import java.io.IOException;
import java.net.InetAddress;
@ -47,7 +48,25 @@ import static org.elasticsearch.index.query.RangeQueryBuilder.LT_FIELD;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsString;

public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase<RangeFieldMapper.Builder> {

@Override
protected RangeFieldMapper.Builder newBuilder() {
return new RangeFieldMapper.Builder("range", RangeType.DATE)
.format("iso8601");
}

@Before
public void addModifiers() {
addModifier("format", true, (a, b) -> {
a.format("basic_week_date");
b.format("strict_week_date");
});
addModifier("locale", true, (a, b) -> {
a.locale(Locale.CANADA);
b.locale(Locale.JAPAN);
});
}

@Override
protected Collection<Class<? extends Plugin>> getPlugins() {

@ -48,12 +48,11 @@ import org.joda.time.DateTime;
import org.junit.Before;

import java.net.InetAddress;
import java.util.Locale;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;

public class RangeFieldTypeTests extends FieldTypeTestCase {
public class RangeFieldTypeTests extends FieldTypeTestCase<RangeFieldType> {
RangeType type;
protected static String FIELDNAME = "field";
protected static int DISTANCE = 10;
@ -64,17 +63,14 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
type = randomFrom(RangeType.values());
nowInMillis = randomNonNegativeLong();
if (type == RangeType.DATE) {
addModifier(new Modifier("format", true) {
@Override
public void modify(MappedFieldType ft) {
((RangeFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("basic_week_date"));
}
});
addModifier(new Modifier("locale", true) {
@Override
public void modify(MappedFieldType ft) {
((RangeFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("date_optional_time").withLocale(Locale.CANADA));
addModifier(t -> {
RangeFieldType other = t.clone();
if (other.dateTimeFormatter == DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER) {
other.setDateTimeFormatter(DateFormatter.forPattern("epoch_millis"));
} else {
other.setDateTimeFormatter(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER);
}
return other;
});
}
}

@ -27,7 +27,7 @@ import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;

public class RoutingFieldTypeTests extends FieldTypeTestCase {
public class RoutingFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {

@Override
protected MappedFieldType createDefaultFieldType() {

@ -18,10 +18,7 @@
*/
package org.elasticsearch.index.mapper;

import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.SourceFieldMapper;

public class SourceFieldTypeTests extends FieldTypeTestCase {
public class SourceFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
@Override
protected MappedFieldType createDefaultFieldType() {
return new SourceFieldMapper.SourceFieldType();

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.MockSynonymAnalyzer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
@ -59,6 +60,8 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
@ -69,7 +72,6 @@ import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;

@ -85,7 +87,39 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.Is.is;

public class TextFieldMapperTests extends ESSingleNodeTestCase {
public class TextFieldMapperTests extends FieldMapperTestCase<TextFieldMapper.Builder> {

@Override
protected TextFieldMapper.Builder newBuilder() {
return new TextFieldMapper.Builder("text")
.indexAnalyzer(new NamedAnalyzer("a", AnalyzerScope.INDEX, new StandardAnalyzer()));
}

@Before
public void addModifiers() {
addBooleanModifier("fielddata", true, TextFieldMapper.Builder::fielddata);
addModifier("fielddata_frequency_filter.min", true, (a, b) -> {
a.fielddataFrequencyFilter(1, 10, 10);
a.fielddataFrequencyFilter(2, 10, 10);
});
addModifier("fielddata_frequency_filter.max", true, (a, b) -> {
a.fielddataFrequencyFilter(1, 10, 10);
a.fielddataFrequencyFilter(1, 12, 10);
});
addModifier("fielddata_frequency_filter.min_segment_size", true, (a, b) -> {
a.fielddataFrequencyFilter(1, 10, 10);
a.fielddataFrequencyFilter(1, 10, 11);
});
addBooleanModifier("index_phrases", false, TextFieldMapper.Builder::indexPhrases);
addModifier("index_prefixes", false, (a, b) -> {
a.indexPrefixes(2, 4);
});
}

@Override
protected boolean supportsDocValues() {
return false;
}

IndexService indexService;
DocumentMapperParser parser;

@ -38,6 +38,7 @@ import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
import org.junit.Before;

import java.util.ArrayList;
@ -47,65 +48,37 @@ import java.util.List;
import static org.apache.lucene.search.MultiTermQuery.CONSTANT_SCORE_REWRITE;
import static org.hamcrest.Matchers.equalTo;

public class TextFieldTypeTests extends FieldTypeTestCase {

@Override
protected MappedFieldType createDefaultFieldType() {
return new TextFieldMapper.TextFieldType();
}
public class TextFieldTypeTests extends FieldTypeTestCase<TextFieldType> {

@Before
public void setupProperties() {
addModifier(new Modifier("fielddata", true) {
@Override
public void modify(MappedFieldType ft) {
TextFieldMapper.TextFieldType tft = (TextFieldMapper.TextFieldType)ft;
tft.setFielddata(tft.fielddata() == false);
}
public void addModifiers() {
addModifier(t -> {
TextFieldType copy = t.clone();
copy.setFielddata(t.fielddata() == false);
return copy;
});
addModifier(new Modifier("fielddata_frequency_filter.min", true) {
@Override
public void modify(MappedFieldType ft) {
TextFieldMapper.TextFieldType tft = (TextFieldMapper.TextFieldType)ft;
tft.setFielddataMinFrequency(3);
}
addModifier(t -> {
TextFieldType copy = t.clone();
copy.setFielddataMaxFrequency(t.fielddataMaxFrequency() + 1);
return copy;
});
addModifier(new Modifier("fielddata_frequency_filter.max", true) {
@Override
public void modify(MappedFieldType ft) {
TextFieldMapper.TextFieldType tft = (TextFieldMapper.TextFieldType)ft;
tft.setFielddataMaxFrequency(0.2);
}
addModifier(t -> {
TextFieldType copy = t.clone();
copy.setFielddataMinFrequency(t.fielddataMinFrequency() + 1);
return copy;
});
addModifier(new Modifier("fielddata_frequency_filter.min_segment_size", true) {
@Override
public void modify(MappedFieldType ft) {
TextFieldMapper.TextFieldType tft = (TextFieldMapper.TextFieldType)ft;
tft.setFielddataMinSegmentSize(1000);
}
});
addModifier(new Modifier("index_phrases", false) {
@Override
public void modify(MappedFieldType ft) {
TextFieldMapper.TextFieldType tft = (TextFieldMapper.TextFieldType) ft;
tft.setIndexPhrases(true);
}
});
addModifier(new Modifier("index_prefixes", false) {
@Override
public void modify(MappedFieldType ft) {
TextFieldMapper.TextFieldType tft = (TextFieldMapper.TextFieldType)ft;
TextFieldMapper.PrefixFieldType pft = tft.getPrefixFieldType();
if (pft == null) {
tft.setPrefixFieldType(new TextFieldMapper.PrefixFieldType(ft.name(), ft.name() + "._index_prefix", 3, 3));
}
else {
tft.setPrefixFieldType(null);
}
}
addModifier(t -> {
TextFieldType copy = t.clone();
copy.setFielddataMinSegmentSize(t.fielddataMinSegmentSize() + 1);
return copy;
});
}

@Override
protected TextFieldType createDefaultFieldType() {
return new TextFieldType();
}

public void testTermQuery() {
MappedFieldType ft = createDefaultFieldType();
ft.setName("field");
@ -185,7 +158,7 @@ public class TextFieldTypeTests extends FieldTypeTestCase {
}

public void testIndexPrefixes() {
TextFieldMapper.TextFieldType ft = new TextFieldMapper.TextFieldType();
TextFieldType ft = new TextFieldType();
ft.setName("field");
ft.setPrefixFieldType(new TextFieldMapper.PrefixFieldType("field", "field._index_prefix", 2, 10));

@ -31,7 +31,7 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.test.VersionUtils;
import org.mockito.Mockito;

public class TypeFieldTypeTests extends FieldTypeTestCase {
public class TypeFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
@Override
protected MappedFieldType createDefaultFieldType() {
return new TypeFieldMapper.TypeFieldType();

@ -101,19 +101,13 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("foo").field("type", "double").endObject()
.endObject().endObject().endObject();

try {
mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
}
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));

try {
mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
}
e = expectThrows(IllegalArgumentException.class, () ->
mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));

assertThat(((FieldMapper) mapperService.documentMapper("type").mapping().root().getMapper("foo")).fieldType().typeName(),
equalTo("long"));
@ -129,13 +123,9 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("foo").field("type", "double").endObject()
.endObject().endObject().endObject();

try {
mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE);
fail();
} catch (IllegalArgumentException e) {
// expected
assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]"));
}
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));

assertThat(((FieldMapper) mapperService.documentMapper("type").mapping().root().getMapper("foo")).fieldType().typeName(),
equalTo("long"));
@ -190,7 +180,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
mapperService2.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
e = expectThrows(IllegalArgumentException.class,
() -> mapperService2.merge("type", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), equalTo("mapper [foo] of different type, current_type [long], merged_type [ObjectMapper]"));
assertThat(e.getMessage(), equalTo("mapper [foo] cannot be changed from type [long] to [ObjectMapper]"));
}

public void testMappingVersion() {

@ -18,10 +18,7 @@
*/
package org.elasticsearch.index.mapper;

import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.VersionFieldMapper;

public class VersionFieldTypeTests extends FieldTypeTestCase {
public class VersionFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {
@Override
protected MappedFieldType createDefaultFieldType() {
return new VersionFieldMapper.VersionFieldType();

@ -24,7 +24,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;

@ -35,7 +34,7 @@ import java.util.Set;

import static org.hamcrest.Matchers.containsString;

public abstract class AbstractNumericFieldMapperTestCase extends ESSingleNodeTestCase {
public abstract class AbstractNumericFieldMapperTestCase<T extends FieldMapper.Builder<?>> extends FieldMapperTestCase<T> {
protected Set<String> TYPES;
protected Set<String> WHOLE_TYPES;
protected IndexService indexService;

@ -0,0 +1,208 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.mapper;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.search.similarities.BM25Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiConsumer;

import static org.hamcrest.Matchers.containsString;

public abstract class FieldMapperTestCase<T extends FieldMapper.Builder<?>> extends ESSingleNodeTestCase {

protected final Settings SETTINGS = Settings.builder()
.put("index.version.created", Version.CURRENT)
.build();

private final class Modifier {
final String property;
final boolean updateable;
final BiConsumer<T, T> modifier;

Modifier(String property, boolean updateable, BiConsumer<T, T> modifier) {
this.property = property;
this.updateable = updateable;
this.modifier = modifier;
}

void apply(T first, T second) {
modifier.accept(first, second);
}
}

private Modifier booleanModifier(String name, boolean updateable, BiConsumer<T, Boolean> method) {
return new Modifier(name, updateable, (a, b) -> {
method.accept(a, true);
method.accept(b, false);
});
}

private Object dummyNullValue = "dummyvalue";

/** Sets the null value used by the modifier for null value testing. This should be set in an @Before method. */
protected void setDummyNullValue(Object value) {
dummyNullValue = value;
}

protected boolean supportsDocValues() {
return true;
}

protected boolean supportsStore() {
return true;
}

private final List<Modifier> modifiers = new ArrayList<>(Arrays.asList(
new Modifier("analyzer", false, (a, b) -> {
a.indexAnalyzer(new NamedAnalyzer("a", AnalyzerScope.INDEX, new StandardAnalyzer()));
a.indexAnalyzer(new NamedAnalyzer("b", AnalyzerScope.INDEX, new StandardAnalyzer()));
}),
new Modifier("boost", true, (a, b) -> {
a.fieldType().setBoost(1.1f);
b.fieldType().setBoost(1.2f);
}),
new Modifier("doc_values", supportsDocValues() == false, (a, b) -> {
if (supportsDocValues()) {
a.docValues(true);
b.docValues(false);
}
}),
booleanModifier("eager_global_ordinals", true, (a, t) -> a.fieldType().setEagerGlobalOrdinals(t)),
booleanModifier("norms", false, FieldMapper.Builder::omitNorms),
new Modifier("null_value", true, (a, b) -> {
a.fieldType().setNullValue(dummyNullValue);
}),
new Modifier("search_analyzer", true, (a, b) -> {
a.searchAnalyzer(new NamedAnalyzer("a", AnalyzerScope.INDEX, new StandardAnalyzer()));
a.searchAnalyzer(new NamedAnalyzer("b", AnalyzerScope.INDEX, new StandardAnalyzer()));
}),
new Modifier("search_quote_analyzer", true, (a, b) -> {
a.searchQuoteAnalyzer(new NamedAnalyzer("a", AnalyzerScope.INDEX, new StandardAnalyzer()));
a.searchQuoteAnalyzer(new NamedAnalyzer("b", AnalyzerScope.INDEX, new StandardAnalyzer()));
}),
new Modifier("similarity", false, (a, b) -> {
a.similarity(new SimilarityProvider("a", new BM25Similarity()));
b.similarity(new SimilarityProvider("b", new BM25Similarity()));
}),
new Modifier("store", supportsStore() == false, (a, b) -> {
if (supportsStore()) {
a.store(true);
b.store(false);
}
}),
new Modifier("term_vector", false, (a, b) -> {
a.storeTermVectors(true);
b.storeTermVectors(false);
}),
new Modifier("term_vector_positions", false, (a, b) -> {
a.storeTermVectors(true);
b.storeTermVectors(true);
a.storeTermVectorPositions(true);
b.storeTermVectorPositions(false);
}),
new Modifier("term_vector_payloads", false, (a, b) -> {
a.storeTermVectors(true);
b.storeTermVectors(true);
a.storeTermVectorPositions(true);
b.storeTermVectorPositions(true);
a.storeTermVectorPayloads(true);
b.storeTermVectorPayloads(false);
}),
new Modifier("term_vector_offsets", false, (a, b) -> {
a.storeTermVectors(true);
b.storeTermVectors(true);
a.storeTermVectorPositions(true);
b.storeTermVectorPositions(true);
a.storeTermVectorOffsets(true);
b.storeTermVectorOffsets(false);
})
));

/**
* Add type-specific modifiers for consistency checking.
*
* This should be called in a {@code @Before} method
*/
protected void addModifier(String property, boolean updateable, BiConsumer<T, T> method) {
modifiers.add(new Modifier(property, updateable, method));
}

/**
* Add type-specific modifiers for consistency checking.
*
* This should be called in a {@code @Before} method
*/
protected void addBooleanModifier(String property, boolean updateable, BiConsumer<T, Boolean> method) {
modifiers.add(new Modifier(property, updateable, (a, b) -> {
method.accept(a, true);
method.accept(b, false);
}));
}

protected abstract T newBuilder();

public void testMergeConflicts() {
Mapper.BuilderContext context = new Mapper.BuilderContext(SETTINGS, new ContentPath(1));
T builder1 = newBuilder();
T builder2 = newBuilder();
{
FieldMapper mapper = (FieldMapper) builder1.build(context);
FieldMapper toMerge = (FieldMapper) builder2.build(context);
mapper.merge(toMerge); // identical mappers should merge with no issue
}
{
FieldMapper mapper = (FieldMapper) newBuilder().build(context);
FieldMapper toMerge = new MockFieldMapper("bogus") {
@Override
protected String contentType() {
return "bogustype";
}
};
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapper.merge(toMerge));
assertThat(e.getMessage(), containsString("cannot be changed from type"));
assertThat(e.getMessage(), containsString("bogustype"));
}
for (Modifier modifier : modifiers) {
builder1 = newBuilder();
builder2 = newBuilder();
modifier.apply(builder1, builder2);
FieldMapper mapper = (FieldMapper) builder1.build(context);
FieldMapper toMerge = (FieldMapper) builder2.build(context);
if (modifier.updateable) {
mapper.merge(toMerge);
} else {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
"Expected an error when merging property difference " + modifier.property, () -> mapper.merge(toMerge));
assertThat(e.getMessage(), containsString(modifier.property));
}
}
}

}
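
For reference, this is roughly how a concrete mapper test plugs into the new base class. It is only a sketch: the mapper name and the foo/bar options are hypothetical and not part of this commit, while newBuilder(), addModifier() and the @Before hook are the API shown in the new FieldMapperTestCase above (the text and constant_keyword test changes below follow the same pattern):

    public class MyFieldMapperTests extends FieldMapperTestCase<MyFieldMapper.Builder> {

        @Override
        protected MyFieldMapper.Builder newBuilder() {
            // every check in testMergeConflicts starts from an identical builder
            return new MyFieldMapper.Builder("my_field");
        }

        @Before
        public void addModifiers() {
            // updateable option: merge() must accept the difference
            addModifier("foo", true, (a, b) -> {
                a.setFoo(1);   // hypothetical setter
                b.setFoo(2);
            });
            // non-updateable option: merge() must fail with a message naming the property
            addModifier("bar", false, (a, b) -> {
                a.setBar("x"); // hypothetical setter
                b.setBar("y");
            });
        }
    }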

@ -19,234 +19,89 @@
package org.elasticsearch.index.mapper;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.similarities.BM25Similarity;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/** Base test case for subclasses of MappedFieldType */
public abstract class FieldTypeTestCase extends ESTestCase {
public abstract class FieldTypeTestCase<T extends MappedFieldType> extends ESTestCase {

public static final QueryShardContext MOCK_QSC = createMockQueryShardContext(true);
public static final QueryShardContext MOCK_QSC_DISALLOW_EXPENSIVE = createMockQueryShardContext(false);

/** Abstraction for mutating a property of a MappedFieldType */
public abstract static class Modifier {
/** The name of the property that is being modified. Used in test failure messages. */
public final String property;
/** True if this property is updateable, false otherwise. */
public final boolean updateable;

public Modifier(String property, boolean updateable) {
this.property = property;
this.updateable = updateable;
}

/** Modifies the property */
public abstract void modify(MappedFieldType ft);
/**
* Optional method to implement that allows the field type that will be compared to be modified,
* so that it does not have the default value for the property being modified.
*/
public void normalizeOther(MappedFieldType other) {}
}

private final List<Modifier> modifiers = new ArrayList<>(Arrays.asList(
new Modifier("boost", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setBoost(1.1f);
}
},
new Modifier("doc_values", false) {
@Override
public void modify(MappedFieldType ft) {
ft.setHasDocValues(ft.hasDocValues() == false);
}
},
new Modifier("analyzer", false) {
@Override
public void modify(MappedFieldType ft) {
ft.setIndexAnalyzer(new NamedAnalyzer("bar", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
},
new Modifier("analyzer", false) {
@Override
public void modify(MappedFieldType ft) {
ft.setIndexAnalyzer(new NamedAnalyzer("bar", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
@Override
public void normalizeOther(MappedFieldType other) {
other.setIndexAnalyzer(new NamedAnalyzer("foo", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
},
// check that we can update if the analyzer is unchanged
new Modifier("analyzer", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setIndexAnalyzer(new NamedAnalyzer("foo", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
@Override
public void normalizeOther(MappedFieldType other) {
other.setIndexAnalyzer(new NamedAnalyzer("foo", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
},
new Modifier("search_analyzer", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setSearchAnalyzer(new NamedAnalyzer("bar", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
},
new Modifier("search_analyzer", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setSearchAnalyzer(new NamedAnalyzer("bar", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
@Override
public void normalizeOther(MappedFieldType other) {
other.setSearchAnalyzer(new NamedAnalyzer("foo", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
},
new Modifier("search_quote_analyzer", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setSearchQuoteAnalyzer(new NamedAnalyzer("bar", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
},
new Modifier("search_quote_analyzer", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setSearchQuoteAnalyzer(new NamedAnalyzer("bar", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
@Override
public void normalizeOther(MappedFieldType other) {
other.setSearchQuoteAnalyzer(new NamedAnalyzer("foo", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
},
new Modifier("similarity", false) {
@Override
public void modify(MappedFieldType ft) {
ft.setSimilarity(new SimilarityProvider("foo", new BM25Similarity()));
}
},
new Modifier("similarity", false) {
@Override
public void modify(MappedFieldType ft) {
ft.setSimilarity(new SimilarityProvider("foo", new BM25Similarity()));
}
@Override
public void normalizeOther(MappedFieldType other) {
other.setSimilarity(new SimilarityProvider("bar", new BM25Similarity()));
}
},
// check that we can update if the similarity is unchanged
new Modifier("similarity", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setSimilarity(new SimilarityProvider("foo", new BM25Similarity()));
}
@Override
public void normalizeOther(MappedFieldType other) {
other.setSimilarity(new SimilarityProvider("foo", new BM25Similarity()));
}
},
new Modifier("eager_global_ordinals", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setEagerGlobalOrdinals(ft.eagerGlobalOrdinals() == false);
}
},
new Modifier("null_value", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setNullValue(dummyNullValue);
}
}
));

/**
* Add a mutation that will be tested for all expected semantics of equality and compatibility.
* These should be added in an @Before method.
*/
protected void addModifier(Modifier modifier) {
modifiers.add(modifier);
}

private Object dummyNullValue = "dummyvalue";

/** Sets the null value used by the modifier for null value testing. This should be set in an @Before method. */
protected void setDummyNullValue(Object value) {
dummyNullValue = value;
}

/** Create a default constructed fieldtype */
protected abstract MappedFieldType createDefaultFieldType();
protected abstract T createDefaultFieldType();

MappedFieldType createNamedDefaultFieldType() {
MappedFieldType fieldType = createDefaultFieldType();
T createNamedDefaultFieldType() {
T fieldType = createDefaultFieldType();
fieldType.setName("foo");
return fieldType;
}

// TODO: remove this once toString is no longer final on FieldType...
protected void assertFieldTypeEquals(String property, MappedFieldType ft1, MappedFieldType ft2) {
if (ft1.equals(ft2) == false) {
fail("Expected equality, testing property " + property + "\nexpected: " + toString(ft1) + "; \nactual: " + toString(ft2)
+ "\n");
}
}

protected void assertFieldTypeNotEquals(String property, MappedFieldType ft1, MappedFieldType ft2) {
if (ft1.equals(ft2)) {
fail("Expected inequality, testing property " + property + "\nfirst: " + toString(ft1) + "; \nsecond: " + toString(ft2)
+ "\n");
}
}

protected void assertCompatible(String msg, MappedFieldType ft1, MappedFieldType ft2) {
List<String> conflicts = new ArrayList<>();
ft1.checkCompatibility(ft2, conflicts);
assertTrue("Found conflicts for " + msg + ": " + conflicts, conflicts.isEmpty());
}

protected void assertNotCompatible(String msg, MappedFieldType ft1, MappedFieldType ft2, String... messages) {
assert messages.length != 0;
List<String> conflicts = new ArrayList<>();
ft1.checkCompatibility(ft2, conflicts);
for (String message : messages) {
boolean found = false;
for (String conflict : conflicts) {
if (conflict.contains(message)) {
found = true;
}
@SuppressWarnings("unchecked")
private final List<EqualsHashCodeTestUtils.MutateFunction<T>> modifiers = new ArrayList<>(Arrays.asList(
t -> {
MappedFieldType copy = t.clone();
copy.setName(t.name() + "-mutated");
return (T) copy;
},
t -> {
MappedFieldType copy = t.clone();
copy.setBoost(t.boost() + 1);
return (T) copy;
},
t -> {
MappedFieldType copy = t.clone();
NamedAnalyzer a = t.searchAnalyzer();
if (a == null) {
copy.setSearchAnalyzer(new NamedAnalyzer("mutated", AnalyzerScope.INDEX, new StandardAnalyzer()));
return (T) copy;
}
assertTrue("Missing conflict for " + msg + ": [" + message + "] in conflicts " + conflicts, found);
copy.setSearchAnalyzer(new NamedAnalyzer(a.name() + "-mutated", a.scope(), a.analyzer()));
return (T) copy;
},
t -> {
MappedFieldType copy = t.clone();
NamedAnalyzer a = t.searchQuoteAnalyzer();
if (a == null) {
copy.setSearchQuoteAnalyzer(new NamedAnalyzer("mutated", AnalyzerScope.INDEX, new StandardAnalyzer()));
return (T) copy;
}
copy.setSearchQuoteAnalyzer(new NamedAnalyzer(a.name() + "-mutated", a.scope(), a.analyzer()));
return (T) copy;
},
t -> {
MappedFieldType copy = t.clone();
copy.setNullValue(new Object());
return (T) copy;
},
t -> {
MappedFieldType copy = t.clone();
copy.setEagerGlobalOrdinals(t.eagerGlobalOrdinals() == false);
return (T) copy;
},
t -> {
MappedFieldType copy = t.clone();
Map<String, String> meta = new HashMap<>(t.meta());
meta.put("bogus", "bogus");
copy.setMeta(meta);
return (T) copy;
}
}
));

protected String toString(MappedFieldType ft) {
return "MappedFieldType{" +
"name=" + ft.name() +
", boost=" + ft.boost() +
", docValues=" + ft.hasDocValues() +
", indexAnalyzer=" + ft.indexAnalyzer() +
", searchAnalyzer=" + ft.searchAnalyzer() +
", searchQuoteAnalyzer=" + ft.searchQuoteAnalyzer() +
", similarity=" + ft.similarity() +
", eagerGlobalOrdinals=" + ft.eagerGlobalOrdinals() +
", nullValue=" + ft.nullValue() +
", nullValueAsString='" + ft.nullValueAsString() + "'" +
"} " + super.toString();
protected void addModifier(EqualsHashCodeTestUtils.MutateFunction<T> modifier) {
modifiers.add(modifier);
}

protected QueryShardContext randomMockShardContext() {
@ -261,114 +116,15 @@ public abstract class FieldTypeTestCase extends ESTestCase {

public void testClone() {
MappedFieldType fieldType = createNamedDefaultFieldType();
MappedFieldType clone = fieldType.clone();
assertNotSame(clone, fieldType);
assertEquals(clone.getClass(), fieldType.getClass());
assertEquals(clone, fieldType);
assertEquals(clone, clone.clone()); // transitivity

for (Modifier modifier : modifiers) {
fieldType = createNamedDefaultFieldType();
modifier.modify(fieldType);
clone = fieldType.clone();
assertNotSame(clone, fieldType);
assertFieldTypeEquals(modifier.property, clone, fieldType);
}
EqualsHashCodeTestUtils.checkEqualsAndHashCode(fieldType, MappedFieldType::clone);
}

@SuppressWarnings("unchecked")
public void testEquals() {
MappedFieldType ft1 = createNamedDefaultFieldType();
MappedFieldType ft2 = createNamedDefaultFieldType();
assertEquals(ft1, ft1); // reflexive
assertEquals(ft1, ft2); // symmetric
assertEquals(ft2, ft1);
assertEquals(ft1.hashCode(), ft2.hashCode());

for (Modifier modifier : modifiers) {
ft1 = createNamedDefaultFieldType();
ft2 = createNamedDefaultFieldType();
modifier.modify(ft2);
assertFieldTypeNotEquals(modifier.property, ft1, ft2);
assertNotEquals("hash code for modified property " + modifier.property, ft1.hashCode(), ft2.hashCode());
// modify the same property and they are equal again
modifier.modify(ft1);
assertFieldTypeEquals(modifier.property, ft1, ft2);
assertEquals("hash code for modified property " + modifier.property, ft1.hashCode(), ft2.hashCode());
for (EqualsHashCodeTestUtils.MutateFunction<T> modifier : modifiers) {
EqualsHashCodeTestUtils.checkEqualsAndHashCode(createNamedDefaultFieldType(),
t -> (T) t.clone(), modifier);
}
}

public void testFreeze() {
for (Modifier modifier : modifiers) {
MappedFieldType fieldType = createNamedDefaultFieldType();
fieldType.freeze();
try {
modifier.modify(fieldType);
fail("expected already frozen exception for property " + modifier.property);
} catch (IllegalStateException e) {
assertTrue(e.getMessage().contains("already frozen"));
}
}
}

public void testCheckTypeName() {
final MappedFieldType fieldType = createNamedDefaultFieldType();
List<String> conflicts = new ArrayList<>();
fieldType.checkCompatibility(fieldType, conflicts); // no exception
assertTrue(conflicts.toString(), conflicts.isEmpty());

MappedFieldType bogus = new TermBasedFieldType() {
@Override
public MappedFieldType clone() {return null;}
@Override
public String typeName() { return fieldType.typeName();}
@Override
public Query existsQuery(QueryShardContext context) { return null; }
};
try {
fieldType.checkCompatibility(bogus, conflicts);
fail("expected bad types exception");
} catch (IllegalStateException e) {
assertTrue(e.getMessage().contains("Type names equal"));
}
assertTrue(conflicts.toString(), conflicts.isEmpty());

MappedFieldType other = new TermBasedFieldType() {
@Override
public MappedFieldType clone() {return null;}
@Override
public String typeName() { return "othertype";}
@Override
public Query existsQuery(QueryShardContext context) { return null; }
};
try {
fieldType.checkCompatibility(other, conflicts);
fail();
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage(), e.getMessage().contains("cannot be changed from type"));
}
assertTrue(conflicts.toString(), conflicts.isEmpty());
}

public void testCheckCompatibility() {
MappedFieldType ft1 = createNamedDefaultFieldType();
MappedFieldType ft2 = createNamedDefaultFieldType();
assertCompatible("default", ft1, ft2);
assertCompatible("default", ft2, ft1);

for (Modifier modifier : modifiers) {
ft1 = createNamedDefaultFieldType();
ft2 = createNamedDefaultFieldType();
modifier.normalizeOther(ft1);
modifier.modify(ft2);
if (modifier.updateable) {
assertCompatible(modifier.property, ft1, ft2);
assertCompatible(modifier.property, ft2, ft1); // always symmetric when not strict
} else {
// not compatible whether strict or not
String conflict = "different [" + modifier.property + "]";
assertNotCompatible(modifier.property, ft1, ft2, conflict);
assertNotCompatible(modifier.property, ft2, ft1, conflict);
}
}
}
}
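
With the compatibility machinery moved to FieldMapper, the reworked FieldTypeTestCase only exercises equals/hashCode/clone through EqualsHashCodeTestUtils, and subclasses register mutation functions instead of named Modifier instances. A minimal sketch of the new shape — MyFieldType and its scale property are hypothetical, the addModifier/createDefaultFieldType signatures are the ones in the diff above (the TextFieldTypeTests changes follow the same pattern):

    public class MyFieldTypeTests extends FieldTypeTestCase<MyFieldType> {

        @Before
        public void addModifiers() {
            // each mutation returns a copy that differs in exactly one property,
            // so checkEqualsAndHashCode can verify equals()/hashCode() notice it
            addModifier(t -> {
                MyFieldType copy = t.clone();
                copy.setScale(t.getScale() + 1); // hypothetical property
                return copy;
            });
        }

        @Override
        protected MyFieldType createDefaultFieldType() {
            return new MyFieldType();
        }
    }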

@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryShardContext;

import java.io.IOException;
import java.util.List;

// this sucks how much must be overridden just do get a dummy field mapper...
public class MockFieldMapper extends FieldMapper {
@ -89,4 +90,9 @@ public class MockFieldMapper extends FieldMapper {
@Override
protected void parseCreateField(ParseContext context) throws IOException {
}

@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {

}
}
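
Mappers with options that must not change after the field exists now report them from mergeOptions(FieldMapper, List<String>) instead of MappedFieldType.checkCompatibility. A sketch of the usual shape, for a hypothetical mapper with a non-updateable precision option (the HistogramFieldMapper and ConstantKeywordFieldMapper hunks below are real instances of this pattern):

    @Override
    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
        // 'precision' is hypothetical; a difference is recorded as a conflict,
        // and FieldMapper.merge() turns non-empty conflicts into an exception
        MyFieldMapper mergeWith = (MyFieldMapper) other;
        if (precision != mergeWith.precision) {
            conflicts.add("mapper [" + name() + "] has different [precision] values");
        }
    }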

@ -57,6 +57,7 @@ import org.elasticsearch.xpack.analytics.aggregations.support.AnalyticsValuesSou

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
@ -152,9 +153,8 @@ public class HistogramFieldMapper extends FieldMapper {
}

@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
HistogramFieldMapper gpfmMergeWith = (HistogramFieldMapper) mergeWith;
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
HistogramFieldMapper gpfmMergeWith = (HistogramFieldMapper) other;
if (gpfmMergeWith.ignoreMalformed.explicit()) {
this.ignoreMalformed = gpfmMergeWith.ignoreMalformed;
}

@ -10,7 +10,7 @@ package org.elasticsearch.xpack.analytics.mapper;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class HistogramFieldTypeTests extends FieldTypeTestCase {
public class HistogramFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {

@Override
protected MappedFieldType createDefaultFieldType() {

@ -141,7 +141,7 @@ public class TransportResumeFollowActionTests extends ESTestCase {
MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "index2");
mapperService.updateMapping(null, followIMD);
Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, mapperService));
assertThat(e.getMessage(), equalTo("mapper [field] of different type, current_type [text], merged_type [keyword]"));
assertThat(e.getMessage(), equalTo("mapper [field] cannot be changed from type [text] to [keyword]"));
}
{
// should fail because of non whitelisted settings not the same between leader and follow index

@ -135,20 +135,6 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
return Objects.equals(value, other.value);
}

@Override
public void checkCompatibility(MappedFieldType newFT, List<String> conflicts) {
super.checkCompatibility(newFT, conflicts);
ConstantKeywordFieldType newConstantKeywordFT = (ConstantKeywordFieldType) newFT;
if (this.value != null) {
if (newConstantKeywordFT.value == null) {
conflicts.add("mapper [" + name() + "] cannot unset [value]");
} else if (Objects.equals(value, newConstantKeywordFT.value) == false) {
conflicts.add("mapper [" + name() + "] has different [value] from the value that is configured in mappings: [" + value +
"] vs. [" + newConstantKeywordFT.value + "]");
}
}
}

@Override
public int hashCode() {
return 31 * super.hashCode() + Objects.hashCode(value);
@ -303,6 +289,19 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
}
}

@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
ConstantKeywordFieldType newConstantKeywordFT = (ConstantKeywordFieldType) other.fieldType();
if (this.fieldType().value != null) {
if (newConstantKeywordFT.value == null) {
conflicts.add("mapper [" + name() + "] cannot unset [value]");
} else if (Objects.equals(fieldType().value, newConstantKeywordFT.value) == false) {
conflicts.add("mapper [" + name() + "] has different [value] from the value that is configured in mappings: ["
+ fieldType().value + "] vs. [" + newConstantKeywordFT.value + "]");
}
}
}

@Override
protected String contentType() {
return CONTENT_TYPE;

@ -13,25 +13,45 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapperTestCase;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xpack.constantkeyword.ConstantKeywordMapperPlugin;
import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;
import org.junit.Before;

import java.util.Collection;
import java.util.Collections;

public class ConstantKeywordFieldMapperTests extends ESSingleNodeTestCase {
public class ConstantKeywordFieldMapperTests extends FieldMapperTestCase<ConstantKeywordFieldMapper.Builder> {

@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(ConstantKeywordMapperPlugin.class, LocalStateCompositeXPackPlugin.class);
}

@Override
protected ConstantKeywordFieldMapper.Builder newBuilder() {
return new ConstantKeywordFieldMapper.Builder("constant");
}

@Before
public void addModifiers() {
addModifier("value", false, (a, b) -> {
a.setValue("foo");
b.setValue("bar");
});
addModifier("unset", false, (a, b) -> {
a.setValue("foo");;
});
addModifier("value-from-null", true, (a, b) -> {
b.setValue("bar");
});
}

public void testDefaults() throws Exception {
IndexService indexService = createIndex("test");
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")

@ -13,46 +13,11 @@ import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.xpack.constantkeyword.mapper.ConstantKeywordFieldMapper.ConstantKeywordFieldType;
import org.junit.Before;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class ConstantKeywordFieldTypeTests extends FieldTypeTestCase {

@Before
public void setupProperties() {
addModifier(new Modifier("value", false) {
@Override
public void modify(MappedFieldType type) {
((ConstantKeywordFieldType) type).setValue("bar");
}
});
}

public void testSetValue() {
ConstantKeywordFieldType ft1 = new ConstantKeywordFieldType();
ft1.setName("field");
ConstantKeywordFieldType ft2 = new ConstantKeywordFieldType();
ft2.setName("field");
ft2.setValue("bar");
List<String> conflicts = new ArrayList<>();
ft1.checkCompatibility(ft2, conflicts);
assertEquals(Collections.emptyList(), conflicts);
}

public void testUnsetValue() {
ConstantKeywordFieldType ft1 = new ConstantKeywordFieldType();
ft1.setName("field");
ft1.setValue("foo");
ConstantKeywordFieldType ft2 = new ConstantKeywordFieldType();
ft2.setName("field");
List<String> conflicts = new ArrayList<>();
ft1.checkCompatibility(ft2, conflicts);
assertEquals(Collections.singletonList("mapper [field] cannot unset [value]"), conflicts);
}
public class ConstantKeywordFieldTypeTests extends FieldTypeTestCase<MappedFieldType> {

@Override
protected MappedFieldType createDefaultFieldType() {

Some files were not shown because too many files have changed in this diff.