Convert all FieldMappers in mapper-extras to parametrized form (#62938) (#63034)

This converts RankFeatureFieldMapper, RankFeaturesFieldMapper,
SearchAsYouTypeFieldMapper and TokenCountFieldMapper to
parametrized forms. It also adds a TextParams utility class to core
containing functions that help declare text parameters - mainly shared
between SearchAsYouTypeFieldMapper and KeywordFieldMapper at
the moment, but it will come in handy when we convert TextFieldMapper
and friends.

Relates to #62988
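
For readers new to the parametrized-mapper framework (tracked in #62988), the shape of the conversion is sketched below. This is a condensed, annotated restatement of the RankFeatureFieldMapper hunks in this commit, not standalone compilable code: Parameter, TypeParser and BuilderContext come from the ParametrizedFieldMapper framework.

    // Condensed from the RankFeatureFieldMapper change below, comments added.
    public static class Builder extends ParametrizedFieldMapper.Builder {

        // Each mapping option is declared once as a Parameter: its name, whether it
        // may be updated on an existing field, how to read its current value back
        // off a built mapper (used when merging), and its default.
        private final Parameter<Boolean> positiveScoreImpact
            = Parameter.boolParam("positive_score_impact", false, m -> ft(m).positiveScoreImpact, true);
        private final Parameter<Map<String, String>> meta = Parameter.metaParam();

        public Builder(String name) {
            super(name);
        }

        // Parsing, XContent serialization, and merge-conflict checks are all driven
        // from this list; the hand-written TypeParser, doXContentBody and
        // mergeOptions methods deleted below become redundant.
        @Override
        protected List<Parameter<?>> getParameters() {
            return Arrays.asList(positiveScoreImpact, meta);
        }
    }

    // One shared TypeParser replaces each mapper's bespoke parse() loop.
    public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));

Merging works the same way in every converted mapper: getMergeBuilder() returns a fresh Builder initialized from the current mapper via init(this), and the framework compares the declared parameters to detect conflicts. That is why the expected error message in SearchAsYouTypeFieldMapperTests changes from "different [max_shingle_size]" to "Cannot update parameter [max_shingle_size]" below.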
Alan Woodward 2020-09-29 20:50:34 +01:00 committed by GitHub
parent 454aafde1f
commit 2f5a813589
14 changed files with 451 additions and 492 deletions

MapperExtrasPlugin.java

@@ -36,10 +36,10 @@ public class MapperExtrasPlugin extends Plugin implements MapperPlugin, SearchPl
     public Map<String, Mapper.TypeParser> getMappers() {
         Map<String, Mapper.TypeParser> mappers = new LinkedHashMap<>();
         mappers.put(ScaledFloatFieldMapper.CONTENT_TYPE, ScaledFloatFieldMapper.PARSER);
-        mappers.put(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser());
-        mappers.put(RankFeatureFieldMapper.CONTENT_TYPE, new RankFeatureFieldMapper.TypeParser());
-        mappers.put(RankFeaturesFieldMapper.CONTENT_TYPE, new RankFeaturesFieldMapper.TypeParser());
-        mappers.put(SearchAsYouTypeFieldMapper.CONTENT_TYPE, new SearchAsYouTypeFieldMapper.TypeParser());
+        mappers.put(TokenCountFieldMapper.CONTENT_TYPE, TokenCountFieldMapper.PARSER);
+        mappers.put(RankFeatureFieldMapper.CONTENT_TYPE, RankFeatureFieldMapper.PARSER);
+        mappers.put(RankFeaturesFieldMapper.CONTENT_TYPE, RankFeaturesFieldMapper.PARSER);
+        mappers.put(SearchAsYouTypeFieldMapper.CONTENT_TYPE, SearchAsYouTypeFieldMapper.PARSER);
         return Collections.unmodifiableMap(mappers);
     }

RankFeatureFieldMapper.java

@@ -26,15 +26,13 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
-import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.lookup.SearchLookup;

 import java.io.IOException;
-import java.util.Iterator;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Supplier;
@@ -42,7 +40,7 @@ import java.util.function.Supplier;
 /**
  * A {@link FieldMapper} that exposes Lucene's {@link FeatureField}.
  */
-public class RankFeatureFieldMapper extends FieldMapper {
+public class RankFeatureFieldMapper extends ParametrizedFieldMapper {

     public static final String CONTENT_TYPE = "rank_feature";
@@ -57,43 +55,34 @@ public class RankFeatureFieldMapper extends FieldMapper {
         }
     }

-    public static class Builder extends FieldMapper.Builder<Builder> {
+    private static RankFeatureFieldType ft(FieldMapper in) {
+        return ((RankFeatureFieldMapper)in).fieldType();
+    }

-        private boolean positiveScoreImpact = true;
+    public static class Builder extends ParametrizedFieldMapper.Builder {
+
+        private final Parameter<Boolean> positiveScoreImpact
+            = Parameter.boolParam("positive_score_impact", false, m -> ft(m).positiveScoreImpact, true);
+        private final Parameter<Map<String, String>> meta = Parameter.metaParam();

         public Builder(String name) {
-            super(name, Defaults.FIELD_TYPE);
-            builder = this;
+            super(name);
         }

-        public Builder positiveScoreImpact(boolean v) {
-            this.positiveScoreImpact = v;
-            return builder;
+        @Override
+        protected List<Parameter<?>> getParameters() {
+            return Arrays.asList(positiveScoreImpact, meta);
         }

         @Override
         public RankFeatureFieldMapper build(BuilderContext context) {
-            return new RankFeatureFieldMapper(name, fieldType, new RankFeatureFieldType(buildFullName(context), meta, positiveScoreImpact),
-                multiFieldsBuilder.build(this, context), copyTo, positiveScoreImpact);
+            return new RankFeatureFieldMapper(name,
+                new RankFeatureFieldType(buildFullName(context), meta.getValue(), positiveScoreImpact.getValue()),
+                multiFieldsBuilder.build(this, context), copyTo.build(), positiveScoreImpact.getValue());
         }
     }

-    public static class TypeParser implements Mapper.TypeParser {
-        @Override
-        public Mapper.Builder<?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            RankFeatureFieldMapper.Builder builder = new RankFeatureFieldMapper.Builder(name);
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String propName = entry.getKey();
-                Object propNode = entry.getValue();
-                if (propName.equals("positive_score_impact")) {
-                    builder.positiveScoreImpact(XContentMapValues.nodeBooleanValue(propNode));
-                    iterator.remove();
-                }
-            }
-            return builder;
-        }
-    }
+    public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));

     public static final class RankFeatureFieldType extends MappedFieldType {
@@ -132,10 +121,9 @@ public class RankFeatureFieldMapper extends FieldMapper {

     private final boolean positiveScoreImpact;

-    private RankFeatureFieldMapper(String simpleName, FieldType fieldType, MappedFieldType mappedFieldType,
+    private RankFeatureFieldMapper(String simpleName, MappedFieldType mappedFieldType,
                                    MultiFields multiFields, CopyTo copyTo, boolean positiveScoreImpact) {
-        super(simpleName, fieldType, mappedFieldType, multiFields, copyTo);
-        assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0;
+        super(simpleName, mappedFieldType, multiFields, copyTo);
         this.positiveScoreImpact = positiveScoreImpact;
     }
@@ -201,23 +189,7 @@ public class RankFeatureFieldMapper extends FieldMapper {
     }

     @Override
-    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
-        super.doXContentBody(builder, includeDefaults, params);
-        if (includeDefaults || positiveScoreImpact == false) {
-            builder.field("positive_score_impact", positiveScoreImpact);
-        }
-    }
-
-    @Override
-    protected boolean docValuesByDefault() {
-        return false;
-    }
-
-    @Override
-    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
-        if (positiveScoreImpact != ((RankFeatureFieldMapper)other).positiveScoreImpact) {
-            conflicts.add("mapper [" + name() + "] has different [positive_score_impact] values");
-        }
+    public ParametrizedFieldMapper.Builder getMergeBuilder() {
+        return new Builder(simpleName()).init(this);
     }
 }

RankFeaturesFieldMapper.java

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.mapper;

 import org.apache.lucene.document.FeatureField;
-import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.lucene.Lucene;
@@ -30,6 +29,7 @@ import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.lookup.SearchLookup;

 import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Supplier;
@@ -38,42 +38,33 @@ import java.util.function.Supplier;
 /**
  * A {@link FieldMapper} that exposes Lucene's {@link FeatureField} as a sparse
  * vector of features.
  */
-public class RankFeaturesFieldMapper extends FieldMapper {
+public class RankFeaturesFieldMapper extends ParametrizedFieldMapper {

     public static final String CONTENT_TYPE = "rank_features";

-    public static class Defaults {
-        public static final FieldType FIELD_TYPE = new FieldType();
-
-        static {
-            FIELD_TYPE.setTokenized(false);
-            FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
-            FIELD_TYPE.setOmitNorms(true);
-            FIELD_TYPE.freeze();
-        }
-    }
-
-    public static class Builder extends FieldMapper.Builder<Builder> {
+    public static class Builder extends ParametrizedFieldMapper.Builder {
+
+        private final Parameter<Map<String, String>> meta = Parameter.metaParam();

         public Builder(String name) {
-            super(name, Defaults.FIELD_TYPE);
+            super(name);
             builder = this;
         }

+        @Override
+        protected List<Parameter<?>> getParameters() {
+            return Collections.singletonList(meta);
+        }
+
         @Override
         public RankFeaturesFieldMapper build(BuilderContext context) {
             return new RankFeaturesFieldMapper(
-                name, fieldType, new RankFeaturesFieldType(buildFullName(context), meta),
-                multiFieldsBuilder.build(this, context), copyTo);
+                name, new RankFeaturesFieldType(buildFullName(context), meta.getValue()),
+                multiFieldsBuilder.build(this, context), copyTo.build());
         }
     }

-    public static class TypeParser implements Mapper.TypeParser {
-        @Override
-        public Mapper.Builder<?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            return new Builder(name);
-        }
-    }
+    public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));

     public static final class RankFeaturesFieldType extends MappedFieldType {
@@ -103,22 +94,22 @@ public class RankFeaturesFieldMapper extends FieldMapper {
         }
     }

-    private RankFeaturesFieldMapper(String simpleName, FieldType fieldType, MappedFieldType mappedFieldType,
+    private RankFeaturesFieldMapper(String simpleName, MappedFieldType mappedFieldType,
                                     MultiFields multiFields, CopyTo copyTo) {
-        super(simpleName, fieldType, mappedFieldType, multiFields, copyTo);
-        assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0;
+        super(simpleName, mappedFieldType, multiFields, copyTo);
     }

+    @Override
+    public ParametrizedFieldMapper.Builder getMergeBuilder() {
+        return new Builder(simpleName()).init(this);
+    }
+
     @Override
     protected RankFeaturesFieldMapper clone() {
         return (RankFeaturesFieldMapper) super.clone();
     }

-    @Override
-    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
-    }
-
     @Override
     public RankFeaturesFieldType fieldType() {
         return (RankFeaturesFieldType) super.fieldType();
@@ -157,7 +148,7 @@ public class RankFeaturesFieldMapper extends FieldMapper {
     }

     @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
+    protected void parseCreateField(ParseContext context) {
         throw new AssertionError("parse is implemented directly");
     }
@@ -174,16 +165,6 @@ public class RankFeaturesFieldMapper extends FieldMapper {
         };
     }

-    @Override
-    protected boolean indexedByDefault() {
-        return false;
-    }
-
-    @Override
-    protected boolean docValuesByDefault() {
-        return false;
-    }
-
     @Override
     protected String contentType() {
         return CONTENT_TYPE;

SearchAsYouTypeFieldMapper.java

@@ -47,12 +47,10 @@ import org.apache.lucene.util.automaton.Automata;
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.Operations;
 import org.elasticsearch.common.collect.Iterators;
-import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.analysis.AnalyzerScope;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.similarity.SimilarityProvider;
-import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.search.lookup.SearchLookup;

 import java.io.IOException;
@@ -63,11 +61,9 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.function.Supplier;

-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue;
 import static org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType.hasGaps;
-import static org.elasticsearch.index.mapper.TypeParsers.checkNull;
-import static org.elasticsearch.index.mapper.TypeParsers.parseTextField;

 /**
  * Mapper for a text field that optimizes itself for as-you-type completion by indexing its content into subfields. Each subfield
@@ -83,7 +79,7 @@ import static org.elasticsearch.index.mapper.TypeParsers.parseTextField;
  * [ PrefixFieldMapper, PrefixFieldType, analysis wrapped with max_shingle_size-shingles and edge-ngrams ]
  * </pre>
  */
-public class SearchAsYouTypeFieldMapper extends FieldMapper {
+public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper {

     public static final String CONTENT_TYPE = "search_as_you_type";

     private static final int MAX_SHINGLE_SIZE_LOWER_BOUND = 2;
@@ -91,87 +87,84 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
     private static final String PREFIX_FIELD_SUFFIX = "._index_prefix";

     public static class Defaults {
         public static final int MIN_GRAM = 1;
         public static final int MAX_GRAM = 20;
         public static final int MAX_SHINGLE_SIZE = 3;
-
-        public static final FieldType FIELD_TYPE = new FieldType();
-
-        static {
-            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
-            FIELD_TYPE.freeze();
-        }
     }

-    public static class TypeParser implements Mapper.TypeParser {
-
-        @Override
-        public Mapper.Builder<?> parse(String name,
-                                       Map<String, Object> node,
-                                       ParserContext parserContext) throws MapperParsingException {
-            final Builder builder = new Builder(name);
-
-            builder.indexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer());
-            builder.searchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer());
-            builder.searchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer());
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                final Map.Entry<String, Object> entry = iterator.next();
-                final String fieldName = entry.getKey();
-                final Object fieldNode = entry.getValue();
-                checkNull(fieldName, fieldNode);
-                if (fieldName.equals("max_shingle_size")) {
-                    builder.maxShingleSize(nodeIntegerValue(fieldNode));
-                    iterator.remove();
-                } else if (fieldName.equals("similarity")) {
-                    SimilarityProvider similarityProvider = TypeParsers.resolveSimilarity(parserContext, fieldName, fieldNode.toString());
-                    builder.similarity(similarityProvider);
-                    iterator.remove();
-                }
-                // TODO should we allow to configure the prefix field
-            }
-            parseTextField(builder, name, node, parserContext);
-            return builder;
-        }
-    }
+    public static final TypeParser PARSER
+        = new TypeParser((n, c) -> new Builder(n, () -> c.getIndexAnalyzers().getDefaultIndexAnalyzer()));
+
+    private static SearchAsYouTypeFieldMapper toType(FieldMapper in) {
+        return (SearchAsYouTypeFieldMapper) in;
+    }
+
+    private static SearchAsYouTypeFieldType ft(FieldMapper in) {
+        return toType(in).fieldType();
+    }

-    public static class Builder extends FieldMapper.Builder<Builder> {
-        private int maxShingleSize = Defaults.MAX_SHINGLE_SIZE;
-        private SimilarityProvider similarity;
+    public static class Builder extends ParametrizedFieldMapper.Builder {
+
+        private final Parameter<Boolean> index = Parameter.indexParam(m -> toType(m).index, true);
+        private final Parameter<Boolean> store = Parameter.storeParam(m -> toType(m).store, false);
+
+        // This is only here because for some reason the initial impl of this always serialized
+        // `doc_values=false`, even though it cannot be set; and so we need to continue
+        // serializing it forever because of mapper assertions in mixed clusters.
+        private final Parameter<Boolean> docValues = Parameter.docValuesParam(m -> false, false)
+            .setValidator(v -> {
+                if (v) {
+                    throw new MapperParsingException("Cannot set [doc_values] on field of type [search_as_you_type]");
+                }
+            })
+            .alwaysSerialize();
+
+        private final Parameter<Integer> maxShingleSize = Parameter.intParam("max_shingle_size", false,
+            m -> toType(m).maxShingleSize, Defaults.MAX_SHINGLE_SIZE)
+            .setValidator(v -> {
+                if (v < MAX_SHINGLE_SIZE_LOWER_BOUND || v > MAX_SHINGLE_SIZE_UPPER_BOUND) {
+                    throw new MapperParsingException("[max_shingle_size] must be at least [" + MAX_SHINGLE_SIZE_LOWER_BOUND
+                        + "] and at most " + "[" + MAX_SHINGLE_SIZE_UPPER_BOUND + "], got [" + v + "]");
+                }
+            })
+            .alwaysSerialize();
+
+        final TextParams.Analyzers analyzers;
+        final Parameter<SimilarityProvider> similarity = TextParams.similarity(m -> ft(m).getTextSearchInfo().getSimilarity());
+
+        final Parameter<String> indexOptions = TextParams.indexOptions(m -> toType(m).indexOptions);
+        final Parameter<Boolean> norms = TextParams.norms(true, m -> ft(m).getTextSearchInfo().hasNorms());
+        final Parameter<String> termVectors = TextParams.termVectors(m -> toType(m).termVectors);
+
+        private final Parameter<Map<String, String>> meta = Parameter.metaParam();

-        public Builder(String name) {
-            super(name, Defaults.FIELD_TYPE);
-            this.builder = this;
+        public Builder(String name, Supplier<NamedAnalyzer> defaultAnalyzer) {
+            super(name);
+            this.analyzers = new TextParams.Analyzers(defaultAnalyzer);
         }

-        public void similarity(SimilarityProvider similarity) {
-            this.similarity = similarity;
-        }
-
-        public Builder maxShingleSize(int maxShingleSize) {
-            if (maxShingleSize < MAX_SHINGLE_SIZE_LOWER_BOUND || maxShingleSize > MAX_SHINGLE_SIZE_UPPER_BOUND) {
-                throw new MapperParsingException("[max_shingle_size] must be at least [" + MAX_SHINGLE_SIZE_LOWER_BOUND + "] and at most " +
-                    "[" + MAX_SHINGLE_SIZE_UPPER_BOUND + "], got [" + maxShingleSize + "]");
-            }
-            this.maxShingleSize = maxShingleSize;
-            return builder;
-        }
-
         @Override
-        public Builder docValues(boolean docValues) {
-            if (docValues) {
-                throw new IllegalArgumentException("mapper [" + name() + "] of type [search_as_you_type] does not support doc values");
-            }
-            return this;
+        protected List<Parameter<?>> getParameters() {
+            return Arrays.asList(index, store, docValues, maxShingleSize,
+                analyzers.indexAnalyzer, analyzers.searchAnalyzer, analyzers.searchQuoteAnalyzer, similarity,
+                indexOptions, norms, termVectors, meta);
         }

         @Override
         public SearchAsYouTypeFieldMapper build(Mapper.BuilderContext context) {
-            SearchAsYouTypeFieldType ft = new SearchAsYouTypeFieldType(buildFullName(context), fieldType, similarity,
-                searchAnalyzer, searchQuoteAnalyzer, meta);
-            ft.setIndexAnalyzer(indexAnalyzer);
+            FieldType fieldType = new FieldType();
+            fieldType.setIndexOptions(TextParams.toIndexOptions(index.getValue(), indexOptions.getValue()));
+            fieldType.setOmitNorms(norms.getValue() == false);
+            fieldType.setStored(store.getValue());
+            TextParams.setTermVectorParams(termVectors.getValue(), fieldType);
+
+            NamedAnalyzer indexAnalyzer = analyzers.getIndexAnalyzer();
+            NamedAnalyzer searchAnalyzer = analyzers.getSearchAnalyzer();
+
+            SearchAsYouTypeFieldType ft = new SearchAsYouTypeFieldType(buildFullName(context), fieldType, similarity.getValue(),
+                analyzers.getSearchAnalyzer(), analyzers.getSearchQuoteAnalyzer(), meta.getValue());
+            ft.setIndexAnalyzer(analyzers.getIndexAnalyzer());

             // set up the prefix field
             FieldType prefixft = new FieldType(fieldType);
@@ -181,21 +174,20 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
             final String fullName = buildFullName(context);
             // wrap the root field's index analyzer with shingles and edge ngrams
             final Analyzer prefixIndexWrapper =
-                SearchAsYouTypeAnalyzer.withShingleAndPrefix(indexAnalyzer.analyzer(), maxShingleSize);
+                SearchAsYouTypeAnalyzer.withShingleAndPrefix(indexAnalyzer.analyzer(), maxShingleSize.getValue());
             // wrap the root field's search analyzer with only shingles
             final NamedAnalyzer prefixSearchWrapper = new NamedAnalyzer(searchAnalyzer.name(), searchAnalyzer.scope(),
-                SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), maxShingleSize));
+                SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), maxShingleSize.getValue()));
             // don't wrap the root field's search quote analyzer as prefix field doesn't support phrase queries
-            TextSearchInfo prefixSearchInfo = new TextSearchInfo(prefixft, similarity, prefixSearchWrapper, searchQuoteAnalyzer);
+            TextSearchInfo prefixSearchInfo = new TextSearchInfo(prefixft, similarity.getValue(), prefixSearchWrapper, searchAnalyzer);
             final PrefixFieldType prefixFieldType
                 = new PrefixFieldType(fullName, prefixSearchInfo, Defaults.MIN_GRAM, Defaults.MAX_GRAM);
             prefixFieldType.setIndexAnalyzer(new NamedAnalyzer(indexAnalyzer.name(), AnalyzerScope.INDEX, prefixIndexWrapper));
             final PrefixFieldMapper prefixFieldMapper = new PrefixFieldMapper(prefixft, prefixFieldType);

             // set up the shingle fields
-            final ShingleFieldMapper[] shingleFieldMappers = new ShingleFieldMapper[maxShingleSize - 1];
-            final ShingleFieldType[] shingleFieldTypes = new ShingleFieldType[maxShingleSize - 1];
+            final ShingleFieldMapper[] shingleFieldMappers = new ShingleFieldMapper[maxShingleSize.getValue() - 1];
+            final ShingleFieldType[] shingleFieldTypes = new ShingleFieldType[maxShingleSize.getValue() - 1];
             for (int i = 0; i < shingleFieldMappers.length; i++) {
                 final int shingleSize = i + 2;
                 FieldType shingleft = new FieldType(fieldType);
@@ -206,10 +198,10 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
                     SearchAsYouTypeAnalyzer.withShingle(indexAnalyzer.analyzer(), shingleSize);
                 final NamedAnalyzer shingleSearchWrapper = new NamedAnalyzer(searchAnalyzer.name(), searchAnalyzer.scope(),
                     SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), shingleSize));
-                final NamedAnalyzer shingleSearchQuoteWrapper = new NamedAnalyzer(searchQuoteAnalyzer.name(), searchQuoteAnalyzer.scope(),
-                    SearchAsYouTypeAnalyzer.withShingle(searchQuoteAnalyzer.analyzer(), shingleSize));
+                final NamedAnalyzer shingleSearchQuoteWrapper = new NamedAnalyzer(searchAnalyzer.name(), searchAnalyzer.scope(),
+                    SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), shingleSize));
                 TextSearchInfo textSearchInfo
-                    = new TextSearchInfo(shingleft, similarity, shingleSearchWrapper, shingleSearchQuoteWrapper);
+                    = new TextSearchInfo(shingleft, similarity.getValue(), shingleSearchWrapper, shingleSearchQuoteWrapper);
                 final ShingleFieldType shingleFieldType = new ShingleFieldType(fieldName, shingleSize, textSearchInfo);
                 shingleFieldType.setIndexAnalyzer(new NamedAnalyzer(indexAnalyzer.name(), AnalyzerScope.INDEX, shingleIndexWrapper));
                 shingleFieldType.setPrefixFieldType(prefixFieldType);
@@ -218,8 +210,7 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
             }
             ft.setPrefixField(prefixFieldType);
             ft.setShingleFields(shingleFieldTypes);
-            return new SearchAsYouTypeFieldMapper(name, fieldType, ft, copyTo,
-                maxShingleSize, prefixFieldMapper, shingleFieldMappers);
+            return new SearchAsYouTypeFieldMapper(name, ft, copyTo.build(), prefixFieldMapper, shingleFieldMappers, this);
         }
     }
@@ -243,6 +234,7 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
      */
     static class SearchAsYouTypeFieldType extends StringFieldType {

+        final FieldType fieldType;
         PrefixFieldType prefixField;
         ShingleFieldType[] shingleFields = new ShingleFieldType[0];
@@ -250,6 +242,7 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
                                      NamedAnalyzer searchAnalyzer, NamedAnalyzer searchQuoteAnalyzer, Map<String, String> meta) {
             super(name, fieldType.indexOptions() != IndexOptions.NONE, fieldType.stored(), false,
                 new TextSearchInfo(fieldType, similarity, searchAnalyzer, searchQuoteAnalyzer), meta);
+            this.fieldType = fieldType;
         }

         public void setPrefixField(PrefixFieldType prefixField) {
@@ -538,21 +531,29 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
         }
     }

+    private final boolean index;
+    private final boolean store;
+    private final String indexOptions;
+    private final String termVectors;
     private final int maxShingleSize;
-    private PrefixFieldMapper prefixField;
+    private final PrefixFieldMapper prefixField;
     private final ShingleFieldMapper[] shingleFields;

     public SearchAsYouTypeFieldMapper(String simpleName,
-                                      FieldType fieldType,
                                       SearchAsYouTypeFieldType mappedFieldType,
                                       CopyTo copyTo,
-                                      int maxShingleSize,
                                       PrefixFieldMapper prefixField,
-                                      ShingleFieldMapper[] shingleFields) {
-        super(simpleName, fieldType, mappedFieldType, MultiFields.empty(), copyTo);
+                                      ShingleFieldMapper[] shingleFields,
+                                      Builder builder) {
+        super(simpleName, mappedFieldType, MultiFields.empty(), copyTo);
         this.prefixField = prefixField;
         this.shingleFields = shingleFields;
-        this.maxShingleSize = maxShingleSize;
+        this.maxShingleSize = builder.maxShingleSize.getValue();
+        this.index = builder.index.getValue();
+        this.store = builder.store.getValue();
+        this.indexOptions = builder.indexOptions.getValue();
+        this.termVectors = builder.termVectors.getValue();
     }

     @Override
@@ -562,12 +563,12 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
             return;
         }

-        context.doc().add(new Field(fieldType().name(), value, fieldType));
+        context.doc().add(new Field(fieldType().name(), value, fieldType().fieldType));
         for (ShingleFieldMapper subFieldMapper : shingleFields) {
             context.doc().add(new Field(subFieldMapper.fieldType().name(), value, subFieldMapper.getLuceneFieldType()));
         }
         context.doc().add(new Field(prefixField.fieldType().name(), value, prefixField.getLuceneFieldType()));
-        if (fieldType.omitNorms()) {
+        if (fieldType().fieldType.omitNorms()) {
             createFieldNamesField(context);
         }
     }
@@ -583,20 +584,8 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
     }

     @Override
-    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
-        final SearchAsYouTypeFieldMapper m = (SearchAsYouTypeFieldMapper) other;
-        if (this.shingleFields.length != m.shingleFields.length) {
-            conflicts.add("mapper [" + name() + "] has a different [max_shingle_size]");
-        } else {
-            this.prefixField = (PrefixFieldMapper) this.prefixField.merge(m.prefixField);
-            for (int i = 0; i < m.shingleFields.length; i++) {
-                this.shingleFields[i] = (ShingleFieldMapper) this.shingleFields[i].merge(m.shingleFields[i]);
-            }
-        }
-        if (Objects.equals(this.fieldType().getTextSearchInfo().getSimilarity(),
-            other.fieldType().getTextSearchInfo().getSimilarity()) == false) {
-            conflicts.add("mapper [" + name() + "] has different [similarity] settings");
-        }
+    public ParametrizedFieldMapper.Builder getMergeBuilder() {
+        return new Builder(simpleName(), () -> fieldType().indexAnalyzer()).init(this);
     }

     public static String getShingleFieldName(String parentField, int shingleSize) {
@@ -620,18 +609,6 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
         return shingleFields;
     }

-    @Override
-    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
-        super.doXContentBody(builder, includeDefaults, params);
-        doXContentAnalyzers(builder, includeDefaults);
-        if (fieldType().getTextSearchInfo().getSimilarity() != null) {
-            builder.field("similarity", fieldType().getTextSearchInfo().getSimilarity().name());
-        } else if (includeDefaults) {
-            builder.field("similarity", SimilarityService.DEFAULT_SIMILARITY);
-        }
-        builder.field("max_shingle_size", maxShingleSize);
-    }
-
     @Override
     public Iterator<Mapper> iterator() {
         List<Mapper> subIterators = new ArrayList<>();
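
A note on the TextParams helpers used above: they centralize the declaration of the shared text parameters (analyzers, similarity, index_options, norms, term_vector) and the translation of those values onto a Lucene FieldType. The build() method above already shows the whole pattern; condensed and annotated for reference:

    // Condensed from SearchAsYouTypeFieldMapper.Builder.build() above.
    FieldType fieldType = new FieldType();
    // `index` plus the `index_options` string collapse into Lucene IndexOptions:
    fieldType.setIndexOptions(TextParams.toIndexOptions(index.getValue(), indexOptions.getValue()));
    fieldType.setOmitNorms(norms.getValue() == false);
    fieldType.setStored(store.getValue());
    // the `term_vector` string ("yes" in the tests below) expands into the
    // individual storeTermVector* flags:
    TextParams.setTermVectorParams(termVectors.getValue(), fieldType);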

TokenCountFieldMapper.java

@@ -22,115 +22,86 @@ package org.elasticsearch.index.mapper;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
-import org.apache.lucene.document.FieldType;
-import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.search.lookup.SearchLookup;

 import java.io.IOException;
-import java.util.Iterator;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;

-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue;
-import static org.elasticsearch.index.mapper.TypeParsers.parseField;

 /**
  * A {@link FieldMapper} that takes a string and writes a count of the tokens in that string
  * to the index. In most ways the mapper acts just like an {@link NumberFieldMapper}.
  */
-public class TokenCountFieldMapper extends FieldMapper {
+public class TokenCountFieldMapper extends ParametrizedFieldMapper {

     public static final String CONTENT_TYPE = "token_count";

-    public static class Defaults {
-        public static final boolean DEFAULT_POSITION_INCREMENTS = true;
+    private static TokenCountFieldMapper toType(FieldMapper in) {
+        return (TokenCountFieldMapper) in;
     }

-    public static class Builder extends FieldMapper.Builder<Builder> {
-        private NamedAnalyzer analyzer;
-        private Integer nullValue;
-        private boolean enablePositionIncrements = Defaults.DEFAULT_POSITION_INCREMENTS;
+    public static class Builder extends ParametrizedFieldMapper.Builder {
+
+        private final Parameter<Boolean> index = Parameter.indexParam(m -> toType(m).index, true);
+        private final Parameter<Boolean> hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true);
+        private final Parameter<Boolean> store = Parameter.storeParam(m -> toType(m).store, false);
+
+        private final Parameter<NamedAnalyzer> analyzer
+            = Parameter.analyzerParam("analyzer", true, m -> toType(m).analyzer, () -> null);
+        private final Parameter<Integer> nullValue = new Parameter<>(
+            "null_value", false, () -> null,
+            (n, c, o) -> o == null ? null : nodeIntegerValue(o), m -> toType(m).nullValue).acceptsNull();
+        private final Parameter<Boolean> enablePositionIncrements
+            = Parameter.boolParam("enable_position_increments", false, m -> toType(m).enablePositionIncrements, true);
+
+        private final Parameter<Map<String, String>> meta = Parameter.metaParam();

         public Builder(String name) {
-            super(name, new FieldType());
-            builder = this;
+            super(name);
         }

-        public Builder analyzer(NamedAnalyzer analyzer) {
-            this.analyzer = analyzer;
-            return this;
-        }
-
-        public NamedAnalyzer analyzer() {
-            return analyzer;
-        }
-
-        public Builder enablePositionIncrements(boolean enablePositionIncrements) {
-            this.enablePositionIncrements = enablePositionIncrements;
-            return this;
-        }
-
-        public boolean enablePositionIncrements() {
-            return enablePositionIncrements;
-        }
-
-        public Builder nullValue(Integer nullValue) {
-            this.nullValue = nullValue;
-            return this;
+        @Override
+        protected List<Parameter<?>> getParameters() {
+            return Arrays.asList(index, hasDocValues, store, analyzer, nullValue, enablePositionIncrements, meta);
         }

         @Override
         public TokenCountFieldMapper build(BuilderContext context) {
-            return new TokenCountFieldMapper(name, fieldType,
-                new NumberFieldMapper.NumberFieldType(buildFullName(context), NumberFieldMapper.NumberType.INTEGER),
-                analyzer, enablePositionIncrements, nullValue,
-                multiFieldsBuilder.build(this, context), copyTo);
-        }
-    }
-
-    public static class TypeParser implements Mapper.TypeParser {
-        @Override
-        public Mapper.Builder<?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            TokenCountFieldMapper.Builder builder = new TokenCountFieldMapper.Builder(name);
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String propName = entry.getKey();
-                Object propNode = entry.getValue();
-                if (propName.equals("null_value")) {
-                    builder.nullValue(nodeIntegerValue(propNode));
-                    iterator.remove();
-                } else if (propName.equals("analyzer")) {
-                    NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(propNode.toString());
-                    if (analyzer == null) {
-                        throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
-                    }
-                    builder.analyzer(analyzer);
-                    iterator.remove();
-                } else if (propName.equals("enable_position_increments")) {
-                    builder.enablePositionIncrements(nodeBooleanValue(propNode));
-                    iterator.remove();
-                }
-            }
-            parseField(builder, name, node, parserContext);
-            if (builder.analyzer() == null) {
+            if (analyzer.getValue() == null) {
                 throw new MapperParsingException("Analyzer must be set for field [" + name + "] but wasn't.");
             }
-            return builder;
+            MappedFieldType ft = new NumberFieldMapper.NumberFieldType(
+                buildFullName(context),
+                NumberFieldMapper.NumberType.INTEGER,
+                index.getValue(),
+                store.getValue(),
+                hasDocValues.getValue(),
+                meta.getValue());
+            return new TokenCountFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this);
         }
     }

-    private NamedAnalyzer analyzer;
-    private final boolean enablePositionIncrements;
-    private Integer nullValue;
+    public static TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));

-    protected TokenCountFieldMapper(String simpleName, FieldType fieldType, MappedFieldType defaultFieldType,
-                                    NamedAnalyzer analyzer, boolean enablePositionIncrements, Integer nullValue,
-                                    MultiFields multiFields, CopyTo copyTo) {
-        super(simpleName, fieldType, defaultFieldType, multiFields, copyTo);
-        this.analyzer = analyzer;
-        this.enablePositionIncrements = enablePositionIncrements;
-        this.nullValue = nullValue;
+    private final boolean index;
+    private final boolean hasDocValues;
+    private final boolean store;
+    private final NamedAnalyzer analyzer;
+    private final boolean enablePositionIncrements;
+    private final Integer nullValue;
+
+    protected TokenCountFieldMapper(String simpleName, MappedFieldType defaultFieldType,
+                                    MultiFields multiFields, CopyTo copyTo, Builder builder) {
+        super(simpleName, defaultFieldType, multiFields, copyTo);
+        this.analyzer = builder.analyzer.getValue();
+        this.enablePositionIncrements = builder.enablePositionIncrements.getValue();
+        this.nullValue = builder.nullValue.getValue();
+        this.index = builder.index.getValue();
+        this.hasDocValues = builder.hasDocValues.getValue();
+        this.store = builder.store.getValue();
     }

     @Override
@@ -153,10 +124,9 @@ public class TokenCountFieldMapper extends FieldMapper {
             tokenCount = countPositions(analyzer, name(), value, enablePositionIncrements);
         }

-        boolean indexed = fieldType().isSearchable();
-        boolean docValued = fieldType().hasDocValues();
-        boolean stored = fieldType.stored();
-        context.doc().addAll(NumberFieldMapper.NumberType.INTEGER.createFields(fieldType().name(), tokenCount, indexed, docValued, stored));
+        context.doc().addAll(
+            NumberFieldMapper.NumberType.INTEGER.createFields(fieldType().name(), tokenCount, index, hasDocValues, store)
+        );
     }

     @Override
@@ -224,21 +194,7 @@ public class TokenCountFieldMapper extends FieldMapper {
     }

     @Override
-    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
-        // TODO we should ban updating analyzers and null values as well
-        if (this.enablePositionIncrements != ((TokenCountFieldMapper)other).enablePositionIncrements) {
-            conflicts.add("mapper [" + name() + "] has a different [enable_position_increments] setting");
-        }
-        this.analyzer = ((TokenCountFieldMapper)other).analyzer;
+    public ParametrizedFieldMapper.Builder getMergeBuilder() {
+        return new Builder(simpleName()).init(this);
     }
-
-    @Override
-    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
-        super.doXContentBody(builder, includeDefaults, params);
-        builder.field("analyzer", analyzer());
-        if (includeDefaults || enablePositionIncrements() != Defaults.DEFAULT_POSITION_INCREMENTS) {
-            builder.field("enable_position_increments", enablePositionIncrements());
-        }
-    }
 }
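
The token_count conversion also shows the escape hatch for settings with no stock Parameter factory: null_value goes through the general Parameter constructor with acceptsNull() allowing an explicit JSON null. The reading of the constructor arguments below is inferred from the call sites in this diff (name, updateable flag, default supplier, parser, and the function that reads the value back off an existing mapper):

    // From TokenCountFieldMapper.Builder above, comments added.
    private final Parameter<Integer> nullValue = new Parameter<>(
        "null_value", false, () -> null,
        (n, c, o) -> o == null ? null : nodeIntegerValue(o), m -> toType(m).nullValue).acceptsNull();

Note also that `analyzer` is declared with its updateable flag set to true, which appears to preserve the old mergeOptions behaviour of allowing the analyzer to be replaced on a mapping update.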

RankFeatureFieldMapperTests.java

@@ -32,27 +32,20 @@ import org.elasticsearch.common.collect.List;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.plugins.Plugin;
-import org.hamcrest.Matchers;

 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Set;

 import static org.hamcrest.Matchers.instanceOf;

-public class RankFeatureFieldMapperTests extends FieldMapperTestCase2<RankFeatureFieldMapper.Builder> {
+public class RankFeatureFieldMapperTests extends MapperTestCase {

     @Override
     protected void writeFieldValue(XContentBuilder builder) throws IOException {
         builder.value(10);
     }

-    @Override
-    protected Set<String> unsupportedProperties() {
-        return org.elasticsearch.common.collect.Set.of("analyzer", "similarity", "store", "doc_values", "index");
-    }
-
     @Override
     protected void registerParameters(ParameterChecker checker) throws IOException {
         checker.registerConflictCheck("positive_score_impact", b -> b.field("positive_score_impact", false));
@@ -81,21 +74,11 @@ public class RankFeatureFieldMapperTests extends FieldMapperTestCase2<RankFeatur
         return freq;
     }

-    @Override
-    protected RankFeatureFieldMapper.Builder newBuilder() {
-        return new RankFeatureFieldMapper.Builder("rank-feature");
-    }
-
     @Override
     protected void minimalMapping(XContentBuilder b) throws IOException {
         b.field("type", "rank_feature");
     }

-    @Override
-    protected boolean supportsMeta() {
-        return false;
-    }
-
     public void testDefaults() throws Exception {
         DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
         assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString());
@@ -103,7 +86,7 @@ public class RankFeatureFieldMapperTests extends FieldMapperTestCase2<RankFeatur
         ParsedDocument doc1 = mapper.parse(source(b -> b.field("field", 10)));
         IndexableField[] fields = doc1.rootDoc().getFields("_feature");
         assertEquals(1, fields.length);
-        assertThat(fields[0], Matchers.instanceOf(FeatureField.class));
+        assertThat(fields[0], instanceOf(FeatureField.class));
         FeatureField featureField1 = (FeatureField) fields[0];

         ParsedDocument doc2 = mapper.parse(source(b -> b.field("field", 12)));
@@ -122,7 +105,7 @@ public class RankFeatureFieldMapperTests extends FieldMapperTestCase2<RankFeatur
         ParsedDocument doc1 = mapper.parse(source(b -> b.field("field", 10)));
         IndexableField[] fields = doc1.rootDoc().getFields("_feature");
         assertEquals(1, fields.length);
-        assertThat(fields[0], Matchers.instanceOf(FeatureField.class));
+        assertThat(fields[0], instanceOf(FeatureField.class));
         FeatureField featureField1 = (FeatureField) fields[0];

         ParsedDocument doc2 = mapper.parse(source(b -> b.field("field", 12)));

RankFeaturesFieldMapperTests.java

@@ -29,9 +29,8 @@ import org.hamcrest.Matchers;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Set;

-public class RankFeaturesFieldMapperTests extends FieldMapperTestCase2<RankFeaturesFieldMapper.Builder> {
+public class RankFeaturesFieldMapperTests extends MapperTestCase {

     @Override
     protected void writeFieldValue(XContentBuilder builder) throws IOException {
@@ -44,11 +43,6 @@ public class RankFeaturesFieldMapperTests extends FieldMapperTestCase2<RankFeatu
         assertEquals("[rank_features] fields do not support [exists] queries", iae.getMessage());
     }

-    @Override
-    protected Set<String> unsupportedProperties() {
-        return org.elasticsearch.common.collect.Set.of("analyzer", "similarity", "store", "doc_values", "index");
-    }
-
     @Override
     protected Collection<? extends Plugin> getPlugins() {
         return org.elasticsearch.common.collect.List.of(new MapperExtrasPlugin());
@@ -116,9 +110,4 @@ public class RankFeaturesFieldMapperTests extends FieldMapperTestCase2<RankFeatu
         assertEquals("[rank_features] fields do not support indexing multiple values for the same rank feature [foo.field.bar] in " +
             "the same document", e.getCause().getMessage());
     }
-
-    @Override
-    protected RankFeaturesFieldMapper.Builder newBuilder() {
-        return new RankFeaturesFieldMapper.Builder("rf");
-    }
 }

SearchAsYouTypeFieldMapperTests.java

@@ -73,7 +73,7 @@ import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.core.IsInstanceOf.instanceOf;

-public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<SearchAsYouTypeFieldMapper.Builder> {
+public class SearchAsYouTypeFieldMapperTests extends MapperTestCase {

     @Override
     protected void registerParameters(ParameterChecker checker) throws IOException {
@@ -125,24 +125,11 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search
         builder.value("new york city");
     }

-    @Override
-    protected Set<String> unsupportedProperties() {
-        return org.elasticsearch.common.collect.Set.of("doc_values");
-    }
-
     @Override
     protected Collection<? extends Plugin> getPlugins() {
         return org.elasticsearch.common.collect.List.of(new MapperExtrasPlugin());
     }

-    @Override
-    protected SearchAsYouTypeFieldMapper.Builder newBuilder() {
-        return new SearchAsYouTypeFieldMapper.Builder("sayt")
-            .indexAnalyzer(new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer()))
-            .searchAnalyzer(new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer()))
-            .searchQuoteAnalyzer(new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer()));
-    }
-
     @Override
     protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) {
         NamedAnalyzer dflt = new NamedAnalyzer(
@@ -197,7 +184,7 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search
             getShingleFieldMapper(defaultMapper, "field._2gram").fieldType(), 2, "default", prefixFieldMapper.fieldType());
         assertShingleFieldType(
             getShingleFieldMapper(defaultMapper, "field._3gram").fieldType(), 3, "default", prefixFieldMapper.fieldType());
     }

     public void testConfiguration() throws IOException {
         int maxShingleSize = 4;
@@ -240,7 +227,7 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search
             b.endObject();
             b.startObject("b_field").field("type", "text").endObject();
         })));
-        assertThat(e.getMessage(), containsString("different [max_shingle_size]"));
+        assertThat(e.getMessage(), containsString("Cannot update parameter [max_shingle_size]"));
     }

     public void testMultiFields() throws IOException {
@@ -272,7 +259,7 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search
             assertThat(fieldType, instanceOf(ShingleFieldType.class));
             ShingleFieldType ft = (ShingleFieldType) fieldType;
             assertEquals(i, ft.shingleSize);
-            assertTrue(prefixFieldType == ft.prefixFieldType);
+            assertSame(prefixFieldType, ft.prefixFieldType);
         }

         ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("field", "new york city")));
@@ -288,8 +275,10 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search
             fieldMapping(b -> b.field("type", "search_as_you_type").field("index_options", "offsets"))
         );

+        assertThat(getRootFieldMapper(mapper, "field").fieldType().fieldType.indexOptions(),
+            equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS));
+
         Stream.of(
-            getRootFieldMapper(mapper, "field"),
             getPrefixFieldMapper(mapper, "field._index_prefix"),
             getShingleFieldMapper(mapper, "field._2gram"),
             getShingleFieldMapper(mapper, "field._3gram")
@@ -300,7 +289,7 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search

     public void testStore() throws IOException {
         DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "search_as_you_type").field("store", true)));
-        assertTrue(getRootFieldMapper(mapper, "field").fieldType.stored());
+        assertTrue(getRootFieldMapper(mapper, "field").fieldType().fieldType.stored());
         Stream.of(
             getPrefixFieldMapper(mapper, "field._index_prefix"),
             getShingleFieldMapper(mapper, "field._2gram"),
@@ -322,8 +311,9 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search

     public void testTermVectors() throws IOException {
         DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "search_as_you_type").field("term_vector", "yes")));
+        assertTrue(getRootFieldMapper(mapper, "field").fieldType().fieldType.storeTermVectors());
+
         Stream.of(
-            getRootFieldMapper(mapper, "field"),
             getShingleFieldMapper(mapper, "field._2gram"),
             getShingleFieldMapper(mapper, "field._3gram")
         ).forEach(m -> assertTrue("for " + m.name(), m.fieldType.storeTermVectors()));
@@ -351,8 +341,9 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search
         {
             DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "search_as_you_type").field("norms", false)));
+            assertTrue(getRootFieldMapper(mapper, "field").fieldType().fieldType.omitNorms());
+
             Stream.of(
-                getRootFieldMapper(mapper, "field"),
                 getPrefixFieldMapper(mapper, "field._index_prefix"),
                 getShingleFieldMapper(mapper, "field._2gram"),
                 getShingleFieldMapper(mapper, "field._3gram")
@@ -360,7 +351,6 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search
         }
     }

     public void testDocumentParsingSingleValue() throws IOException {
         documentParsingTestCase(Collections.singleton(randomAlphaOfLengthBetween(5, 20)));
     }
@@ -578,7 +568,7 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search
         IndexableField[] prefixFields = parsedDocument.rootDoc().getFields("field._index_prefix");
         IndexableField[] shingle2Fields = parsedDocument.rootDoc().getFields("field._2gram");
         IndexableField[] shingle3Fields = parsedDocument.rootDoc().getFields("field._3gram");
-        for (IndexableField[] fields : new IndexableField[][]{ rootFields, prefixFields, shingle2Fields, shingle3Fields}) {
+        for (IndexableField[] fields : new IndexableField[][]{rootFields, prefixFields, shingle2Fields, shingle3Fields}) {
             Set<String> expectedValues = Arrays.stream(fields).map(IndexableField::stringValue).collect(Collectors.toSet());
             assertThat(values, equalTo(expectedValues));
         }
@@ -611,7 +601,7 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<Search
                                                 String analyzerName,
                                                 PrefixFieldType prefixFieldType) {

-        assertThat(fieldType.shingleFields.length, equalTo(maxShingleSize-1));
+        assertThat(fieldType.shingleFields.length, equalTo(maxShingleSize - 1));
         for (NamedAnalyzer analyzer : asList(fieldType.indexAnalyzer(), fieldType.getTextSearchInfo().getSearchAnalyzer())) {
             assertThat(analyzer.name(), equalTo(analyzerName));
         }
View File
@@ -49,8 +49,14 @@ public class SearchAsYouTypeFieldTypeTests extends FieldTypeTestCase {
         UNSEARCHABLE.freeze();
     }

+    private static final FieldType SEARCHABLE = new FieldType();
+    static {
+        SEARCHABLE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+        SEARCHABLE.freeze();
+    }
+
     private static SearchAsYouTypeFieldType createFieldType() {
-        final SearchAsYouTypeFieldType fieldType = new SearchAsYouTypeFieldType(NAME, Defaults.FIELD_TYPE, null,
+        final SearchAsYouTypeFieldType fieldType = new SearchAsYouTypeFieldType(NAME, SEARCHABLE, null,
             Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER, Collections.emptyMap());
         fieldType.setPrefixField(new PrefixFieldType(NAME, TextSearchInfo.SIMPLE_MATCH_ONLY, Defaults.MIN_GRAM, Defaults.MAX_GRAM));
         fieldType.setShingleFields(new ShingleFieldType[] {
View File
@@ -24,64 +24,74 @@ import org.apache.lucene.analysis.CannedTokenStream;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.IndexService;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AnalyzerScope;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.test.ESSingleNodeTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;

 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;

-import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;

 /**
  * Test for {@link TokenCountFieldMapper}.
  */
-public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
+public class TokenCountFieldMapperTests extends MapperTestCase {

     @Override
-    protected Collection<Class<? extends Plugin>> getPlugins() {
-        return pluginList(InternalSettingsPlugin.class, MapperExtrasPlugin.class);
+    protected Collection<Plugin> getPlugins() {
+        return Collections.singletonList(new MapperExtrasPlugin());
     }

-    public void testMerge() throws IOException {
-        String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
-                .startObject("person")
-                    .startObject("properties")
-                        .startObject("tc")
-                            .field("type", "token_count")
-                            .field("analyzer", "keyword")
-                        .endObject()
-                    .endObject()
-                .endObject().endObject());
-        MapperService mapperService = createIndex("test").mapperService();
-        DocumentMapper stage1 = mapperService.merge("person",
-            new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE);
-
-        String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
-                .startObject("person")
-                    .startObject("properties")
-                        .startObject("tc")
-                            .field("type", "token_count")
-                            .field("analyzer", "standard")
-                        .endObject()
-                    .endObject()
-                .endObject().endObject());
-        DocumentMapper stage2 = mapperService.merge("person",
-            new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
-
-        // previous mapper has not been modified
-        assertThat(((TokenCountFieldMapper) stage1.mappers().getMapper("tc")).analyzer(), equalTo("keyword"));
-        // but the new one has the change
-        assertThat(((TokenCountFieldMapper) stage2.mappers().getMapper("tc")).analyzer(), equalTo("standard"));
+    @Override
+    protected void minimalMapping(XContentBuilder b) throws IOException {
+        b.field("type", "token_count").field("analyzer", "keyword");
+    }
+
+    @Override
+    protected void writeFieldValue(XContentBuilder builder) throws IOException {
+        builder.value("some words");
+    }
+
+    @Override
+    protected void registerParameters(ParameterChecker checker) throws IOException {
+        checker.registerConflictCheck("index", b -> b.field("index", false));
+        checker.registerConflictCheck("store", b -> b.field("store", true));
+        checker.registerConflictCheck("doc_values", b -> b.field("doc_values", false));
+        checker.registerConflictCheck("null_value", b -> b.field("null_value", 1));
+        checker.registerConflictCheck("enable_position_increments", b -> b.field("enable_position_increments", false));
+        checker.registerUpdateCheck(
+            this::minimalMapping,
+            b -> b.field("type", "token_count").field("analyzer", "standard"),
+            m -> {
+                TokenCountFieldMapper tcfm = (TokenCountFieldMapper) m;
+                assertThat(tcfm.analyzer(), equalTo("standard"));
+            });
+    }
+
+    @Override
+    protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) {
+        NamedAnalyzer dflt = new NamedAnalyzer(
+            "default",
+            AnalyzerScope.INDEX,
+            new StandardAnalyzer()
+        );
+        NamedAnalyzer standard = new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer());
+        NamedAnalyzer keyword = new NamedAnalyzer("keyword", AnalyzerScope.INDEX, new KeywordAnalyzer());
+        Map<String, NamedAnalyzer> analyzers = new HashMap<>();
+        analyzers.put("default", dflt);
+        analyzers.put("standard", standard);
+        analyzers.put("keyword", keyword);
+        return new IndexAnalyzers(analyzers, Collections.emptyMap(), Collections.emptyMap());
     }

     /**
@@ -120,33 +130,12 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
         Collections.shuffle(Arrays.asList(tokens), random());
         final TokenStream tokenStream = new CannedTokenStream(finalTokenIncrement, 0, tokens);
         // TODO: we have no CannedAnalyzer?
-        Analyzer analyzer = new Analyzer() {
+        return new Analyzer() {
             @Override
             public TokenStreamComponents createComponents(String fieldName) {
                 return new TokenStreamComponents(new MockTokenizer(), tokenStream);
             }
         };
-        return analyzer;
-    }
-
-    public void testEmptyName() throws IOException {
-        IndexService indexService = createIndex("test");
-        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
-                .startObject("type")
-                    .startObject("properties")
-                        .startObject("")
-                            .field("type", "token_count")
-                            .field("analyzer", "standard")
-                        .endObject()
-                    .endObject()
-                .endObject().endObject());
-
-        // Empty name not allowed in index created after 5.0
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-            () -> parser.parse("type", new CompressedXContent(mapping))
-        );
-        assertThat(e.getMessage(), containsString("name cannot be empty string"));
     }

     public void testParseNullValue() throws Exception {
@@ -168,31 +157,27 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
     }

     private DocumentMapper createIndexWithTokenCountField() throws IOException {
-        final String content = Strings.toString(XContentFactory.jsonBuilder().startObject()
-                .startObject("person")
-                    .startObject("properties")
-                        .startObject("test")
-                            .field("type", "text")
-                            .startObject("fields")
-                                .startObject("tc")
-                                    .field("type", "token_count")
-                                    .field("analyzer", "standard")
-                                .endObject()
-                            .endObject()
-                        .endObject()
-                    .endObject()
-                .endObject().endObject());
-
-        return createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(content));
+        return createDocumentMapper(mapping(b -> {
+            b.startObject("test");
+            {
+                b.field("type", "text");
+                b.startObject("fields");
+                {
+                    b.startObject("tc");
+                    {
+                        b.field("type", "token_count");
+                        b.field("analyzer", "standard");
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));
     }

     private SourceToParse createDocument(String fieldValue) throws Exception {
-        BytesReference request = BytesReference.bytes(XContentFactory.jsonBuilder()
-            .startObject()
-                .field("test", fieldValue)
-            .endObject());
-        return new SourceToParse("test", "person", "1", request, XContentType.JSON);
+        return source(b -> b.field("test", fieldValue));
     }

     private ParseContext.Document parseDocument(DocumentMapper mapper, SourceToParse request) {
View File
@@ -36,7 +36,6 @@ import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.NumberType;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
@@ -125,9 +124,11 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
     private final Parameter<NamedAnalyzer> analyzer;
     private final Parameter<NamedAnalyzer> searchAnalyzer;
     private final Parameter<Boolean> preserveSeparators = Parameter.boolParam("preserve_separators", false,
-        m -> toType(m).preserveSeparators, Defaults.DEFAULT_PRESERVE_SEPARATORS);
+        m -> toType(m).preserveSeparators, Defaults.DEFAULT_PRESERVE_SEPARATORS)
+        .alwaysSerialize();
     private final Parameter<Boolean> preservePosInc = Parameter.boolParam("preserve_position_increments", false,
-        m -> toType(m).preservePosInc, Defaults.DEFAULT_POSITION_INCREMENTS);
+        m -> toType(m).preservePosInc, Defaults.DEFAULT_POSITION_INCREMENTS)
+        .alwaysSerialize();
     private final Parameter<ContextMappings> contexts = new Parameter<>("contexts", false, () -> null,
         (n, c, o) -> ContextMappings.load(o, c.indexVersionCreated()), m -> toType(m).contexts)
         .setSerializer((b, n, c) -> {
@@ -141,7 +142,8 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
     private final Parameter<Integer> maxInputLength = Parameter.intParam("max_input_length", true,
         m -> toType(m).maxInputLength, Defaults.DEFAULT_MAX_INPUT_LENGTH)
         .addDeprecatedName("max_input_len")
-        .setValidator(Builder::validateInputLength);
+        .setValidator(Builder::validateInputLength)
+        .alwaysSerialize();
     private final Parameter<Map<String, String>> meta = Parameter.metaParam();

     private final NamedAnalyzer defaultAnalyzer;
@@ -156,7 +158,8 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
         super(name);
         this.defaultAnalyzer = defaultAnalyzer;
         this.indexVersionCreated = indexVersionCreated;
-        this.analyzer = Parameter.analyzerParam("analyzer", false, m -> toType(m).analyzer, () -> defaultAnalyzer);
+        this.analyzer = Parameter.analyzerParam("analyzer", false, m -> toType(m).analyzer, () -> defaultAnalyzer)
+            .alwaysSerialize();
         this.searchAnalyzer
             = Parameter.analyzerParam("search_analyzer", true, m -> toType(m).searchAnalyzer, analyzer::getValue);
     }
@@ -169,26 +172,7 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
     @Override
     protected List<Parameter<?>> getParameters() {
-        return Arrays.asList(analyzer, searchAnalyzer, preserveSeparators, preservePosInc, contexts, maxInputLength, meta);
-    }
-
-    @Override
-    protected void toXContent(XContentBuilder builder, boolean includeDefaults) throws IOException {
-        builder.field("analyzer", this.analyzer.getValue().name());
-        if (Objects.equals(this.analyzer.getValue().name(), this.searchAnalyzer.getValue().name()) == false) {
-            builder.field("search_analyzer", this.searchAnalyzer.getValue().name());
-        }
-        builder.field(this.preserveSeparators.name, this.preserveSeparators.getValue());
-        builder.field(this.preservePosInc.name, this.preservePosInc.getValue());
-        builder.field(this.maxInputLength.name, this.maxInputLength.getValue());
-        if (this.contexts.getValue() != null) {
-            builder.startArray(this.contexts.name);
-            this.contexts.getValue().toXContent(builder, ToXContent.EMPTY_PARAMS);
-            builder.endArray();
-        }
-        if (this.meta.getValue().isEmpty() == false) {
-            builder.field(this.meta.name, this.meta.getValue());
-        }
+        return Arrays.asList(analyzer, searchAnalyzer, preserveSeparators, preservePosInc, maxInputLength, contexts, meta);
     }

     @Override
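
Aside: with those four parameters marked alwaysSerialize, the generic Parameter serializer reproduces what the removed toXContent wrote by hand. A hedged sketch (not part of this commit) of the expected output for a freshly created completion field, assuming the stock completion defaults of analyzer "simple", both preserve flags true, and max_input_length 50:

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

// Builds the mapping fragment a default completion field should now
// serialize to; the literal values mirror the Defaults constants used above.
public class CompletionSerializationSketch {
    public static void main(String[] args) throws Exception {
        XContentBuilder expected = XContentFactory.jsonBuilder().startObject()
            .field("type", "completion")
            .field("analyzer", "simple")
            .field("preserve_separators", true)
            .field("preserve_position_increments", true)
            .field("max_input_length", 50)
            .endObject();
        System.out.println(Strings.toString(expected));
    }
}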
View File
@@ -91,13 +91,8 @@ public final class KeywordFieldMapper extends ParametrizedFieldMapper {
     private final Parameter<String> indexOptions
         = Parameter.restrictedStringParam("index_options", false, m -> toType(m).indexOptions, "docs", "freqs");
-    private final Parameter<Boolean> hasNorms
-        = Parameter.boolParam("norms", true, m -> toType(m).fieldType.omitNorms() == false, false)
-        .setMergeValidator((o, n) -> o == n || (o && n == false)); // norms can be updated from 'true' to 'false' but not vv
-    private final Parameter<SimilarityProvider> similarity = new Parameter<>("similarity", false, () -> null,
-        (n, c, o) -> TypeParsers.resolveSimilarity(c, n, o), m -> toType(m).similarity)
-        .setSerializer((b, f, v) -> b.field(f, v == null ? null : v.name()), v -> v == null ? null : v.name())
-        .acceptsNull();
+    private final Parameter<Boolean> hasNorms = TextParams.norms(false, m -> toType(m).fieldType.omitNorms() == false);
+    private final Parameter<SimilarityProvider> similarity = TextParams.similarity(m -> toType(m).similarity);

     private final Parameter<String> normalizer
         = Parameter.stringParam("normalizer", false, m -> toType(m).normalizerName, "default");
@@ -139,19 +134,6 @@ public final class KeywordFieldMapper extends ParametrizedFieldMapper {
         return this;
     }

-    private static IndexOptions toIndexOptions(boolean indexed, String in) {
-        if (indexed == false) {
-            return IndexOptions.NONE;
-        }
-        switch (in) {
-            case "docs":
-                return IndexOptions.DOCS;
-            case "freqs":
-                return IndexOptions.DOCS_AND_FREQS;
-        }
-        throw new MapperParsingException("Unknown index option [" + in + "]");
-    }
-
     @Override
     protected List<Parameter<?>> getParameters() {
         return Arrays.asList(indexed, hasDocValues, stored, nullValue, eagerGlobalOrdinals, ignoreAbove,
@@ -186,7 +168,7 @@ public final class KeywordFieldMapper extends ParametrizedFieldMapper {
     public KeywordFieldMapper build(BuilderContext context) {
         FieldType fieldtype = new FieldType(Defaults.FIELD_TYPE);
         fieldtype.setOmitNorms(this.hasNorms.getValue() == false);
-        fieldtype.setIndexOptions(toIndexOptions(this.indexed.getValue(), this.indexOptions.getValue()));
+        fieldtype.setIndexOptions(TextParams.toIndexOptions(this.indexed.getValue(), this.indexOptions.getValue()));
         fieldtype.setStored(this.stored.getValue());
         return new KeywordFieldMapper(name, fieldtype, buildFieldType(context, fieldtype),
             multiFieldsBuilder.build(this, context), copyTo.build(), this);
View File
@@ -148,6 +148,7 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
         private Consumer<T> validator = null;
         private Serializer<T> serializer = XContentBuilder::field;
         private BooleanSupplier serializerPredicate = () -> true;
+        private boolean alwaysSerialize = false;
         private Function<T, String> conflictSerializer = Objects::toString;
         private BiPredicate<T, T> mergeValidator;
         private T value;
@@ -242,6 +243,14 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
             return this;
         }

+        /**
+         * Ensures that this parameter is always serialized, no matter its value
+         */
+        public Parameter<T> alwaysSerialize() {
+            this.alwaysSerialize = true;
+            return this;
+        }
+
         /**
          * Sets a custom merge validator. By default, merges are accepted if the
          * parameter is updateable, or if the previous and new values are equal
@@ -276,7 +285,7 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
         }

         private void toXContent(XContentBuilder builder, boolean includeDefaults) throws IOException {
-            if ((includeDefaults || isConfigured()) && serializerPredicate.getAsBoolean()) {
+            if (alwaysSerialize || ((includeDefaults || isConfigured()) && serializerPredicate.getAsBoolean())) {
                 serializer.serialize(builder, name, getValue());
             }
         }
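
Aside: a standalone restatement (plain Java, not part of this commit) of the serialization rule the hunk above introduces; alwaysSerialize short-circuits the old configured/include-defaults check:

// Minimal sketch of the predicate above: an always-serialized parameter is
// written unconditionally; otherwise the old rule applies unchanged.
public class SerializeRuleSketch {
    static boolean shouldSerialize(boolean alwaysSerialize, boolean includeDefaults,
                                   boolean isConfigured, boolean serializerPredicate) {
        return alwaysSerialize || ((includeDefaults || isConfigured) && serializerPredicate);
    }

    public static void main(String[] args) {
        // An unconfigured parameter is normally skipped...
        System.out.println(shouldSerialize(false, false, false, true)); // false
        // ...but alwaysSerialize() forces it out even at its default value.
        System.out.println(shouldSerialize(true, false, false, true));  // true
    }
}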
@@ -528,7 +537,7 @@ public abstract class ParametrizedFieldMapper extends FieldMapper {
     /**
      * Writes the current builder parameter values as XContent
      */
-    protected void toXContent(XContentBuilder builder, boolean includeDefaults) throws IOException {
+    protected final void toXContent(XContentBuilder builder, boolean includeDefaults) throws IOException {
         for (Parameter<?> parameter : getParameters()) {
             parameter.toXContent(builder, includeDefaults);
         }
View File
@@ -0,0 +1,145 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper;
+
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.IndexOptions;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.index.mapper.ParametrizedFieldMapper.Parameter;
+import org.elasticsearch.index.similarity.SimilarityProvider;
+
+import java.util.function.Function;
+import java.util.function.Supplier;
+
+/**
+ * Utility functions for text mapper parameters
+ */
+public final class TextParams {
+
+    private TextParams() {}
+
+    public static final class Analyzers {
+        public final Parameter<NamedAnalyzer> indexAnalyzer;
+        public final Parameter<NamedAnalyzer> searchAnalyzer;
+        public final Parameter<NamedAnalyzer> searchQuoteAnalyzer;
+
+        public Analyzers(Supplier<NamedAnalyzer> defaultAnalyzer) {
+            this.indexAnalyzer = Parameter.analyzerParam("analyzer", false,
+                m -> m.fieldType().indexAnalyzer(), defaultAnalyzer);
+            this.searchAnalyzer
+                = Parameter.analyzerParam("search_analyzer", true,
+                m -> m.fieldType().getTextSearchInfo().getSearchAnalyzer(), indexAnalyzer::getValue);
+            this.searchQuoteAnalyzer
+                = Parameter.analyzerParam("search_quote_analyzer", true,
+                m -> m.fieldType().getTextSearchInfo().getSearchQuoteAnalyzer(), searchAnalyzer::getValue);
+        }
+
+        public NamedAnalyzer getIndexAnalyzer() {
+            return indexAnalyzer.getValue();
+        }
+
+        public NamedAnalyzer getSearchAnalyzer() {
+            return searchAnalyzer.getValue();
+        }
+
+        public NamedAnalyzer getSearchQuoteAnalyzer() {
+            return searchQuoteAnalyzer.getValue();
+        }
+    }
+
+    public static Parameter<Boolean> norms(boolean defaultValue, Function<FieldMapper, Boolean> initializer) {
+        return Parameter.boolParam("norms", true, initializer, defaultValue)
+            .setMergeValidator((o, n) -> o == n || (o && n == false)); // norms can be updated from 'true' to 'false' but not vv
+    }
+
+    public static Parameter<SimilarityProvider> similarity(Function<FieldMapper, SimilarityProvider> init) {
+        return new Parameter<>("similarity", false, () -> null,
+            (n, c, o) -> TypeParsers.resolveSimilarity(c, n, o), init)
+            .setSerializer((b, f, v) -> b.field(f, v == null ? null : v.name()), v -> v == null ? null : v.name())
+            .acceptsNull();
+    }
+
+    public static Parameter<String> indexOptions(Function<FieldMapper, String> initializer) {
+        return Parameter.restrictedStringParam("index_options", false, initializer,
+            "positions", "docs", "freqs", "offsets");
+    }
+
+    public static IndexOptions toIndexOptions(boolean indexed, String indexOptions) {
+        if (indexed == false) {
+            return IndexOptions.NONE;
+        }
+        switch (indexOptions) {
+            case "docs":
+                return IndexOptions.DOCS;
+            case "freqs":
+                return IndexOptions.DOCS_AND_FREQS;
+            case "positions":
+                return IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
+            case "offsets":
+                return IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+        }
+        throw new IllegalArgumentException("Unknown [index_options] value: [" + indexOptions + "]");
+    }
+
+    public static Parameter<String> termVectors(Function<FieldMapper, String> initializer) {
+        return Parameter.restrictedStringParam("term_vector", false, initializer,
+            "no",
+            "yes",
+            "with_positions",
+            "with_offsets",
+            "with_positions_offsets",
+            "with_positions_payloads",
+            "with_positions_offsets_payloads");
+    }
+
+    public static void setTermVectorParams(String configuration, FieldType fieldType) {
+        switch (configuration) {
+            case "no":
+                fieldType.setStoreTermVectors(false);
+                return;
+            case "yes":
+                fieldType.setStoreTermVectors(true);
+                return;
+            case "with_positions":
+                fieldType.setStoreTermVectors(true);
+                fieldType.setStoreTermVectorPositions(true);
+                return;
+            case "with_offsets":
+            case "with_positions_offsets":
+                fieldType.setStoreTermVectors(true);
+                fieldType.setStoreTermVectorPositions(true);
+                fieldType.setStoreTermVectorOffsets(true);
+                return;
+            case "with_positions_payloads":
+                fieldType.setStoreTermVectors(true);
+                fieldType.setStoreTermVectorPositions(true);
+                fieldType.setStoreTermVectorPayloads(true);
+                return;
+            case "with_positions_offsets_payloads":
+                fieldType.setStoreTermVectors(true);
+                fieldType.setStoreTermVectorPositions(true);
+                fieldType.setStoreTermVectorOffsets(true);
+                fieldType.setStoreTermVectorPayloads(true);
+                return;
+        }
+        throw new IllegalArgumentException("Unknown [term_vector] setting: [" + configuration + "]");
+    }
+}
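
Aside: a self-contained sketch (not part of this commit) of the two FieldType helpers above in use. It assumes only Lucene and this TextParams class on the classpath; the printed values follow directly from the switch statements shown:

import org.apache.lucene.document.FieldType;
import org.elasticsearch.index.mapper.TextParams;

public class TextParamsSketch {
    public static void main(String[] args) {
        FieldType ft = new FieldType();
        // "offsets" maps to the most inclusive postings option...
        ft.setIndexOptions(TextParams.toIndexOptions(true, "offsets"));
        System.out.println(ft.indexOptions()); // DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS
        // ...while an unindexed field ignores index_options entirely.
        System.out.println(TextParams.toIndexOptions(false, "offsets")); // NONE

        // term_vector settings expand to the individual Lucene flags:
        TextParams.setTermVectorParams("with_positions_offsets", ft);
        System.out.println(ft.storeTermVectors());         // true
        System.out.println(ft.storeTermVectorPositions()); // true
        System.out.println(ft.storeTermVectorOffsets());   // true
    }
}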