Add declarative parameters to FieldMappers (#58663)
The FieldMapper infrastructure currently has a bunch of shared parameters, many of which are only applicable to a subset of the 41 mapper implementations we ship with. Merging, parsing and serialization of these parameters are spread around the class hierarchy, with much repetitive boilerplate code required. It would be much easier to reason about these things if we could declare the parameter set of each FieldMapper directly in the implementing class, and share the parsing, merging and serialization logic instead.

This commit is a first effort at introducing a declarative parameter style. It adds a new FieldMapper subclass, ParametrizedFieldMapper, and refactors two mappers, Boolean and Binary, to use it.

Parameters are declared on Builder classes, with the declaration including the parameter name, whether or not it is updateable, a default value, how to parse it from mappings, and how to extract it from another mapper at merge time. Builders have a getParameters method, which returns a list of the declared parameters; this is then used for parsing, merging and serialization.

Merging is achieved by constructing a new Builder from the existing Mapper, and merging in values from the merging Mapper; conflicts are all caught at this point, and if none exist then a new, merged Mapper can be built from the Builder. This allows all values on the Mapper to be final.

Other mappers can be gradually migrated to this new style, and once they have all been refactored we can merge ParametrizedFieldMapper and FieldMapper entirely.
This commit is contained in:
parent daa48329ec · commit 67a27e2b9d
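To make the new style concrete before the diff itself, here is a minimal sketch of a Builder written against the ParametrizedFieldMapper API introduced in this commit. It is condensed from the BinaryFieldMapper refactor shown below (the toType helper casts the generic FieldMapper back to the concrete mapper, exactly as in the diff); it is an illustration only, not an additional change in this commit.

    // Sketch only: declaring parameters on a Builder in the new declarative style.
    public static class Builder extends ParametrizedFieldMapper.Builder {

        // non-updateable parameters: changing them in a mapping update is reported as a conflict
        private final Parameter<Boolean> stored =
            Parameter.boolParam("store", false, m -> toType(m).stored, false);
        private final Parameter<Boolean> hasDocValues =
            Parameter.boolParam("doc_values", false, m -> toType(m).hasDocValues, false);
        // updateable parameter with an explicit default, parser and initializer
        private final Parameter<Map<String, String>> meta =
            new Parameter<>("meta", true, Collections.emptyMap(), TypeParsers::parseMeta, m -> m.fieldType().meta());

        public Builder(String name) {
            super(name);
        }

        @Override
        protected List<Parameter<?>> getParameters() {
            // this list drives parsing, merging and serialization of the mapper
            return Arrays.asList(stored, hasDocValues, meta);
        }

        @Override
        public BinaryFieldMapper build(BuilderContext context) {
            return new BinaryFieldMapper(name,
                new BinaryFieldType(buildFullName(context), hasDocValues.getValue(), meta.getValue()),
                multiFieldsBuilder.build(this, context), copyTo.build(), this);
        }
    }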
@@ -159,10 +159,7 @@ public class PercolatorFieldMapper extends FieldMapper {
     }
 
     static BinaryFieldMapper createQueryBuilderFieldBuilder(BuilderContext context) {
-        BinaryFieldMapper.Builder builder = new BinaryFieldMapper.Builder(QUERY_BUILDER_FIELD_NAME);
-        builder.docValues(true);
-        builder.indexOptions(IndexOptions.NONE);
-        builder.store(false);
+        BinaryFieldMapper.Builder builder = new BinaryFieldMapper.Builder(QUERY_BUILDER_FIELD_NAME, true);
         return builder.build(context);
     }
 
@@ -332,6 +332,16 @@ public class XContentMapValues {
         return node.toString();
     }
 
+    /**
+     * Returns the {@link Object#toString} value of its input, or {@code null} if the input is null
+     */
+    public static String nodeStringValue(Object node) {
+        if (node == null) {
+            return null;
+        }
+        return node.toString();
+    }
+
     public static float nodeFloatValue(Object node, float defaultValue) {
         if (node == null) {
             return defaultValue;
 
@@ -20,9 +20,7 @@
 package org.elasticsearch.index.mapper;
 
 import com.carrotsearch.hppc.ObjectArrayList;
-import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.StoredField;
-import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocValuesFieldExistsQuery;
 import org.apache.lucene.search.Query;
 
@@ -43,47 +41,45 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
 
 import java.io.IOException;
 import java.time.ZoneId;
+import java.util.Arrays;
 import java.util.Base64;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import static org.elasticsearch.index.mapper.TypeParsers.parseField;
-
-public class BinaryFieldMapper extends FieldMapper {
+public class BinaryFieldMapper extends ParametrizedFieldMapper {
 
     public static final String CONTENT_TYPE = "binary";
 
-    public static class Defaults {
-        public static final FieldType FIELD_TYPE = new FieldType();
-
-        static {
-            FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
-            FIELD_TYPE.setOmitNorms(true);
-            FIELD_TYPE.freeze();
-        }
+    private static BinaryFieldMapper toType(FieldMapper in) {
+        return (BinaryFieldMapper) in;
     }
 
-    public static class Builder extends FieldMapper.Builder<Builder> {
+    public static class Builder extends ParametrizedFieldMapper.Builder {
 
+        private final Parameter<Boolean> stored = Parameter.boolParam("store", false, m -> toType(m).stored, false);
+        private final Parameter<Boolean> hasDocValues = Parameter.boolParam("doc_values", false, m -> toType(m).hasDocValues, false);
+        private final Parameter<Map<String, String>> meta
+            = new Parameter<>("meta", true, Collections.emptyMap(), TypeParsers::parseMeta, m -> m.fieldType().meta());
+
         public Builder(String name) {
-            super(name, Defaults.FIELD_TYPE);
-            hasDocValues = false;
-            builder = this;
+            this(name, false);
+        }
+
+        public Builder(String name, boolean hasDocValues) {
+            super(name);
+            this.hasDocValues.setValue(hasDocValues);
+        }
+
+        @Override
+        public List<Parameter<?>> getParameters() {
+            return Arrays.asList(meta, stored, hasDocValues);
         }
 
         @Override
         public BinaryFieldMapper build(BuilderContext context) {
-            return new BinaryFieldMapper(name, fieldType, new BinaryFieldType(buildFullName(context), hasDocValues, meta),
-                multiFieldsBuilder.build(this, context), copyTo);
-        }
-
-        @Override
-        public Builder index(boolean index) {
-            if (index) {
-                throw new MapperParsingException("Binary field [" + name() + "] cannot be indexed");
-            }
-            return builder;
+            return new BinaryFieldMapper(name, new BinaryFieldType(buildFullName(context), hasDocValues.getValue(), meta.getValue()),
+                multiFieldsBuilder.build(this, context), copyTo.build(), this);
         }
     }
 
@@ -92,7 +88,7 @@ public class BinaryFieldMapper extends FieldMapper {
         public BinaryFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext)
                 throws MapperParsingException {
             BinaryFieldMapper.Builder builder = new BinaryFieldMapper.Builder(name);
-            parseField(builder, name, node, parserContext);
+            builder.parse(name, parserContext, node);
             return builder;
         }
     }
 
@@ -167,14 +163,19 @@ public class BinaryFieldMapper extends FieldMapper {
         }
     }
 
-    protected BinaryFieldMapper(String simpleName, FieldType fieldType, MappedFieldType mappedFieldType,
-                                MultiFields multiFields, CopyTo copyTo) {
-        super(simpleName, fieldType, mappedFieldType, multiFields, copyTo);
+    private final boolean stored;
+    private final boolean hasDocValues;
+
+    protected BinaryFieldMapper(String simpleName, MappedFieldType mappedFieldType,
+                                MultiFields multiFields, CopyTo copyTo, Builder builder) {
+        super(simpleName, mappedFieldType, multiFields, copyTo);
+        this.stored = builder.stored.getValue();
+        this.hasDocValues = builder.hasDocValues.getValue();
     }
 
     @Override
     protected void parseCreateField(ParseContext context) throws IOException {
-        if (!fieldType.stored() && !fieldType().hasDocValues()) {
+        if (stored == false && hasDocValues == false) {
             return;
         }
         byte[] value = context.parseExternalValue(byte[].class);
 
@@ -188,11 +189,11 @@ public class BinaryFieldMapper extends FieldMapper {
         if (value == null) {
             return;
         }
-        if (fieldType.stored()) {
+        if (stored) {
             context.doc().add(new StoredField(fieldType().name(), value));
         }
 
-        if (fieldType().hasDocValues()) {
+        if (hasDocValues) {
             CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().name());
             if (field == null) {
                 field = new CustomBinaryDocValuesField(fieldType().name(), value);
 
@@ -210,18 +211,8 @@ public class BinaryFieldMapper extends FieldMapper {
     }
 
     @Override
-    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
-
-    }
-
-    @Override
-    protected boolean docValuesByDefault() {
-        return false;
-    }
-
-    @Override
-    protected boolean indexedByDefault() {
-        return false;
+    public ParametrizedFieldMapper.Builder getMergeBuilder() {
+        return new BinaryFieldMapper.Builder(simpleName()).init(this);
     }
 
     @Override
 
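With BinaryFieldMapper migrated, a mapping update for it now follows the sequence sketched below. This is an illustration of the flow only; the real implementation is the final merge() method of ParametrizedFieldMapper, added later in this diff, and individual mappers contribute nothing beyond getMergeBuilder().

    // Illustrative only — condensed from ParametrizedFieldMapper#merge further down in this commit.
    ParametrizedFieldMapper.Builder builder = existing.getMergeBuilder();     // new Builder(simpleName()).init(existing)
    Conflicts conflicts = new Conflicts(existing.name());
    builder.merge((FieldMapper) mergeWith, conflicts);   // non-updateable parameters that differ become conflicts
    conflicts.check();                                   // throws IllegalArgumentException listing every conflict
    ParametrizedFieldMapper merged = builder.build(context); // a fresh mapper is built, so all its fields stay final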
@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.FieldType;
|
||||
import org.apache.lucene.document.SortedNumericDocValuesField;
|
||||
import org.apache.lucene.document.StoredField;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.DocValuesFieldExistsQuery;
|
||||
|
@ -30,7 +31,6 @@ import org.apache.lucene.search.TermQuery;
|
|||
import org.apache.lucene.search.TermRangeQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
|
@ -41,17 +41,15 @@ import org.elasticsearch.search.DocValueFormat;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.time.ZoneId;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.index.mapper.TypeParsers.parseField;
|
||||
|
||||
/**
|
||||
* A field mapper for boolean fields.
|
||||
*/
|
||||
public class BooleanFieldMapper extends FieldMapper {
|
||||
public class BooleanFieldMapper extends ParametrizedFieldMapper {
|
||||
|
||||
public static final String CONTENT_TYPE = "boolean";
|
||||
|
||||
|
@ -71,25 +69,37 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
public static final BytesRef FALSE = new BytesRef("F");
|
||||
}
|
||||
|
||||
public static class Builder extends FieldMapper.Builder<Builder> {
|
||||
private static BooleanFieldMapper toType(FieldMapper in) {
|
||||
return (BooleanFieldMapper) in;
|
||||
}
|
||||
|
||||
private Boolean nullValue;
|
||||
public static class Builder extends ParametrizedFieldMapper.Builder {
|
||||
|
||||
private final Parameter<Boolean> docValues = Parameter.boolParam("doc_values", false, m -> toType(m).hasDocValues, true);
|
||||
private final Parameter<Boolean> indexed = Parameter.boolParam("index", false, m -> toType(m).indexed, true);
|
||||
private final Parameter<Boolean> stored = Parameter.boolParam("store", false, m -> toType(m).stored, false);
|
||||
|
||||
private final Parameter<Boolean> nullValue
|
||||
= new Parameter<>("null_value", false, null, (n, o) -> XContentMapValues.nodeBooleanValue(o), m -> toType(m).nullValue);
|
||||
|
||||
private final Parameter<Float> boost = Parameter.floatParam("boost", true, m -> m.fieldType().boost(), 1.0f);
|
||||
private final Parameter<Map<String, String>> meta
|
||||
= new Parameter<>("meta", true, Collections.emptyMap(), TypeParsers::parseMeta, m -> m.fieldType().meta());
|
||||
|
||||
public Builder(String name) {
|
||||
super(name, Defaults.FIELD_TYPE);
|
||||
this.builder = this;
|
||||
super(name);
|
||||
}
|
||||
|
||||
public Builder nullValue(Boolean nullValue) {
|
||||
this.nullValue = nullValue;
|
||||
return builder;
|
||||
@Override
|
||||
protected List<Parameter<?>> getParameters() {
|
||||
return Arrays.asList(meta, boost, docValues, indexed, nullValue, stored);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BooleanFieldMapper build(BuilderContext context) {
|
||||
return new BooleanFieldMapper(name, fieldType,
|
||||
new BooleanFieldType(buildFullName(context), indexed, hasDocValues, meta),
|
||||
multiFieldsBuilder.build(this, context), copyTo, nullValue);
|
||||
MappedFieldType ft = new BooleanFieldType(buildFullName(context), indexed.getValue(), docValues.getValue(), meta.getValue());
|
||||
ft.setBoost(boost.getValue());
|
||||
return new BooleanFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -98,19 +108,7 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
public BooleanFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext)
|
||||
throws MapperParsingException {
|
||||
BooleanFieldMapper.Builder builder = new BooleanFieldMapper.Builder(name);
|
||||
parseField(builder, name, node, parserContext);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String propName = entry.getKey();
|
||||
Object propNode = entry.getValue();
|
||||
if (propName.equals("null_value")) {
|
||||
if (propNode == null) {
|
||||
throw new MapperParsingException("Property [null_value] cannot be null.");
|
||||
}
|
||||
builder.nullValue(XContentMapValues.nodeBooleanValue(propNode, name + ".null_value"));
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
builder.parse(name, parserContext, node);
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
|
@ -217,11 +215,17 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
private final Boolean nullValue;
|
||||
private final boolean indexed;
|
||||
private final boolean hasDocValues;
|
||||
private final boolean stored;
|
||||
|
||||
protected BooleanFieldMapper(String simpleName, FieldType fieldType, MappedFieldType mappedFieldType,
|
||||
MultiFields multiFields, CopyTo copyTo, Boolean nullValue) {
|
||||
super(simpleName, fieldType, mappedFieldType, multiFields, copyTo);
|
||||
this.nullValue = nullValue;
|
||||
protected BooleanFieldMapper(String simpleName, MappedFieldType mappedFieldType,
|
||||
MultiFields multiFields, CopyTo copyTo, Builder builder) {
|
||||
super(simpleName, mappedFieldType, multiFields, copyTo);
|
||||
this.nullValue = builder.nullValue.getValue();
|
||||
this.stored = builder.stored.getValue();
|
||||
this.indexed = builder.indexed.getValue();
|
||||
this.hasDocValues = builder.docValues.getValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -231,7 +235,7 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context) throws IOException {
|
||||
if (fieldType().isSearchable() == false && !fieldType.stored() && !fieldType().hasDocValues()) {
|
||||
if (indexed == false && stored == false && hasDocValues == false) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -250,10 +254,13 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
if (value == null) {
|
||||
return;
|
||||
}
|
||||
if (fieldType().isSearchable() || fieldType.stored()) {
|
||||
context.doc().add(new Field(fieldType().name(), value ? "T" : "F", fieldType));
|
||||
if (indexed) {
|
||||
context.doc().add(new Field(fieldType().name(), value ? "T" : "F", Defaults.FIELD_TYPE));
|
||||
}
|
||||
if (fieldType().hasDocValues()) {
|
||||
if (stored) {
|
||||
context.doc().add(new StoredField(fieldType().name(), value ? "T" : "F"));
|
||||
}
|
||||
if (hasDocValues) {
|
||||
context.doc().add(new SortedNumericDocValuesField(fieldType().name(), value ? 1 : 0));
|
||||
} else {
|
||||
createFieldNamesField(context);
|
||||
|
@ -261,8 +268,8 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
// TODO ban updating null values
|
||||
public ParametrizedFieldMapper.Builder getMergeBuilder() {
|
||||
return new Builder(simpleName()).init(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -270,11 +277,4 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
if (includeDefaults || nullValue != null) {
|
||||
builder.field("null_value", nullValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -159,7 +159,7 @@ public class CompletionFieldMapper extends FieldMapper {
             } else if (Fields.CONTEXTS.match(fieldName, LoggingDeprecationHandler.INSTANCE)) {
                 builder.contextMappings(ContextMappings.load(fieldNode, parserContext.indexVersionCreated()));
                 iterator.remove();
-            } else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
+            } else if (parseMultiField(builder::addMultiField, name, parserContext, fieldName, fieldNode)) {
                 iterator.remove();
             }
         }
 
@@ -45,6 +45,13 @@ public final class ContentPath {
         this.index = 0;
     }
 
+    public ContentPath(String path) {
+        this.sb = new StringBuilder();
+        this.offset = 0;
+        this.index = 0;
+        add(path);
+    }
+
     public void add(String name) {
         path[index++] = name;
         if (index == path.length) { // expand if needed
 
@@ -292,7 +292,7 @@ public final class DateFieldMapper extends FieldMapper {
             } else if (propName.equals("format")) {
                 builder.format(propNode.toString());
                 iterator.remove();
-            } else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) {
+            } else if (TypeParsers.parseMultiField(builder::addMultiField, name, parserContext, propName, propNode)) {
                 iterator.remove();
             }
         }
 
@@ -27,6 +27,7 @@ import org.apache.lucene.index.IndexOptions;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.support.AbstractXContentParser;
 
@@ -292,7 +293,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
     }
 
     @Override
-    public final FieldMapper merge(Mapper mergeWith) {
+    public FieldMapper merge(Mapper mergeWith) {
         FieldMapper merged = clone();
         List<String> conflicts = new ArrayList<>();
         if (mergeWith instanceof FieldMapper == false) {
 
@@ -487,7 +488,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
 
     protected abstract String contentType();
 
-    public static class MultiFields {
+    public static class MultiFields implements Iterable<Mapper> {
 
         public static MultiFields empty() {
             return new MultiFields(ImmutableOpenMap.<String, FieldMapper>of());
 
@@ -502,8 +503,29 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
             return this;
         }
 
+        public Builder add(Mapper mapper) {
+            mapperBuilders.put(mapper.simpleName(), new Mapper.Builder(mapper.simpleName()) {
+                @Override
+                public Mapper build(BuilderContext context) {
+                    return mapper;
+                }
+            });
+            return this;
+        }
+
+        public Builder update(Mapper toMerge, ContentPath contentPath) {
+            if (mapperBuilders.containsKey(toMerge.simpleName()) == false) {
+                add(toMerge);
+            } else {
+                Mapper.Builder builder = mapperBuilders.get(toMerge.simpleName());
+                Mapper existing = builder.build(new BuilderContext(Settings.EMPTY, contentPath));
+                add(existing.merge(toMerge));
+            }
+            return this;
+        }
+
         @SuppressWarnings("unchecked")
-        public MultiFields build(FieldMapper.Builder mainFieldBuilder, BuilderContext context) {
+        public MultiFields build(Mapper.Builder mainFieldBuilder, BuilderContext context) {
             if (mapperBuilders.isEmpty()) {
                 return empty();
             } else {
 
@@ -568,6 +590,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
             return new MultiFields(mappers);
         }
 
+        @Override
         public Iterator<Mapper> iterator() {
             return StreamSupport.stream(mappers.values().spliterator(), false).map((p) -> (Mapper)p.value).iterator();
         }
 
@@ -634,6 +657,11 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
             }
             return new CopyTo(Collections.unmodifiableList(copyToBuilders));
         }
+
+        public void reset(CopyTo copyTo) {
+            copyToBuilders.clear();
+            copyToBuilders.addAll(copyTo.copyToFields);
+        }
     }
 
     public List<String> copyToFields() {
 
@@ -130,7 +130,7 @@ public class IpFieldMapper extends FieldMapper {
             } else if (propName.equals("ignore_malformed")) {
                 builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode, name + ".ignore_malformed"));
                 iterator.remove();
-            } else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) {
+            } else if (TypeParsers.parseMultiField(builder::addMultiField, name, parserContext, propName, propNode)) {
                 iterator.remove();
             }
         }
 
@ -0,0 +1,345 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.apache.lucene.document.FieldType;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Function;
|
||||
|
||||
/**
|
||||
* Defines how a particular field should be indexed and searched
|
||||
*
|
||||
* Configuration {@link Parameter}s for the mapper are defined on a {@link Builder} subclass,
|
||||
* and returned by its {@link Builder#getParameters()} method. Merging, serialization
|
||||
* and parsing of the mapper are all mediated through this set of parameters.
|
||||
*
|
||||
* Subclasses should implement a {@link Builder} that is returned from the
|
||||
* {@link #getMergeBuilder()} method, initialised with the existing builder.
|
||||
*/
|
||||
public abstract class ParametrizedFieldMapper extends FieldMapper {
|
||||
|
||||
/**
|
||||
* Creates a new ParametrizedFieldMapper
|
||||
*/
|
||||
protected ParametrizedFieldMapper(String simpleName, MappedFieldType mappedFieldType, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(simpleName, new FieldType(), mappedFieldType, multiFields, copyTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link Builder} to be used for merging and serialization
|
||||
*
|
||||
* Implement as follows:
|
||||
* {@code return new MyBuilder(simpleName()).init(this); }
|
||||
*/
|
||||
public abstract ParametrizedFieldMapper.Builder getMergeBuilder();
|
||||
|
||||
@Override
|
||||
public final ParametrizedFieldMapper merge(Mapper mergeWith) {
|
||||
|
||||
if (mergeWith instanceof FieldMapper == false) {
|
||||
throw new IllegalArgumentException("mapper [" + name() + "] cannot be changed from type ["
|
||||
+ contentType() + "] to [" + mergeWith.getClass().getSimpleName() + "]");
|
||||
}
|
||||
if (Objects.equals(this.getClass(), mergeWith.getClass()) == false) {
|
||||
throw new IllegalArgumentException("mapper [" + name() + "] cannot be changed from type ["
|
||||
+ contentType() + "] to [" + ((FieldMapper) mergeWith).contentType() + "]");
|
||||
}
|
||||
|
||||
ParametrizedFieldMapper.Builder builder = getMergeBuilder();
|
||||
Conflicts conflicts = new Conflicts(name());
|
||||
builder.merge((FieldMapper) mergeWith, conflicts);
|
||||
conflicts.check();
|
||||
return builder.build(new BuilderContext(Settings.EMPTY, parentPath(name())));
|
||||
}
|
||||
|
||||
private static ContentPath parentPath(String name) {
|
||||
int endPos = name.lastIndexOf(".");
|
||||
if (endPos == -1) {
|
||||
return new ContentPath(0);
|
||||
}
|
||||
return new ContentPath(name.substring(0, endPos));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final void mergeOptions(FieldMapper other, List<String> conflicts) {
|
||||
// TODO remove when everything is parametrized
|
||||
}
|
||||
|
||||
@Override
|
||||
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(simpleName());
|
||||
builder.field("type", contentType());
|
||||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
getMergeBuilder().toXContent(builder, includeDefaults);
|
||||
multiFields.toXContent(builder, params);
|
||||
copyTo.toXContent(builder, params);
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
/**
|
||||
* A configurable parameter for a field mapper
|
||||
* @param <T> the type of the value the parameter holds
|
||||
*/
|
||||
public static final class Parameter<T> {
|
||||
|
||||
public final String name;
|
||||
private final T defaultValue;
|
||||
private final BiFunction<String, Object, T> parser;
|
||||
private final Function<FieldMapper, T> initializer;
|
||||
private final boolean updateable;
|
||||
private boolean acceptsNull = false;
|
||||
private T value;
|
||||
|
||||
/**
|
||||
* Creates a new Parameter
|
||||
* @param name the parameter name, used in parsing and serialization
|
||||
* @param updateable whether the parameter can be updated with a new value during a mapping update
|
||||
* @param defaultValue the default value for the parameter, used if unspecified in mappings
|
||||
* @param parser a function that converts an object to a parameter value
|
||||
* @param initializer a function that reads a parameter value from an existing mapper
|
||||
*/
|
||||
public Parameter(String name, boolean updateable, T defaultValue,
|
||||
BiFunction<String, Object, T> parser, Function<FieldMapper, T> initializer) {
|
||||
this.name = name;
|
||||
this.defaultValue = defaultValue;
|
||||
this.value = defaultValue;
|
||||
this.parser = parser;
|
||||
this.initializer = initializer;
|
||||
this.updateable = updateable;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current value of the parameter
|
||||
*/
|
||||
public T getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the current value of the parameter
|
||||
*/
|
||||
public void setValue(T value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows the parameter to accept a {@code null} value
|
||||
*/
|
||||
public Parameter<T> acceptsNull() {
|
||||
this.acceptsNull = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
private void init(FieldMapper toInit) {
|
||||
this.value = initializer.apply(toInit);
|
||||
}
|
||||
|
||||
private void parse(String field, Object in) {
|
||||
this.value = parser.apply(field, in);
|
||||
}
|
||||
|
||||
private void merge(FieldMapper toMerge, Conflicts conflicts) {
|
||||
T value = initializer.apply(toMerge);
|
||||
if (updateable == false && Objects.equals(this.value, value) == false) {
|
||||
conflicts.addConflict(name, this.value.toString(), value.toString());
|
||||
} else {
|
||||
this.value = value;
|
||||
}
|
||||
}
|
||||
|
||||
private void toXContent(XContentBuilder builder, boolean includeDefaults) throws IOException {
|
||||
if (includeDefaults || (Objects.equals(defaultValue, value) == false)) {
|
||||
builder.field(name, value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines a parameter that takes the values {@code true} or {@code false}
|
||||
* @param name the parameter name
|
||||
* @param updateable whether the parameter can be changed by a mapping update
|
||||
* @param initializer a function that reads the parameter value from an existing mapper
|
||||
* @param defaultValue the default value, to be used if the parameter is undefined in a mapping
|
||||
*/
|
||||
public static Parameter<Boolean> boolParam(String name, boolean updateable,
|
||||
Function<FieldMapper, Boolean> initializer, boolean defaultValue) {
|
||||
return new Parameter<>(name, updateable, defaultValue, (n, o) -> XContentMapValues.nodeBooleanValue(o), initializer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines a parameter that takes a float value
|
||||
* @param name the parameter name
|
||||
* @param updateable whether the parameter can be changed by a mapping update
|
||||
* @param initializer a function that reads the parameter value from an existing mapper
|
||||
* @param defaultValue the default value, to be used if the parameter is undefined in a mapping
|
||||
*/
|
||||
public static Parameter<Float> floatParam(String name, boolean updateable,
|
||||
Function<FieldMapper, Float> initializer, float defaultValue) {
|
||||
return new Parameter<>(name, updateable, defaultValue, (n, o) -> XContentMapValues.nodeFloatValue(o), initializer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines a parameter that takes a string value
|
||||
* @param name the parameter name
|
||||
* @param updateable whether the parameter can be changed by a mapping update
|
||||
* @param initializer a function that reads the parameter value from an existing mapper
|
||||
* @param defaultValue the default value, to be used if the parameter is undefined in a mapping
|
||||
*/
|
||||
public static Parameter<String> stringParam(String name, boolean updateable,
|
||||
Function<FieldMapper, String> initializer, String defaultValue) {
|
||||
return new Parameter<>(name, updateable, defaultValue,
|
||||
(n, o) -> XContentMapValues.nodeStringValue(o), initializer);
|
||||
}
|
||||
}
|
||||
|
||||
private static final class Conflicts {
|
||||
|
||||
private final String mapperName;
|
||||
private final List<String> conflicts = new ArrayList<>();
|
||||
|
||||
Conflicts(String mapperName) {
|
||||
this.mapperName = mapperName;
|
||||
}
|
||||
|
||||
void addConflict(String parameter, String existing, String toMerge) {
|
||||
conflicts.add("Cannot update parameter [" + parameter + "] from [" + existing + "] to [" + toMerge + "]");
|
||||
}
|
||||
|
||||
void check() {
|
||||
if (conflicts.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
String message = "Mapper for [" + mapperName + "] conflicts with existing mapper:\n\t"
|
||||
+ String.join("\n\t", conflicts);
|
||||
throw new IllegalArgumentException(message);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* A Builder for a ParametrizedFieldMapper
|
||||
*/
|
||||
public abstract static class Builder extends Mapper.Builder<Builder> {
|
||||
|
||||
protected final MultiFields.Builder multiFieldsBuilder = new MultiFields.Builder();
|
||||
protected final CopyTo.Builder copyTo = new CopyTo.Builder();
|
||||
|
||||
/**
|
||||
* Creates a new Builder with a field name
|
||||
*/
|
||||
protected Builder(String name) {
|
||||
super(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialises all parameters from an existing mapper
|
||||
*/
|
||||
public Builder init(FieldMapper initializer) {
|
||||
for (Parameter<?> param : getParameters()) {
|
||||
param.init(initializer);
|
||||
}
|
||||
for (Mapper subField : initializer.multiFields) {
|
||||
multiFieldsBuilder.add(subField);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
private void merge(FieldMapper in, Conflicts conflicts) {
|
||||
for (Parameter<?> param : getParameters()) {
|
||||
param.merge(in, conflicts);
|
||||
}
|
||||
for (Mapper newSubField : in.multiFields) {
|
||||
multiFieldsBuilder.update(newSubField, parentPath(newSubField.name()));
|
||||
}
|
||||
this.copyTo.reset(in.copyTo);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the list of parameters defined for this mapper
|
||||
*/
|
||||
protected abstract List<Parameter<?>> getParameters();
|
||||
|
||||
@Override
|
||||
public abstract ParametrizedFieldMapper build(BuilderContext context);
|
||||
|
||||
/**
|
||||
* Builds the full name of the field, taking into account parent objects
|
||||
*/
|
||||
protected String buildFullName(BuilderContext context) {
|
||||
return context.path().pathAsText(name);
|
||||
}
|
||||
|
||||
private void toXContent(XContentBuilder builder, boolean includeDefaults) throws IOException {
|
||||
for (Parameter<?> parameter : getParameters()) {
|
||||
parameter.toXContent(builder, includeDefaults);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse mapping parameters from a map of mappings
|
||||
* @param name the field mapper name
|
||||
* @param parserContext the parser context
|
||||
* @param fieldNode the root node of the map of mappings for this field
|
||||
*/
|
||||
public final void parse(String name, TypeParser.ParserContext parserContext, Map<String, Object> fieldNode) {
|
||||
Map<String, Parameter<?>> paramsMap = new HashMap<>();
|
||||
for (Parameter<?> param : getParameters()) {
|
||||
paramsMap.put(param.name, param);
|
||||
}
|
||||
String type = (String) fieldNode.remove("type");
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
final String propName = entry.getKey();
|
||||
final Object propNode = entry.getValue();
|
||||
if (Objects.equals("fields", propName)) {
|
||||
TypeParsers.parseMultiField(multiFieldsBuilder::add, name, parserContext, propName, propNode);
|
||||
iterator.remove();
|
||||
continue;
|
||||
}
|
||||
if (Objects.equals("copy_to", propName)) {
|
||||
TypeParsers.parseCopyFields(propNode).forEach(copyTo::add);
|
||||
iterator.remove();
|
||||
continue;
|
||||
}
|
||||
Parameter<?> parameter = paramsMap.get(propName);
|
||||
if (parameter == null) {
|
||||
throw new MapperParsingException("unknown parameter [" + propName
|
||||
+ "] on mapper [" + name + "] of type [" + type + "]");
|
||||
}
|
||||
if (propNode == null && parameter.acceptsNull == false) {
|
||||
throw new MapperParsingException("[" + propName + "] on mapper [" + name
|
||||
+ "] of type [" + type + "] must not have a [null] value");
|
||||
}
|
||||
parameter.parse(name, propNode);
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -179,7 +179,7 @@ public class RangeFieldMapper extends FieldMapper {
             } else if (propName.equals("format")) {
                 builder.format(propNode.toString());
                 iterator.remove();
-            } else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) {
+            } else if (TypeParsers.parseMultiField(builder::addMultiField, name, parserContext, propName, propNode)) {
                 iterator.remove();
             }
         }
 
@ -29,10 +29,12 @@ import org.elasticsearch.index.analysis.AnalysisMode;
|
|||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.index.similarity.SimilarityProvider;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
@ -181,12 +183,7 @@ public class TypeParsers {
|
|||
/**
|
||||
* Parse the {@code meta} key of the mapping.
|
||||
*/
|
||||
public static void parseMeta(FieldMapper.Builder<?> builder, String name, Map<String, Object> fieldNode) {
|
||||
Object metaObject = fieldNode.remove("meta");
|
||||
if (metaObject == null) {
|
||||
// no meta
|
||||
return;
|
||||
}
|
||||
public static Map<String, String> parseMeta(String name, Object metaObject) {
|
||||
if (metaObject instanceof Map == false) {
|
||||
throw new MapperParsingException("[meta] must be an object, got " + metaObject.getClass().getSimpleName() +
|
||||
"[" + metaObject + "] for field [" + name +"]");
|
||||
|
@ -219,17 +216,17 @@ public class TypeParsers {
|
|||
}
|
||||
final Function<Map.Entry<String, ?>, Object> entryValueFunction = Map.Entry::getValue;
|
||||
final Function<Object, String> stringCast = String.class::cast;
|
||||
Map<String, String> checkedMeta = Collections.unmodifiableMap(meta.entrySet().stream()
|
||||
return Collections.unmodifiableMap(meta.entrySet().stream()
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, entryValueFunction.andThen(stringCast))));
|
||||
builder.meta(checkedMeta);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Parse common field attributes such as {@code doc_values} or {@code store}.
|
||||
*/
|
||||
public static void parseField(FieldMapper.Builder<?> builder, String name, Map<String, Object> fieldNode,
|
||||
Mapper.TypeParser.ParserContext parserContext) {
|
||||
parseMeta(builder, name, fieldNode);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
final String propName = entry.getKey();
|
||||
|
@ -238,6 +235,9 @@ public class TypeParsers {
|
|||
if (propName.equals("store")) {
|
||||
builder.store(XContentMapValues.nodeBooleanValue(propNode, name + ".store"));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("meta")) {
|
||||
builder.meta(parseMeta(name, propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("index")) {
|
||||
builder.index(XContentMapValues.nodeBooleanValue(propNode, name + ".index"));
|
||||
iterator.remove();
|
||||
|
@ -254,22 +254,26 @@ public class TypeParsers {
|
|||
deprecationLogger.deprecatedAndMaybeLog("similarity",
|
||||
"The [similarity] parameter has no effect on field [" + name + "] and will be removed in 8.0");
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
|
||||
} else if (parseMultiField(builder::addMultiField, name, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
} else if (propName.equals("copy_to")) {
|
||||
if (parserContext.isWithinMultiField()) {
|
||||
throw new MapperParsingException("copy_to in multi fields is not allowed. Found the copy_to in field [" + name + "] " +
|
||||
"which is within a multi field.");
|
||||
} else {
|
||||
parseCopyFields(propNode, builder);
|
||||
List<String> copyFields = parseCopyFields(propNode);
|
||||
FieldMapper.CopyTo.Builder cpBuilder = new FieldMapper.CopyTo.Builder();
|
||||
copyFields.forEach(cpBuilder::add);
|
||||
builder.copyTo(cpBuilder.build());
|
||||
}
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext,
|
||||
String propName, Object propNode) {
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
public static boolean parseMultiField(Consumer<Mapper.Builder> multiFieldsBuilder, String name,
|
||||
Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
|
||||
if (propName.equals("fields")) {
|
||||
if (parserContext.isWithinMultiField()) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("multifield_within_multifield", "At least one multi-field, [" + name + "], was " +
|
||||
|
@ -320,7 +324,7 @@ public class TypeParsers {
|
|||
if (typeParser == null) {
|
||||
throw new MapperParsingException("no handler for type [" + type + "] declared on field [" + multiFieldName + "]");
|
||||
}
|
||||
builder.addMultiField(typeParser.parse(multiFieldName, multiFieldNodes, parserContext));
|
||||
multiFieldsBuilder.accept(typeParser.parse(multiFieldName, multiFieldNodes, parserContext));
|
||||
multiFieldNodes.remove("type");
|
||||
DocumentMapperParser.checkNoRemainingFields(propName, multiFieldNodes, parserContext.indexVersionCreated());
|
||||
}
|
||||
|
@ -375,17 +379,16 @@ public class TypeParsers {
|
|||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static void parseCopyFields(Object propNode, FieldMapper.Builder builder) {
|
||||
FieldMapper.CopyTo.Builder copyToBuilder = new FieldMapper.CopyTo.Builder();
|
||||
public static List<String> parseCopyFields(Object propNode) {
|
||||
List<String> copyFields = new ArrayList<>();
|
||||
if (isArray(propNode)) {
|
||||
for (Object node : (List<Object>) propNode) {
|
||||
copyToBuilder.add(nodeStringValue(node, null));
|
||||
copyFields.add(nodeStringValue(node, null));
|
||||
}
|
||||
} else {
|
||||
copyToBuilder.add(nodeStringValue(propNode, null));
|
||||
copyFields.add(nodeStringValue(propNode, null));
|
||||
}
|
||||
builder.copyTo(copyToBuilder.build());
|
||||
return copyFields;
|
||||
}
|
||||
|
||||
public static SimilarityProvider resolveSimilarity(Mapper.TypeParser.ParserContext parserContext, String name, String value) {
|
||||
|
|
|
@ -122,7 +122,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
|
|||
} else if (type.equals("geo_point")) {
|
||||
fieldType = new GeoPointFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType();
|
||||
} else if (type.equals("binary")) {
|
||||
fieldType = new BinaryFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType();
|
||||
fieldType = new BinaryFieldMapper.Builder(fieldName, docValues).build(context).fieldType();
|
||||
} else {
|
||||
throw new UnsupportedOperationException(type);
|
||||
}
|
||||
|
|
|
@ -32,28 +32,18 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
||||
public class BinaryFieldMapperTests extends FieldMapperTestCase<BinaryFieldMapper.Builder> {
|
||||
|
||||
@Override
|
||||
protected Set<String> unsupportedProperties() {
|
||||
return org.elasticsearch.common.collect.Set.of("analyzer", "eager_global_ordinals", "norms", "similarity", "index");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected BinaryFieldMapper.Builder newBuilder() {
|
||||
return new BinaryFieldMapper.Builder("binary");
|
||||
}
|
||||
public class BinaryFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
|
|
|
@ -27,6 +27,9 @@ import org.apache.lucene.index.IndexWriterConfig;
|
|||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.LeafReader;
|
||||
import org.apache.lucene.index.SortedNumericDocValues;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.BoostQuery;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.store.ByteBuffersDirectory;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
|
@ -41,25 +44,21 @@ import org.elasticsearch.index.IndexService;
|
|||
import org.elasticsearch.index.mapper.MapperService.MergeReason;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class BooleanFieldMapperTests extends FieldMapperTestCase<BooleanFieldMapper.Builder> {
|
||||
public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
private IndexService indexService;
|
||||
private DocumentMapperParser parser;
|
||||
|
||||
@Override
|
||||
protected Set<String> unsupportedProperties() {
|
||||
return org.elasticsearch.common.collect.Set.of("analyzer", "similarity");
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
indexService = createIndex("test");
|
||||
|
@ -285,9 +284,13 @@ public class BooleanFieldMapperTests extends FieldMapperTestCase<BooleanFieldMap
|
|||
assertEquals(mapping3, mapper.mappingSource().toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected BooleanFieldMapper.Builder newBuilder() {
|
||||
return new BooleanFieldMapper.Builder("boolean");
|
||||
public void testBoosts() throws Exception {
|
||||
String mapping = "{\"_doc\":{\"properties\":{\"field\":{\"type\":\"boolean\",\"boost\":2.0}}}}";
|
||||
DocumentMapper mapper = indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
MappedFieldType ft = indexService.mapperService().fieldType("field");
|
||||
assertEquals(new BoostQuery(new TermQuery(new Term("field", "T")), 2.0f), ft.termQuery("true", null));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -2,6 +2,14 @@ package org.elasticsearch.index.mapper;
|
|||
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.either;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
|
@ -22,14 +30,6 @@ import org.elasticsearch.common.compress.CompressedXContent;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class NullValueTests extends ESSingleNodeTestCase {
|
||||
public void testNullNullValue() throws Exception {
|
||||
IndexService indexService = createIndex("test", Settings.builder().build());
|
||||
|
@ -52,7 +52,9 @@ public class NullValueTests extends ESSingleNodeTestCase {
|
|||
indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
fail("Test should have failed because [null_value] was null.");
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getMessage(), equalTo("Property [null_value] cannot be null."));
|
||||
assertThat(e.getMessage(),
|
||||
either(equalTo("Property [null_value] cannot be null."))
|
||||
.or(containsString("must not have a [null] value")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,252 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.ParametrizedFieldMapper.Parameter;
|
||||
import org.elasticsearch.plugins.MapperPlugin;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public class ParametrizedMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
public static class TestPlugin extends Plugin implements MapperPlugin {
|
||||
@Override
|
||||
public Map<String, Mapper.TypeParser> getMappers() {
|
||||
return org.elasticsearch.common.collect.Map.of("test_mapper", new TypeParser());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
return Collections.singletonList(TestPlugin.class);
|
||||
}
|
||||
|
||||
private static TestMapper toType(Mapper in) {
|
||||
return (TestMapper) in;
|
||||
}
|
||||
|
||||
public static class Builder extends ParametrizedFieldMapper.Builder {
|
||||
|
||||
final Parameter<Boolean> fixed
|
||||
= Parameter.boolParam("fixed", false, m -> toType(m).fixed, true);
|
||||
final Parameter<Boolean> fixed2
|
||||
= Parameter.boolParam("fixed2", false, m -> toType(m).fixed2, false);
|
||||
final Parameter<String> variable
|
||||
= Parameter.stringParam("variable", true, m -> toType(m).variable, "default").acceptsNull();
|
||||
|
||||
protected Builder(String name) {
|
||||
super(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<Parameter<?>> getParameters() {
|
||||
return Arrays.asList(fixed, fixed2, variable);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ParametrizedFieldMapper build(Mapper.BuilderContext context) {
|
||||
return new TestMapper(name(), buildFullName(context),
|
||||
multiFieldsBuilder.build(this, context), copyTo.build(), this);
|
||||
}
|
||||
}
|
||||
|
||||
public static class TypeParser implements Mapper.TypeParser {
|
||||
|
||||
@Override
|
||||
public Mapper.Builder<?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
Builder builder = new Builder(name);
|
||||
builder.parse(name, parserContext, node);
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
|
||||
public static class TestMapper extends ParametrizedFieldMapper {
|
||||
|
||||
private final boolean fixed;
|
||||
private final boolean fixed2;
|
||||
private final String variable;
|
||||
|
||||
protected TestMapper(String simpleName, String fullName, MultiFields multiFields, CopyTo copyTo,
|
||||
ParametrizedMapperTests.Builder builder) {
|
||||
super(simpleName, new KeywordFieldMapper.KeywordFieldType(fullName), multiFields, copyTo);
|
||||
this.fixed = builder.fixed.getValue();
|
||||
this.fixed2 = builder.fixed2.getValue();
|
||||
this.variable = builder.variable.getValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder getMergeBuilder() {
|
||||
return new ParametrizedMapperTests.Builder(simpleName()).init(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context) throws IOException {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return "test_mapper";
|
||||
}
|
||||
}
|
||||
|
||||
private static TestMapper fromMapping(String mapping) {
|
||||
Mapper.TypeParser.ParserContext pc = new Mapper.TypeParser.ParserContext(s -> null, null, s -> {
|
||||
if (Objects.equals("keyword", s)) {
|
||||
return new KeywordFieldMapper.TypeParser();
|
||||
}
|
||||
if (Objects.equals("binary", s)) {
|
||||
return new BinaryFieldMapper.TypeParser();
|
||||
}
|
||||
return null;
|
||||
}, Version.CURRENT, () -> null);
|
||||
return (TestMapper) new TypeParser()
|
||||
.parse("field", XContentHelper.convertToMap(JsonXContent.jsonXContent, mapping, true), pc)
|
||||
.build(new Mapper.BuilderContext(Settings.EMPTY, new ContentPath(0)));
|
||||
}
|
||||
|
||||
// defaults - create empty builder config, and serialize with and without defaults
|
||||
public void testDefaults() throws IOException {
|
||||
String mapping = "{\"type\":\"test_mapper\"}";
|
||||
TestMapper mapper = fromMapping(mapping);
|
||||
|
||||
assertTrue(mapper.fixed);
|
||||
assertEquals("default", mapper.variable);
|
||||
|
||||
assertEquals("{\"field\":" + mapping + "}", Strings.toString(mapper));
|
||||
|
||||
XContentBuilder builder = JsonXContent.contentBuilder();
|
||||
ToXContent.Params params = new ToXContent.MapParams(org.elasticsearch.common.collect.Map.of("include_defaults", "true"));
|
||||
builder.startObject();
|
||||
mapper.toXContent(builder, params);
|
||||
builder.endObject();
|
||||
assertEquals("{\"field\":{\"type\":\"test_mapper\",\"fixed\":true,\"fixed2\":false,\"variable\":\"default\"}}",
|
||||
Strings.toString(builder));
|
||||
}
|
||||
|
||||
// merging - try updating 'fixed' and 'fixed2' should get an error, try updating 'variable' and verify update
|
||||
public void testMerging() {
|
||||
String mapping = "{\"type\":\"test_mapper\",\"fixed\":false}";
|
||||
TestMapper mapper = fromMapping(mapping);
|
||||
assertEquals("{\"field\":" + mapping + "}", Strings.toString(mapper));
|
||||
|
||||
TestMapper badMerge = fromMapping("{\"type\":\"test_mapper\",\"fixed\":true,\"fixed2\":true}");
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapper.merge(badMerge));
|
||||
String expectedError = "Mapper for [field] conflicts with existing mapper:\n" +
|
||||
"\tCannot update parameter [fixed] from [false] to [true]\n" +
|
||||
"\tCannot update parameter [fixed2] from [false] to [true]";
|
||||
assertEquals(expectedError, e.getMessage());
|
||||
|
||||
assertEquals("{\"field\":" + mapping + "}", Strings.toString(mapper)); // original mapping is unaffected
|
||||
|
||||
// TODO: should we have to include 'fixed' here? Or should updates take as 'defaults' the existing values?
|
||||
TestMapper goodMerge = fromMapping("{\"type\":\"test_mapper\",\"fixed\":false,\"variable\":\"updated\"}");
|
||||
TestMapper merged = (TestMapper) mapper.merge(goodMerge);
|
||||
|
||||
assertEquals("{\"field\":" + mapping + "}", Strings.toString(mapper)); // original mapping is unaffected
|
||||
assertEquals("{\"field\":{\"type\":\"test_mapper\",\"fixed\":false,\"variable\":\"updated\"}}", Strings.toString(merged));
|
||||
|
||||
}
|
||||
|
||||
// add multifield, verify, add second multifield, verify, overwrite second multifield
|
||||
public void testMultifields() {
|
||||
String mapping = "{\"type\":\"test_mapper\",\"variable\":\"foo\",\"fields\":{\"sub\":{\"type\":\"keyword\"}}}";
|
||||
TestMapper mapper = fromMapping(mapping);
|
||||
assertEquals("{\"field\":" + mapping + "}", Strings.toString(mapper));
|
||||
|
||||
String addSubField = "{\"type\":\"test_mapper\",\"variable\":\"foo\",\"fields\":{\"sub2\":{\"type\":\"keyword\"}}}";
|
||||
TestMapper toMerge = fromMapping(addSubField);
|
||||
TestMapper merged = (TestMapper) mapper.merge(toMerge);
|
||||
assertEquals("{\"field\":{\"type\":\"test_mapper\",\"variable\":\"foo\"," +
|
||||
"\"fields\":{\"sub\":{\"type\":\"keyword\"},\"sub2\":{\"type\":\"keyword\"}}}}", Strings.toString(merged));
|
||||
|
||||
String badSubField = "{\"type\":\"test_mapper\",\"variable\":\"foo\",\"fields\":{\"sub2\":{\"type\":\"binary\"}}}";
|
||||
TestMapper badToMerge = fromMapping(badSubField);
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> merged.merge(badToMerge));
|
||||
assertEquals("mapper [field.sub2] cannot be changed from type [keyword] to [binary]", e.getMessage());
|
||||
}
|
||||
|
||||
// add copy_to, verify
|
||||
public void testCopyTo() {
|
||||
String mapping = "{\"type\":\"test_mapper\",\"variable\":\"foo\",\"copy_to\":[\"other\"]}";
|
||||
TestMapper mapper = fromMapping(mapping);
|
||||
assertEquals("{\"field\":" + mapping + "}", Strings.toString(mapper));
|
||||
|
||||
// On update, copy_to is completely replaced
|
||||
|
||||
TestMapper toMerge = fromMapping("{\"type\":\"test_mapper\",\"variable\":\"updated\",\"copy_to\":[\"foo\",\"bar\"]}");
|
||||
TestMapper merged = (TestMapper) mapper.merge(toMerge);
|
||||
assertEquals("{\"field\":{\"type\":\"test_mapper\",\"variable\":\"updated\",\"copy_to\":[\"foo\",\"bar\"]}}",
|
||||
Strings.toString(merged));
|
||||
|
||||
TestMapper removeCopyTo = fromMapping("{\"type\":\"test_mapper\",\"variable\":\"updated\"}");
|
||||
TestMapper noCopyTo = (TestMapper) merged.merge(removeCopyTo);
|
||||
assertEquals("{\"field\":{\"type\":\"test_mapper\",\"variable\":\"updated\"}}", Strings.toString(noCopyTo));
|
||||
}
|
||||
|
||||
public void testNullables() {
|
||||
String mapping = "{\"type\":\"test_mapper\",\"fixed\":null}";
|
||||
MapperParsingException e = expectThrows(MapperParsingException.class, () -> fromMapping(mapping));
|
||||
assertEquals("[fixed] on mapper [field] of type [test_mapper] must not have a [null] value", e.getMessage());
|
||||
|
||||
String fine = "{\"type\":\"test_mapper\",\"variable\":null}";
|
||||
TestMapper mapper = fromMapping(fine);
|
||||
assertEquals("{\"field\":" + fine + "}", Strings.toString(mapper));
|
||||
}
|
||||
|
||||
public void testObjectSerialization() throws IOException {
|
||||
|
||||
IndexService indexService = createIndex("test");
|
||||
|
||||
String mapping = "{\"_doc\":{" +
|
||||
"\"properties\":{" +
|
||||
"\"actual\":{\"type\":\"double\"}," +
|
||||
"\"bucket_count\":{\"type\":\"long\"}," +
|
||||
"\"bucket_influencers\":{\"type\":\"nested\",\"properties\":{" +
|
||||
"\"anomaly_score\":{\"type\":\"double\"}," +
|
||||
"\"bucket_span\":{\"type\":\"long\"}," +
|
||||
"\"is_interim\":{\"type\":\"boolean\"}}}}}}";
|
||||
indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping, Strings.toString(indexService.mapperService().documentMapper()));
|
||||
|
||||
indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping, Strings.toString(indexService.mapperService().documentMapper()));
|
||||
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -429,15 +429,18 @@ public class RootObjectMapperTests extends ESSingleNodeTestCase {
|
|||
mapping.endObject();
|
||||
}
|
||||
mapping.endObject();
|
||||
|
||||
DocumentMapper mapper =
|
||||
mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE);
|
||||
assertThat(mapper.mappingSource().toString(), containsString("\"foo\":\"bar\""));
|
||||
if (useMatchMappingType) {
|
||||
assertWarnings("dynamic template [my_template] has invalid content [{\"match_mapping_type\":\"*\",\"mapping\":{" +
|
||||
"\"foo\":\"bar\",\"type\":\"{dynamic_type}\"}}], caused by [Unused mapping attributes [{foo=bar}]]");
|
||||
"\"foo\":\"bar\",\"type\":\"{dynamic_type}\"}}], " +
|
||||
"caused by [unknown parameter [foo] on mapper [__dummy__] of type [null]]");
|
||||
} else {
|
||||
assertWarnings("dynamic template [my_template] has invalid content [{\"match\":\"string_*\",\"mapping\":{" +
|
||||
"\"foo\":\"bar\",\"type\":\"{dynamic_type}\"}}], caused by [Unused mapping attributes [{foo=bar}]]");
|
||||
"\"foo\":\"bar\",\"type\":\"{dynamic_type}\"}}], " +
|
||||
"caused by [unknown parameter [foo] on mapper [__dummy__] of type [null]]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -121,7 +121,6 @@ public class HistogramFieldMapper extends FieldMapper {
|
|||
public Mapper.Builder<Builder> parse(String name, Map<String, Object> node, ParserContext parserContext)
|
||||
throws MapperParsingException {
|
||||
Builder builder = new HistogramFieldMapper.Builder(name);
|
||||
TypeParsers.parseMeta(builder, name, node);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String propName = entry.getKey();
|
||||
|
@ -130,6 +129,10 @@ public class HistogramFieldMapper extends FieldMapper {
|
|||
builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode, name + "." + Names.IGNORE_MALFORMED));
|
||||
iterator.remove();
|
||||
}
|
||||
if (propName.equals("meta")) {
|
||||
builder.meta(TypeParsers.parseMeta(propName, propNode));
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -102,7 +102,9 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
|
|||
if (value != null) {
|
||||
builder.setValue(value.toString());
|
||||
}
|
||||
TypeParsers.parseMeta(builder, name, node);
|
||||
if (node.containsKey("meta")) {
|
||||
builder.meta(TypeParsers.parseMeta(name, node.remove("meta")));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
|
@ -152,11 +154,11 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
|
|||
public String typeName() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String familyTypeName() {
|
||||
return KeywordFieldMapper.CONTENT_TYPE;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
|
||||
|
|