Move FieldMapper#valueFetcher to MappedFieldType (#62974) (#63220)

For runtime fields, we will want to do all search-time interaction with
a field definition via a MappedFieldType, rather than a FieldMapper, to
avoid interfering with the logic of document parsing. Currently, both
fetching values for runtime scripts and building top-hits responses
need to call a method on FieldMapper. This commit moves that method to
MappedFieldType, incidentally simplifying the current call sites and
freeing us up to implement runtime fields as pure MappedFieldType
objects.
Alan Woodward 2020-10-04 14:54:59 +01:00 committed by GitHub
parent 1c136bb7fc
commit 01950bc80f
119 changed files with 1761 additions and 1408 deletions
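In outline, the change moves the value-fetching hook from the mapper onto the field type. Below is a minimal sketch of the resulting shape using toy stand-in types; the names mirror the real classes, but this is not the actual Elasticsearch API.

// Toy illustration of the refactoring: search-time value fetching is
// declared on the field type, while the mapper keeps parsing concerns.
interface ValueFetcher {
    Object fetchValue(Object sourceValue);
}

abstract class MappedFieldType {
    // The fetching hook now lives here, so search-time code (runtime
    // scripts, top-hits responses) never needs a FieldMapper.
    public abstract ValueFetcher valueFetcher(String format);
}

abstract class FieldMapper {
    private final MappedFieldType fieldType;

    FieldMapper(MappedFieldType fieldType) {
        this.fieldType = fieldType;
    }

    // Search-time callers go through fieldType().valueFetcher(...)
    // instead of calling a method on the mapper itself.
    public MappedFieldType fieldType() {
        return fieldType;
    }
}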


@@ -113,6 +113,19 @@ public class RankFeatureFieldMapper extends ParametrizedFieldMapper {
throw new IllegalArgumentException("[rank_feature] fields do not support sorting, scripting or aggregating");
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Float parseSourceValue(Object value) {
return objectToFloat(value);
}
};
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new IllegalArgumentException("Queries on [rank_feature] fields are not supported");
@@ -162,7 +175,7 @@ public class RankFeatureFieldMapper extends ParametrizedFieldMapper {
context.doc().addWithKey(name(), new FeatureField("_feature", name(), value));
}
private Float objectToFloat(Object value) {
private static Float objectToFloat(Object value) {
if (value instanceof Number) {
return ((Number) value).floatValue();
} else {
@@ -170,19 +183,6 @@ public class RankFeatureFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Float parseSourceValue(Object value) {
return objectToFloat(value);
}
};
}
@Override
protected String contentType() {
return CONTENT_TYPE;


@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Collections;
@@ -50,6 +51,11 @@ public class RankFeatureMetaFieldMapper extends MetadataFieldMapper {
return CONTENT_TYPE;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "].");
}
@Override
public Query existsQuery(QueryShardContext context) {
throw new UnsupportedOperationException("Cannot run exists query on [_feature]");


@@ -88,6 +88,19 @@ public class RankFeaturesFieldMapper extends ParametrizedFieldMapper {
throw new IllegalArgumentException("[rank_features] fields do not support sorting, scripting or aggregating");
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new IllegalArgumentException("Queries on [rank_features] fields are not supported");
@@ -152,19 +165,6 @@ public class RankFeaturesFieldMapper extends ParametrizedFieldMapper {
throw new AssertionError("parse is implemented directly");
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
@Override
protected String contentType() {
return CONTENT_TYPE;


@@ -124,7 +124,7 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper {
@Override
public ScaledFloatFieldMapper build(BuilderContext context) {
ScaledFloatFieldType type = new ScaledFloatFieldType(buildFullName(context), indexed.getValue(), stored.getValue(),
hasDocValues.getValue(), meta.getValue(), scalingFactor.getValue());
hasDocValues.getValue(), meta.getValue(), scalingFactor.getValue(), nullValue.getValue());
return new ScaledFloatFieldMapper(name, type, multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
}
@@ -134,15 +134,17 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper {
public static final class ScaledFloatFieldType extends SimpleMappedFieldType {
private final double scalingFactor;
private final Double nullValue;
public ScaledFloatFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues,
Map<String, String> meta, double scalingFactor) {
Map<String, String> meta, double scalingFactor, Double nullValue) {
super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.scalingFactor = scalingFactor;
this.nullValue = nullValue;
}
public ScaledFloatFieldType(String name, double scalingFactor) {
this(name, true, false, true, Collections.emptyMap(), scalingFactor);
this(name, true, false, true, Collections.emptyMap(), scalingFactor, null);
}
public double getScalingFactor() {
@@ -218,6 +220,30 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper {
};
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Double parseSourceValue(Object value) {
double doubleValue;
if (value.equals("")) {
if (nullValue == null) {
return null;
}
doubleValue = nullValue;
} else {
doubleValue = objectToDouble(value);
}
double scalingFactor = getScalingFactor();
return Math.round(doubleValue * scalingFactor) / scalingFactor;
}
};
}
@Override
public Object valueForDisplay(Object value) {
if (value == null) {
@@ -394,31 +420,6 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper {
return doubleValue;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Double parseSourceValue(Object value) {
double doubleValue;
if (value.equals("")) {
if (nullValue == null) {
return null;
}
doubleValue = nullValue;
} else {
doubleValue = objectToDouble(value);
}
double scalingFactor = fieldType().getScalingFactor();
return Math.round(doubleValue * scalingFactor) / scalingFactor;
}
};
}
private static class ScaledFloatIndexFieldData extends IndexNumericFieldData {
private final IndexNumericFieldData scaledFieldData;
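The scaled_float fetcher above recovers the stored precision by round-tripping the source value through the scaling factor. A self-contained sketch of that arithmetic (toy code, not the Elasticsearch class):

// Toy illustration of the scaled_float round-trip: multiply by the
// scaling factor, round to the nearest long, then divide back.
public class ScaledFloatRoundTrip {
    static double fetch(double sourceValue, double scalingFactor) {
        // e.g. 3.1415926 * 100 = 314.15926 -> round -> 314 -> 3.14
        return Math.round(sourceValue * scalingFactor) / scalingFactor;
    }

    public static void main(String[] args) {
        System.out.println(fetch(3.1415926, 100)); // prints 3.14
    }
}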


@@ -263,6 +263,11 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper {
return shingleFields[Math.min(indexFromShingleSize, shingleFields.length - 1)];
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, QueryShardContext context) {
if (prefixField == null || prefixField.termLengthWithinBounds(value.length()) == false) {
@@ -369,6 +374,11 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper {
.build();
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
public String typeName() {
return "prefix";
@@ -405,11 +415,6 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper {
throw new UnsupportedOperationException();
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
@@ -451,11 +456,6 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
protected String contentType() {
return "shingle";
@@ -478,6 +478,11 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper {
this.prefixFieldType = prefixFieldType;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
public String typeName() {
return CONTENT_TYPE;
@@ -573,11 +578,6 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
protected String contentType() {
return CONTENT_TYPE;


@@ -23,7 +23,6 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Arrays;
@@ -79,6 +78,8 @@ public class TokenCountFieldMapper extends ParametrizedFieldMapper {
index.getValue(),
store.getValue(),
hasDocValues.getValue(),
false,
nullValue.getValue(),
meta.getValue());
return new TokenCountFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
@@ -129,20 +130,6 @@ public class TokenCountFieldMapper extends ParametrizedFieldMapper {
);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue(), nullValue) {
@Override
protected String parseSourceValue(Object value) {
return value.toString();
}
};
}
/**
* Count position increments in a token stream. Package private for testing.
* @param analyzer analyzer to create token stream


@@ -25,11 +25,8 @@ import org.apache.lucene.document.FeatureField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;
@@ -144,13 +141,4 @@ public class RankFeatureFieldMapperTests extends MapperTestCase {
assertEquals("[rank_feature] fields do not support indexing multiple values for the same field [foo.field] in the same document",
e.getCause().getMessage());
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RankFeatureFieldMapper mapper = new RankFeatureFieldMapper.Builder("field").build(context);
assertEquals(List.of(3.14f), fetchSourceValue(mapper, 3.14));
assertEquals(List.of(42.9f), fetchSourceValue(mapper, "42.9"));
}
}


@@ -19,6 +19,11 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import java.io.IOException;
import java.util.Collections;
public class RankFeatureFieldTypeTests extends FieldTypeTestCase {
@@ -27,4 +32,13 @@ public class RankFeatureFieldTypeTests extends FieldTypeTestCase {
MappedFieldType fieldType = new RankFeatureFieldMapper.RankFeatureFieldType("field", Collections.emptyMap(), true);
assertFalse(fieldType.isAggregatable());
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType mapper = new RankFeatureFieldMapper.Builder("field").build(context).fieldType();
assertEquals(Collections.singletonList(3.14f), fetchSourceValue(mapper, 3.14));
assertEquals(Collections.singletonList(42.9f), fetchSourceValue(mapper, "42.9"));
}
}


@@ -21,11 +21,8 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@@ -285,22 +282,4 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
containsString("Failed to parse mapping [_doc]: Field [scaling_factor] is required"));
assertWarnings("Parameter [index_options] has no effect on type [scaled_float] and will be removed in future");
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
ScaledFloatFieldMapper mapper = new ScaledFloatFieldMapper.Builder("field", false, false)
.scalingFactor(100)
.build(context);
assertEquals(org.elasticsearch.common.collect.List.of(3.14), fetchSourceValue(mapper, 3.1415926));
assertEquals(org.elasticsearch.common.collect.List.of(3.14), fetchSourceValue(mapper, "3.1415"));
assertEquals(org.elasticsearch.common.collect.List.of(), fetchSourceValue(mapper, ""));
ScaledFloatFieldMapper nullValueMapper = new ScaledFloatFieldMapper.Builder("field", false, false)
.scalingFactor(100)
.nullValue(2.71)
.build(context);
assertEquals(org.elasticsearch.common.collect.List.of(2.71), fetchSourceValue(nullValueMapper, ""));
}
}


@@ -29,6 +29,9 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.LeafNumericFieldData;
@@ -65,7 +68,7 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase {
// this test checks that searching scaled floats yields the same results as
// searching doubles that are rounded to the closest half float
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(
"scaled_float", true, false, false, Collections.emptyMap(), 0.1 + randomDouble() * 100);
"scaled_float", true, false, false, Collections.emptyMap(), 0.1 + randomDouble() * 100, null);
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
final int numDocs = 1000;
@@ -175,4 +178,24 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase {
}
IOUtils.close(w, dir);
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType mapper = new ScaledFloatFieldMapper.Builder("field", false, false)
.scalingFactor(100)
.build(context)
.fieldType();
assertEquals(Collections.singletonList(3.14), fetchSourceValue(mapper, 3.1415926));
assertEquals(Collections.singletonList(3.14), fetchSourceValue(mapper, "3.1415"));
assertEquals(Collections.emptyList(), fetchSourceValue(mapper, ""));
MappedFieldType nullValueMapper = new ScaledFloatFieldMapper.Builder("field", false, false)
.scalingFactor(100)
.nullValue(2.71)
.build(context)
.fieldType();
assertEquals(Collections.singletonList(2.71), fetchSourceValue(nullValueMapper, ""));
}
}


@@ -97,6 +97,11 @@ public class MetaJoinFieldMapper extends FieldMapper {
return new SortedSetOrdinalsIndexFieldData.Builder(name(), CoreValuesSourceType.BYTES);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for metadata field [" + typeName() + "].");
}
@Override
public Object valueForDisplay(Object value) {
if (value == null) {
@@ -139,11 +144,6 @@ public class MetaJoinFieldMapper extends FieldMapper {
throw new IllegalStateException("Should never be called");
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for metadata field [" + typeName() + "].");
}
@Override
protected String contentType() {
return CONTENT_TYPE;


@@ -113,6 +113,11 @@ public final class ParentIdFieldMapper extends FieldMapper {
return new SortedSetOrdinalsIndexFieldData.Builder(name(), CoreValuesSourceType.BYTES);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "].");
}
@Override
public Object valueForDisplay(Object value) {
if (value == null) {
@@ -182,11 +187,6 @@ public final class ParentIdFieldMapper extends FieldMapper {
context.doc().add(new SortedDocValuesField(fieldType().name(), binaryValue));
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "].");
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
ParentIdFieldMapper parentMergeWith = (ParentIdFieldMapper) other;


@@ -222,6 +222,19 @@ public final class ParentJoinFieldMapper extends FieldMapper {
return new SortedSetOrdinalsIndexFieldData.Builder(name(), CoreValuesSourceType.BYTES);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
@Override
public Object valueForDisplay(Object value) {
if (value == null) {
@@ -343,19 +356,6 @@ public final class ParentJoinFieldMapper extends FieldMapper {
throw new UnsupportedOperationException("parsing is implemented in parse(), this method should NEVER be called");
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
@Override
public void parse(ParseContext context) throws IOException {
context.path().add(simpleName());


@@ -224,6 +224,19 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper {
throw new QueryShardException(context, "Percolator fields are not searchable directly, use a percolate query instead");
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
Query percolateQuery(String name, PercolateQuery.QueryStore queryStore, List<BytesReference> documents,
IndexSearcher searcher, boolean excludeNestedDocuments, Version indexVersion) throws IOException {
IndexReader indexReader = searcher.getIndexReader();
@@ -373,19 +386,6 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper {
processQuery(query, context);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbField,
QueryBuilder queryBuilder, ParseContext context) throws IOException {
if (indexVersion.onOrAfter(Version.V_6_0_0_beta2)) {


@@ -68,22 +68,29 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
FIELD_TYPE.freeze();
}
public static final String NULL_VALUE = null;
public static final int IGNORE_ABOVE = Integer.MAX_VALUE;
}
public static final class CollationFieldType extends StringFieldType {
private final Collator collator;
private final String nullValue;
private final int ignoreAbove;
public CollationFieldType(String name, boolean isSearchable, boolean isStored, boolean hasDocValues,
Collator collator, Map<String, String> meta) {
Collator collator, String nullValue, int ignoreAbove, Map<String, String> meta) {
super(name, isSearchable, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
this.collator = collator;
this.nullValue = nullValue;
this.ignoreAbove = ignoreAbove;
}
public CollationFieldType(String name, boolean searchable, Collator collator) {
this(name, searchable, false, true, collator, null, Integer.MAX_VALUE, Collections.emptyMap());
}
public CollationFieldType(String name, Collator collator) {
this(name, true, false, true, collator, Collections.emptyMap());
this(name, true, false, true, collator, null, Integer.MAX_VALUE, Collections.emptyMap());
}
@Override
@@ -91,8 +98,22 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
return CONTENT_TYPE;
}
public Collator collator() {
return collator;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false, nullValue) {
@Override
protected String parseSourceValue(Object value) {
String keywordValue = value.toString();
if (keywordValue.length() > ignoreAbove) {
return null;
}
return keywordValue;
}
};
}
@Override
@@ -429,8 +450,9 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
@Override
public ICUCollationKeywordFieldMapper build(BuilderContext context) {
final Collator collator = buildCollator();
CollationFieldType ft = new CollationFieldType(buildFullName(context), indexed, fieldType.stored(), hasDocValues,
collator, meta);
CollationFieldType ft
= new CollationFieldType(buildFullName(context), indexed, fieldType.stored(),
hasDocValues, collator, nullValue, ignoreAbove, meta);
return new ICUCollationKeywordFieldMapper(name, fieldType, ft,
multiFieldsBuilder.build(this, context), copyTo, rules, language, country, variant, strength, decomposition,
alternate, caseLevel, caseFirst, numeric, variableTop, hiraganaQuaternaryMode, ignoreAbove, collator, nullValue);
@@ -726,22 +748,4 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue(), nullValue) {
@Override
protected String parseSourceValue(Object value) {
String keywordValue = value.toString();
if (keywordValue.length() > ignoreAbove) {
return null;
}
return keywordValue;
}
};
}
}


@@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.MappedFieldType.Relation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class CollationFieldTypeTests extends FieldTypeTestCase{
@@ -66,7 +65,7 @@ public class CollationFieldTypeTests extends FieldTypeTestCase{
assertEquals(new TermQuery(new Term("field", expected)), ft.termQuery("I WİLL USE TURKİSH CASING", null));
MappedFieldType unsearchable = new CollationFieldType("field", false, false, true, collator, Collections.emptyMap());
MappedFieldType unsearchable = new CollationFieldType("field", false, collator);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> unsearchable.termQuery("bar", null));
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
@@ -86,7 +85,7 @@ public class CollationFieldTypeTests extends FieldTypeTestCase{
assertEquals(new TermInSetQuery("field", terms),
ft.termsQuery(Arrays.asList("foo", "bar"), null));
MappedFieldType unsearchable = new CollationFieldType("field", false, false, true, collator, Collections.emptyMap());
MappedFieldType unsearchable = new CollationFieldType("field", false, collator);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> unsearchable.termsQuery(Arrays.asList("foo", "bar"), null));
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
@@ -135,7 +134,7 @@ public class CollationFieldTypeTests extends FieldTypeTestCase{
assertEquals("[range] queries on [text] or [keyword] fields cannot be executed when " +
"'search.allow_expensive_queries' is set to false.", ee.getMessage());
MappedFieldType unsearchable = new CollationFieldType("field", false, false, true, DEFAULT_COLLATOR, Collections.emptyMap());
MappedFieldType unsearchable = new CollationFieldType("field", false, DEFAULT_COLLATOR);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> unsearchable.rangeQuery("a", "b", false, false, null, null, null, MOCK_QSC));
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());


@@ -26,11 +26,8 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin;
import org.elasticsearch.plugins.Plugin;
@@ -120,7 +117,8 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase2<IC
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", FIELD_TYPE).field("null_value", "1234")));
doc = mapper.parse(source(b -> {}));
doc = mapper.parse(source(b -> {
}));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
@@ -298,24 +296,4 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase2<IC
assertEquals(0, fields.length);
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
ICUCollationKeywordFieldMapper mapper = new ICUCollationKeywordFieldMapper.Builder("field").build(context);
assertEquals(List.of("42"), fetchSourceValue(mapper, 42L));
assertEquals(List.of("true"), fetchSourceValue(mapper, true));
ICUCollationKeywordFieldMapper ignoreAboveMapper = new ICUCollationKeywordFieldMapper.Builder("field")
.ignoreAbove(4)
.build(context);
assertEquals(List.of(), fetchSourceValue(ignoreAboveMapper, "value"));
assertEquals(List.of("42"), fetchSourceValue(ignoreAboveMapper, 42L));
assertEquals(List.of("true"), fetchSourceValue(ignoreAboveMapper, true));
ICUCollationKeywordFieldMapper nullValueMapper = new ICUCollationKeywordFieldMapper.Builder("field")
.nullValue("NULL")
.build(context);
assertEquals(List.of("NULL"), fetchSourceValue(nullValueMapper, null));
}
}


@@ -0,0 +1,51 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import java.io.IOException;
import java.util.Collections;
public class ICUCollationKeywordFieldTypeTests extends FieldTypeTestCase {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
ICUCollationKeywordFieldMapper mapper = new ICUCollationKeywordFieldMapper.Builder("field").build(context);
assertEquals(Collections.singletonList("42"), fetchSourceValue(mapper.fieldType(), 42L));
assertEquals(Collections.singletonList("true"), fetchSourceValue(mapper.fieldType(), true));
ICUCollationKeywordFieldMapper ignoreAboveMapper = new ICUCollationKeywordFieldMapper.Builder("field")
.ignoreAbove(4)
.build(context);
assertEquals(Collections.emptyList(), fetchSourceValue(ignoreAboveMapper.fieldType(), "value"));
assertEquals(Collections.singletonList("42"), fetchSourceValue(ignoreAboveMapper.fieldType(), 42L));
assertEquals(Collections.singletonList("true"), fetchSourceValue(ignoreAboveMapper.fieldType(), true));
ICUCollationKeywordFieldMapper nullValueMapper = new ICUCollationKeywordFieldMapper.Builder("field")
.nullValue("NULL")
.build(context);
assertEquals(Collections.singletonList("NULL"), fetchSourceValue(nullValueMapper.fieldType(), null));
}
}


@@ -28,12 +28,10 @@ import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
@@ -46,11 +44,8 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ParsedDocument;
@@ -76,7 +71,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.index.mapper.FieldMapperTestCase.fetchSourceValue;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
@@ -677,20 +671,4 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
);
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
FieldMapper fieldMapper = new AnnotatedTextFieldMapper.Builder("field")
.indexAnalyzer(indexService.getIndexAnalyzers().getDefaultIndexAnalyzer())
.searchAnalyzer(indexService.getIndexAnalyzers().getDefaultSearchAnalyzer())
.searchQuoteAnalyzer(indexService.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer())
.build(context);
AnnotatedTextFieldMapper mapper = (AnnotatedTextFieldMapper) fieldMapper;
assertEquals(org.elasticsearch.common.collect.List.of("value"), fetchSourceValue(mapper, "value"));
assertEquals(org.elasticsearch.common.collect.List.of("42"), fetchSourceValue(mapper, 42L));
assertEquals(org.elasticsearch.common.collect.List.of("true"), fetchSourceValue(mapper, true));
}
}


@@ -39,14 +39,10 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.SourceValueFetcher;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.io.Reader;
@@ -586,19 +582,6 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
return value.toString();
}
};
}
@Override
protected String contentType() {
return CONTENT_TYPE;


@@ -22,10 +22,16 @@ package org.elasticsearch.index.mapper.annotatedtext;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queries.intervals.Intervals;
import org.apache.lucene.queries.intervals.IntervalsSource;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import java.io.IOException;
import java.util.Collections;
@@ -38,4 +44,20 @@ public class AnnotatedTextFieldTypeTests extends FieldTypeTestCase {
IntervalsSource source = ft.intervals("Donald Trump", 0, true, a, false);
assertEquals(Intervals.phrase(Intervals.term("donald"), Intervals.term("trump")), source);
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType fieldType = new AnnotatedTextFieldMapper.Builder("field")
.indexAnalyzer(Lucene.STANDARD_ANALYZER)
.searchAnalyzer(Lucene.STANDARD_ANALYZER)
.searchQuoteAnalyzer(Lucene.STANDARD_ANALYZER)
.build(context)
.fieldType();
assertEquals(Collections.singletonList("value"), fetchSourceValue(fieldType, "value"));
assertEquals(Collections.singletonList("42"), fetchSourceValue(fieldType, 42L));
assertEquals(Collections.singletonList("true"), fetchSourceValue(fieldType, true));
}
}


@@ -106,6 +106,19 @@ public class Murmur3FieldMapper extends ParametrizedFieldMapper {
return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected String parseSourceValue(Object value) {
return value.toString();
}
};
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new QueryShardException(context, "Murmur3 fields are not searchable: [" + name() + "]");
@@ -148,17 +161,4 @@ public class Murmur3FieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected String parseSourceValue(Object value) {
return value.toString();
}
};
}
}


@@ -179,22 +179,6 @@ public abstract class AbstractGeometryFieldMapper<Parsed, Processed> extends Fie
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
String geoFormat = format != null ? format : GeoJsonGeometryFormat.NAME;
AbstractGeometryFieldType<Parsed, Processed> mappedFieldType = fieldType();
Parser<Parsed> geometryParser = mappedFieldType.geometryParser();
Function<Object, Object> valueParser = value -> geometryParser.parseAndFormatObject(value, geoFormat);
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
return valueParser.apply(value);
}
};
}
public abstract static class TypeParser<T extends Builder> implements Mapper.TypeParser {
protected abstract T newBuilder(String name, Map<String, Object> params);
@@ -239,9 +223,12 @@ public abstract class AbstractGeometryFieldMapper<Parsed, Processed> extends Fie
protected Indexer<Parsed, Processed> geometryIndexer;
protected Parser<Parsed> geometryParser;
protected final boolean parsesArrayValue;
protected AbstractGeometryFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues, Map<String, String> meta) {
protected AbstractGeometryFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues,
boolean parsesArrayValue, Map<String, String> meta) {
super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.parsesArrayValue = parsesArrayValue;
}
public void setGeometryIndexer(Indexer<Parsed, Processed> geometryIndexer) {
@@ -266,6 +253,20 @@ public abstract class AbstractGeometryFieldMapper<Parsed, Processed> extends Fie
"Geometry fields do not support exact searching, use dedicated geometry queries instead: ["
+ name() + "]");
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
String geoFormat = format != null ? format : GeoJsonGeometryFormat.NAME;
Function<Object, Object> valueParser = value -> geometryParser.parseAndFormatObject(value, geoFormat);
return new SourceValueFetcher(name(), mapperService, parsesArrayValue) {
@Override
protected Object parseSourceValue(Object value) {
return valueParser.apply(value);
}
};
}
}
protected Explicit<Boolean> ignoreMalformed;


@@ -119,7 +119,7 @@ public abstract class AbstractPointGeometryFieldMapper<Parsed, Processed> extend
extends AbstractGeometryFieldType<Parsed, Processed> {
protected AbstractPointGeometryFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues,
Map<String, String> meta) {
super(name, indexed, stored, hasDocValues, meta);
super(name, indexed, stored, hasDocValues, true, meta);
}
}


@@ -158,8 +158,8 @@ public abstract class AbstractShapeGeometryFieldMapper<Parsed, Processed> extend
protected Orientation orientation = Defaults.ORIENTATION.value();
protected AbstractShapeGeometryFieldType(String name, boolean isSearchable, boolean isStored, boolean hasDocValues,
Map<String, String> meta) {
super(name, isSearchable, isStored, hasDocValues, meta);
boolean parsesArrayValue, Map<String, String> meta) {
super(name, isSearchable, isStored, hasDocValues, parsesArrayValue, meta);
}
public Orientation orientation() { return this.orientation; }


@@ -25,6 +25,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Collections;
import java.util.List;
@@ -85,6 +86,11 @@ public class AllFieldMapper extends MetadataFieldMapper {
super(NAME, false, false, false, TextSearchInfo.NONE, Collections.emptyMap());
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
public String typeName() {
return CONTENT_TYPE;


@@ -98,6 +98,19 @@ public class BinaryFieldMapper extends ParametrizedFieldMapper {
return CONTENT_TYPE;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
@Override
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
return DocValueFormat.BINARY;
@@ -180,19 +193,6 @@ public class BinaryFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
@Override
public ParametrizedFieldMapper.Builder getMergeBuilder() {
return new BinaryFieldMapper.Builder(simpleName()).init(this);


@@ -98,7 +98,7 @@ public class BooleanFieldMapper extends ParametrizedFieldMapper {
@Override
public BooleanFieldMapper build(BuilderContext context) {
MappedFieldType ft = new BooleanFieldType(buildFullName(context), indexed.getValue(), stored.getValue(),
docValues.getValue(), meta.getValue());
docValues.getValue(), nullValue.getValue(), meta.getValue());
ft.setBoost(boost.getValue());
return new BooleanFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
@@ -108,13 +108,20 @@ public class BooleanFieldMapper extends ParametrizedFieldMapper {
public static final class BooleanFieldType extends TermBasedFieldType {
private final Boolean nullValue;
public BooleanFieldType(String name, boolean isSearchable, boolean isStored, boolean hasDocValues,
Map<String, String> meta) {
Boolean nullValue, Map<String, String> meta) {
super(name, isSearchable, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.nullValue = nullValue;
}
public BooleanFieldType(String name) {
this(name, true, false, true, Collections.emptyMap());
this(name, true, false, true, false, Collections.emptyMap());
}
public BooleanFieldType(String name, boolean searchable) {
this(name, searchable, false, true, false, Collections.emptyMap());
}
@Override
@@ -122,6 +129,25 @@ public class BooleanFieldMapper extends ParametrizedFieldMapper {
return CONTENT_TYPE;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false, nullValue) {
@Override
protected Boolean parseSourceValue(Object value) {
if (value instanceof Boolean) {
return (Boolean) value;
} else {
String textValue = value.toString();
return Booleans.parseBoolean(textValue.toCharArray(), 0, textValue.length(), false);
}
}
};
}
@Override
public BytesRef indexedValueForSearch(Object value) {
if (value == null) {
@@ -243,25 +269,6 @@ public class BooleanFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue(), nullValue) {
@Override
protected Boolean parseSourceValue(Object value) {
if (value instanceof Boolean) {
return (Boolean) value;
} else {
String textValue = value.toString();
return Booleans.parseBoolean(textValue.toCharArray(), 0, textValue.length(), false);
}
}
};
}
@Override
public ParametrizedFieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName()).init(this);


@@ -301,6 +301,24 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
return CONTENT_TYPE;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, true) {
@Override
protected List<?> parseSourceValue(Object value) {
if (value instanceof List) {
return (List<?>) value;
} else {
return Collections.singletonList(value);
}
}
};
}
}
private final int maxInputLength;
@@ -515,24 +533,6 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected List<?> parseSourceValue(Object value) {
if (value instanceof List) {
return (List<?>) value;
} else {
return org.elasticsearch.common.collect.List.of(value);
}
}
};
}
static class CompletionInputMetadata {
public final String input;
public final Map<String, Set<String>> contexts;


@@ -251,7 +251,7 @@ public final class DateFieldMapper extends ParametrizedFieldMapper {
@Override
public DateFieldMapper build(BuilderContext context) {
DateFieldType ft = new DateFieldType(buildFullName(context), index.getValue(), store.getValue(), docValues.getValue(),
buildFormatter(), resolution, meta.getValue());
buildFormatter(), resolution, nullValue.getValue(), meta.getValue());
ft.setBoost(boost.getValue());
Long nullTimestamp = parseNullValue(ft);
return new DateFieldMapper(name, ft, multiFieldsBuilder.build(this, context),
@@ -273,25 +273,32 @@ public final class DateFieldMapper extends ParametrizedFieldMapper {
protected final DateFormatter dateTimeFormatter;
protected final DateMathParser dateMathParser;
protected final Resolution resolution;
protected final String nullValue;
public DateFieldType(String name, boolean isSearchable, boolean isStored, boolean hasDocValues,
DateFormatter dateTimeFormatter, Resolution resolution, Map<String, String> meta) {
DateFormatter dateTimeFormatter, Resolution resolution, String nullValue,
Map<String, String> meta) {
super(name, isSearchable, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.dateTimeFormatter = dateTimeFormatter;
this.dateMathParser = dateTimeFormatter.toDateMathParser();
this.resolution = resolution;
this.nullValue = nullValue;
}
public DateFieldType(String name) {
this(name, true, false, true, DEFAULT_DATE_TIME_FORMATTER, Resolution.MILLISECONDS, Collections.emptyMap());
this(name, true, false, true, DEFAULT_DATE_TIME_FORMATTER, Resolution.MILLISECONDS, null, Collections.emptyMap());
}
public DateFieldType(String name, DateFormatter dateFormatter) {
this(name, true, false, true, dateFormatter, Resolution.MILLISECONDS, Collections.emptyMap());
this(name, true, false, true, dateFormatter, Resolution.MILLISECONDS, null, Collections.emptyMap());
}
public DateFieldType(String name, Resolution resolution) {
this(name, true, false, true, DEFAULT_DATE_TIME_FORMATTER, resolution, Collections.emptyMap());
this(name, true, false, true, DEFAULT_DATE_TIME_FORMATTER, resolution, null, Collections.emptyMap());
}
public DateFieldType(String name, Resolution resolution, DateFormatter dateFormatter) {
this(name, true, false, true, dateFormatter, resolution, null, Collections.emptyMap());
}
@Override
@@ -316,6 +323,24 @@ public final class DateFieldMapper extends ParametrizedFieldMapper {
return resolution.convert(DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant());
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
DateFormatter defaultFormatter = dateTimeFormatter();
DateFormatter formatter = format != null
? DateFormatter.forPattern(format).withLocale(defaultFormatter.locale())
: defaultFormatter;
return new SourceValueFetcher(name(), mapperService, false, nullValue) {
@Override
public String parseSourceValue(Object value) {
String date = value.toString();
long timestamp = parse(date);
ZonedDateTime dateTime = resolution().toInstant(timestamp).atZone(ZoneOffset.UTC);
return formatter.format(dateTime);
}
};
}
@Override
public Query termQuery(Object value, @Nullable QueryShardContext context) {
Query query = rangeQuery(value, value, true, true, ShapeRelation.INTERSECTS, null, null, context);
@@ -609,24 +634,6 @@ public final class DateFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
DateFormatter defaultFormatter = fieldType().dateTimeFormatter();
DateFormatter formatter = format != null
? DateFormatter.forPattern(format).withLocale(defaultFormatter.locale())
: defaultFormatter;
return new SourceValueFetcher(name(), mapperService, parsesArrayValue(), nullValueAsString) {
@Override
public String parseSourceValue(Object value) {
String date = value.toString();
long timestamp = fieldType().parse(date);
ZonedDateTime dateTime = fieldType().resolution().toInstant(timestamp).atZone(ZoneOffset.UTC);
return formatter.format(dateTime);
}
};
}
public boolean getIgnoreMalformed() {
return ignoreMalformed;
}
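The date fetcher above parses the raw source value into a timestamp and then re-formats it at UTC, using either the field's own formatter or a caller-supplied format. A rough java.time sketch of that round-trip (stand-in code; the real implementation goes through Elasticsearch's DateFormatter and Resolution):

// Toy illustration of the date round-trip: parse the source string to
// epoch millis, then format it back at UTC in the requested format.
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

public class DateFetchSketch {
    static String fetch(String sourceDate, DateTimeFormatter inputFormat, DateTimeFormatter outputFormat) {
        long timestamp = Instant.from(inputFormat.parse(sourceDate)).toEpochMilli();
        return outputFormat.format(Instant.ofEpochMilli(timestamp).atZone(ZoneOffset.UTC));
    }

    public static void main(String[] args) {
        DateTimeFormatter iso = DateTimeFormatter.ISO_INSTANT;
        DateTimeFormatter dateOnly = DateTimeFormatter.ofPattern("yyyy-MM-dd");
        System.out.println(fetch("2020-10-04T13:54:59Z", iso, dateOnly)); // prints 2020-10-04
    }
}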


@@ -24,7 +24,6 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -34,8 +33,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.AbstractXContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper.FieldNamesFieldType;
import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.ArrayList;
@@ -276,11 +273,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
*/
protected abstract void parseCreateField(ParseContext context) throws IOException;
/**
* Create a helper class to fetch field values during the {@link FetchFieldsPhase}.
*/
public abstract ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, @Nullable String format);
protected final void createFieldNamesField(ParseContext context) {
assert fieldType().hasDocValues() == false : "_field_names should only be used when doc_values are turned off";
FieldNamesFieldType fieldNamesFieldType = context.docMapper().metadataMapper(FieldNamesFieldMapper.class).fieldType();


@@ -28,6 +28,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.ArrayList;
@@ -128,6 +129,11 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
return enabled;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
@Override
public Query existsQuery(QueryShardContext context) {
throw new UnsupportedOperationException("Cannot run exists query on _field_names");


@@ -93,7 +93,7 @@ public class GeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<Geomet
private final VectorGeoShapeQueryProcessor queryProcessor;
public GeoShapeFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues, Map<String, String> meta) {
super(name, indexed, stored, hasDocValues, meta);
super(name, indexed, stored, hasDocValues, false, meta);
this.queryProcessor = new VectorGeoShapeQueryProcessor();
}


@@ -117,6 +117,11 @@ public class IdFieldMapper extends MetadataFieldMapper {
return true;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
return termsQuery(Arrays.asList(value), context);

View File

@ -25,6 +25,7 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Collections;
@ -74,6 +75,11 @@ public final class IgnoredFieldMapper extends MetadataFieldMapper {
// field is bounded by the number of fields in the mappings.
return new TermRangeQuery(name(), null, null, true, true);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
}
private IgnoredFieldMapper() {

View File

@ -71,6 +71,11 @@ public class IndexFieldMapper extends MetadataFieldMapper {
public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
return new ConstantIndexFieldData.Builder(mapperService -> fullyQualifiedIndexName, name(), CoreValuesSourceType.BYTES);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
}
public IndexFieldMapper() {

View File

@ -111,7 +111,8 @@ public class IpFieldMapper extends ParametrizedFieldMapper {
@Override
public IpFieldMapper build(BuilderContext context) {
return new IpFieldMapper(name,
new IpFieldType(buildFullName(context), indexed.getValue(), stored.getValue(), hasDocValues.getValue(), meta.getValue()),
new IpFieldType(buildFullName(context), indexed.getValue(), stored.getValue(),
hasDocValues.getValue(), parseNullValue(), meta.getValue()),
multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
@ -124,12 +125,16 @@ public class IpFieldMapper extends ParametrizedFieldMapper {
public static final class IpFieldType extends SimpleMappedFieldType {
public IpFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues, Map<String, String> meta) {
private final InetAddress nullValue;
public IpFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues,
InetAddress nullValue, Map<String, String> meta) {
super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.nullValue = nullValue;
}
public IpFieldType(String name) {
this(name, true, false, true, Collections.emptyMap());
this(name, true, false, true, null, Collections.emptyMap());
}
@Override
@ -148,6 +153,25 @@ public class IpFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false, nullValue) {
@Override
protected Object parseSourceValue(Object value) {
InetAddress address;
if (value instanceof InetAddress) {
address = (InetAddress) value;
} else {
address = InetAddresses.forString(value.toString());
}
return InetAddresses.toAddrString(address);
}
};
}
@Override
public Query termQuery(Object value, @Nullable QueryShardContext context) {
failIfNotIndexed();
@ -411,25 +435,6 @@ public class IpFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue(), nullValue) {
@Override
protected Object parseSourceValue(Object value) {
InetAddress address;
if (value instanceof InetAddress) {
address = (InetAddress) value;
} else {
address = InetAddresses.forString(value.toString());
}
return InetAddresses.toAddrString(address);
}
};
}
@Override
public ParametrizedFieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName(), ignoreMalformedByDefault, indexCreatedVersion).init(this);
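
Editor's note: the fetcher moves essentially unchanged from the mapper (bottom hunk) onto IpFieldType (top hunk), with parsesArrayValue() collapsing to false and nullValue now read from the field type itself. Whether the source holds an InetAddress or a string, the value is printed in canonical form via InetAddresses. A small sketch of that normalization (illustrative, not part of the commit):

import org.elasticsearch.common.network.InetAddresses;
import java.net.InetAddress;

// Two spellings of the same address normalize to one canonical string.
InetAddress parsed = InetAddresses.forString("0:0:0:0:0:0:0:1");
String canonical = InetAddresses.toAddrString(parsed); // "::1"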

View File

@ -159,9 +159,7 @@ public final class KeywordFieldMapper extends ParametrizedFieldMapper {
else if (splitQueriesOnWhitespace.getValue()) {
searchAnalyzer = Lucene.WHITESPACE_ANALYZER;
}
return new KeywordFieldType(buildFullName(context), hasDocValues.getValue(), fieldType,
eagerGlobalOrdinals.getValue(), normalizer, searchAnalyzer,
similarity.getValue(), boost.getValue(), meta.getValue());
return new KeywordFieldType(buildFullName(context), fieldType, normalizer, searchAnalyzer, this);
}
@Override
@ -180,28 +178,48 @@ public final class KeywordFieldMapper extends ParametrizedFieldMapper {
public static final class KeywordFieldType extends StringFieldType {
public KeywordFieldType(String name, boolean hasDocValues, FieldType fieldType,
boolean eagerGlobalOrdinals, NamedAnalyzer normalizer, NamedAnalyzer searchAnalyzer,
SimilarityProvider similarity, float boost, Map<String, String> meta) {
super(name, fieldType.indexOptions() != IndexOptions.NONE, fieldType.stored(),
hasDocValues, new TextSearchInfo(fieldType, similarity, searchAnalyzer, searchAnalyzer), meta);
setEagerGlobalOrdinals(eagerGlobalOrdinals);
private final int ignoreAbove;
private final String nullValue;
public KeywordFieldType(String name, FieldType fieldType,
NamedAnalyzer normalizer, NamedAnalyzer searchAnalyzer, Builder builder) {
super(name,
fieldType.indexOptions() != IndexOptions.NONE,
fieldType.stored(),
builder.hasDocValues.getValue(),
new TextSearchInfo(fieldType, builder.similarity.getValue(), searchAnalyzer, searchAnalyzer),
builder.meta.getValue());
setEagerGlobalOrdinals(builder.eagerGlobalOrdinals.getValue());
setIndexAnalyzer(normalizer);
setBoost(boost);
setBoost(builder.boost.getValue());
this.ignoreAbove = builder.ignoreAbove.getValue();
this.nullValue = builder.nullValue.getValue();
}
public KeywordFieldType(String name, boolean isSearchable, boolean hasDocValues, Map<String, String> meta) {
super(name, isSearchable, false, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
this.ignoreAbove = Integer.MAX_VALUE;
this.nullValue = null;
}
public KeywordFieldType(String name) {
this(name, true, Defaults.FIELD_TYPE, false,
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, 1.0f, Collections.emptyMap());
this(name, true, true, Collections.emptyMap());
}
public KeywordFieldType(String name, FieldType fieldType) {
super(name, fieldType.indexOptions() != IndexOptions.NONE,
false, false,
new TextSearchInfo(fieldType, null, Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER),
Collections.emptyMap());
this.ignoreAbove = Integer.MAX_VALUE;
this.nullValue = null;
}
public KeywordFieldType(String name, NamedAnalyzer analyzer) {
super(name, true, false, true, new TextSearchInfo(Defaults.FIELD_TYPE, null, analyzer, analyzer), Collections.emptyMap());
this.ignoreAbove = Integer.MAX_VALUE;
this.nullValue = null;
}
@Override
@ -219,6 +237,34 @@ public final class KeywordFieldMapper extends ParametrizedFieldMapper {
return new SortedSetOrdinalsIndexFieldData.Builder(name(), CoreValuesSourceType.BYTES);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false, nullValue) {
@Override
protected String parseSourceValue(Object value) {
String keywordValue = value.toString();
if (keywordValue.length() > ignoreAbove) {
return null;
}
NamedAnalyzer normalizer = normalizer();
if (normalizer == null) {
return keywordValue;
}
try {
return normalizeValue(normalizer, name(), keywordValue);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
};
}
@Override
public Object valueForDisplay(Object value) {
if (value == null) {
@ -317,7 +363,7 @@ public final class KeywordFieldMapper extends ParametrizedFieldMapper {
NamedAnalyzer normalizer = fieldType().normalizer();
if (normalizer != null) {
value = normalizeValue(normalizer, value);
value = normalizeValue(normalizer, name(), value);
}
// convert to utf8 only once before feeding postings/dv/stored fields
@ -336,8 +382,8 @@ public final class KeywordFieldMapper extends ParametrizedFieldMapper {
}
}
private String normalizeValue(NamedAnalyzer normalizer, String value) throws IOException {
try (TokenStream ts = normalizer.tokenStream(name(), value)) {
private static String normalizeValue(NamedAnalyzer normalizer, String field, String value) throws IOException {
try (TokenStream ts = normalizer.tokenStream(field, value)) {
final CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
ts.reset();
if (ts.incrementToken() == false) {
@ -356,34 +402,6 @@ public final class KeywordFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue(), nullValue) {
@Override
protected String parseSourceValue(Object value) {
String keywordValue = value.toString();
if (keywordValue.length() > ignoreAbove) {
return null;
}
NamedAnalyzer normalizer = fieldType().normalizer();
if (normalizer == null) {
return keywordValue;
}
try {
return normalizeValue(normalizer, keywordValue);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
};
}
@Override
protected String contentType() {
return CONTENT_TYPE;
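
Editor's note: the keyword fetcher likewise moves verbatim from mapper to field type, which is why ignoreAbove and nullValue become fields on KeywordFieldType. It applies two mapping rules at fetch time: values longer than ignore_above fetch as null, and surviving values run through the configured normalizer. A minimal sketch of those rules, with a lambda standing in for the real Lucene normalizer (illustrative only):

import java.util.function.UnaryOperator;

// ignore_above filtering plus normalization; the UnaryOperator stands in for NamedAnalyzer.
static String fetchKeyword(String value, int ignoreAbove, UnaryOperator<String> normalizer) {
    if (value.length() > ignoreAbove) {
        return null; // too long to have been indexed, so nothing to fetch
    }
    return normalizer == null ? value : normalizer.apply(value);
}
// fetchKeyword("New York", 256, s -> s.toLowerCase(java.util.Locale.ROOT)) => "new york"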

View File

@ -330,7 +330,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
private final LegacyGeoShapeQueryProcessor queryProcessor;
private GeoShapeFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues, Map<String, String> meta) {
super(name, indexed, stored, hasDocValues, meta);
super(name, indexed, stored, hasDocValues, false, meta);
this.queryProcessor = new LegacyGeoShapeQueryProcessor(this);
}

View File

@ -50,6 +50,7 @@ import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
@ -99,6 +100,11 @@ public abstract class MappedFieldType {
throw new IllegalArgumentException("Fielddata is not supported on field [" + name() + "] of type [" + typeName() + "]");
}
/**
* Create a helper class to fetch field values during the {@link FetchFieldsPhase}.
*/
public abstract ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, @Nullable String format);
/** Returns the name of this type, as would be specified in mapping properties */
public abstract String typeName();
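
Editor's note: this is the heart of the change. With valueFetcher now an abstract method on MappedFieldType, a search-time field can be described by a field type alone, with no backing FieldMapper, which is exactly what runtime fields need. A minimal sketch of a custom field type under the new contract; the "echo" type and all of its names are invented for illustration:

import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Collections;

public class EchoFieldType extends MappedFieldType {

    public EchoFieldType(String name) {
        super(name, false, false, false, TextSearchInfo.NONE, Collections.emptyMap());
    }

    @Override
    public String typeName() {
        return "echo"; // invented type name
    }

    @Override
    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
        // Delegate to SourceValueFetcher and hand source values back unchanged.
        return new SourceValueFetcher(name(), mapperService, false) {
            @Override
            protected Object parseSourceValue(Object value) {
                return value;
            }
        };
    }

    @Override
    public Query termQuery(Object value, QueryShardContext context) {
        throw new IllegalArgumentException("[echo] fields do not support searching");
    }
}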

View File

@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Map;
@ -176,8 +175,4 @@ public abstract class MetadataFieldMapper extends ParametrizedFieldMapper {
// do nothing
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
}
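
Editor's note: with the blanket override deleted from this base class, every metadata field type now states explicitly that internal fields cannot be fetched. That is the recurring hunk on _field_names, _id, _ignored, _index, _routing, _seq_no, _source, _type and _version throughout this diff:

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
    throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}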

View File

@ -127,9 +127,8 @@ public class NumberFieldMapper extends ParametrizedFieldMapper {
@Override
public NumberFieldMapper build(BuilderContext context) {
return new NumberFieldMapper(name,
new NumberFieldType(buildFullName(context), type, indexed.getValue(), stored.getValue(), hasDocValues.getValue(),
meta.getValue()), multiFieldsBuilder.build(this, context), copyTo.build(), this);
MappedFieldType ft = new NumberFieldType(buildFullName(context), this);
return new NumberFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
}
@ -893,16 +892,25 @@ public class NumberFieldMapper extends ParametrizedFieldMapper {
public static final class NumberFieldType extends SimpleMappedFieldType {
private final NumberType type;
private final boolean coerce;
private final Number nullValue;
public NumberFieldType(String name, NumberType type, boolean isSearchable, boolean isStored,
boolean hasDocValues, Map<String, String> meta) {
boolean hasDocValues, boolean coerce, Number nullValue, Map<String, String> meta) {
super(name, isSearchable, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.type = Objects.requireNonNull(type);
this.coerce = coerce;
this.nullValue = nullValue;
this.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); // allows number fields in significant text aggs - do we need this?
}
NumberFieldType(String name, Builder builder) {
this(name, builder.type, builder.indexed.getValue(), builder.stored.getValue(), builder.hasDocValues.getValue(),
builder.coerce.getValue().value(), builder.nullValue.getValue(), builder.meta.getValue());
}
public NumberFieldType(String name, NumberType type) {
this(name, type, true, false, true, Collections.emptyMap());
this(name, type, true, false, true, true, null, Collections.emptyMap());
}
@Override
@ -966,6 +974,23 @@ public class NumberFieldMapper extends ParametrizedFieldMapper {
return type.valueForSearch((Number) value);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false, nullValue) {
@Override
protected Object parseSourceValue(Object value) {
if (value.equals("")) {
return nullValue;
}
return type.parse(value, coerce);
}
};
}
@Override
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (timeZone != null) {
@ -1084,23 +1109,6 @@ public class NumberFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue(), nullValue) {
@Override
protected Object parseSourceValue(Object value) {
if (value.equals("")) {
return nullValue;
}
return fieldType().type.parse(value, coerce.value());
}
};
}
@Override
public ParametrizedFieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName(), type, ignoreMalformedByDefault, coerceByDefault).init(this);
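
Editor's note: since coerce and null_value now live on NumberFieldType, the fetcher above can resolve an empty string in _source to the configured null_value without reaching back into the mapper. A tiny sketch of that rule, with Double.parseDouble as an illustrative stand-in for NumberType.parse:

// "" in _source stands in for a missing value and fetches as the configured null_value.
static Object fetchNumber(Object sourceValue, Number nullValue) {
    if (sourceValue.equals("")) {
        return nullValue;
    }
    return Double.parseDouble(sourceValue.toString()); // stand-in for type.parse(value, coerce)
}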

View File

@ -137,14 +137,14 @@ public class RangeFieldMapper extends ParametrizedFieldMapper {
dateTimeFormatter = DateFormatter.forPattern(format.getValue()).withLocale(locale.getValue());
}
return new RangeFieldType(buildFullName(context), index.getValue(), store.getValue(), hasDocValues.getValue(),
dateTimeFormatter, meta.getValue());
}
dateTimeFormatter, coerce.getValue().value(), meta.getValue());
}
if (type == RangeType.DATE) {
return new RangeFieldType(buildFullName(context), index.getValue(), store.getValue(), hasDocValues.getValue(),
Defaults.DATE_FORMATTER, meta.getValue());
Defaults.DATE_FORMATTER, coerce.getValue().value(), meta.getValue());
}
return new RangeFieldType(buildFullName(context), type, index.getValue(), store.getValue(), hasDocValues.getValue(),
meta.getValue());
coerce.getValue().value(), meta.getValue());
}
@Override
@ -159,32 +159,35 @@ public class RangeFieldMapper extends ParametrizedFieldMapper {
protected final RangeType rangeType;
protected final DateFormatter dateTimeFormatter;
protected final DateMathParser dateMathParser;
protected final boolean coerce;
public RangeFieldType(String name, RangeType type, boolean indexed, boolean stored,
boolean hasDocValues, Map<String, String> meta) {
boolean hasDocValues, boolean coerce, Map<String, String> meta) {
super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
assert type != RangeType.DATE;
this.rangeType = Objects.requireNonNull(type);
dateTimeFormatter = null;
dateMathParser = null;
setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
this.coerce = coerce;
}
public RangeFieldType(String name, RangeType type) {
this(name, type, true, false, true, Collections.emptyMap());
this(name, type, true, false, true, false, Collections.emptyMap());
}
public RangeFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues, DateFormatter formatter,
Map<String, String> meta) {
boolean coerce, Map<String, String> meta) {
super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.rangeType = RangeType.DATE;
this.dateTimeFormatter = Objects.requireNonNull(formatter);
this.dateMathParser = dateTimeFormatter.toDateMathParser();
setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
this.coerce = coerce;
}
public RangeFieldType(String name, DateFormatter formatter) {
this(name, true, false, true, formatter, Collections.emptyMap());
this(name, true, false, true, formatter, false, Collections.emptyMap());
}
public RangeType rangeType() { return rangeType; }
@ -195,6 +198,37 @@ public class RangeFieldMapper extends ParametrizedFieldMapper {
return new BinaryIndexFieldData.Builder(name(), CoreValuesSourceType.RANGE);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
DateFormatter defaultFormatter = dateTimeFormatter();
DateFormatter formatter = format != null
? DateFormatter.forPattern(format).withLocale(defaultFormatter.locale())
: defaultFormatter;
return new SourceValueFetcher(name(), mapperService, false) {
@Override
@SuppressWarnings("unchecked")
protected Object parseSourceValue(Object value) {
RangeType rangeType = rangeType();
if (!(value instanceof Map)) {
assert rangeType == RangeType.IP;
Tuple<InetAddress, Integer> ipRange = InetAddresses.parseCidr(value.toString());
return InetAddresses.toCidrString(ipRange.v1(), ipRange.v2());
}
Map<String, Object> range = (Map<String, Object>) value;
Map<String, Object> parsedRange = new HashMap<>();
for (Map.Entry<String, Object> entry : range.entrySet()) {
Object parsedValue = rangeType.parseValue(entry.getValue(), coerce, dateMathParser);
Object formattedValue = rangeType.formatValue(parsedValue, formatter);
parsedRange.put(entry.getKey(), formattedValue);
}
return parsedRange;
}
};
}
@Override
public String typeName() {
return rangeType.name;
@ -362,37 +396,6 @@ public class RangeFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
DateFormatter defaultFormatter = fieldType().dateTimeFormatter();
DateFormatter formatter = format != null
? DateFormatter.forPattern(format).withLocale(defaultFormatter.locale())
: defaultFormatter;
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
@SuppressWarnings("unchecked")
protected Object parseSourceValue(Object value) {
RangeType rangeType = fieldType().rangeType();
if (!(value instanceof Map)) {
assert rangeType == RangeType.IP;
Tuple<InetAddress, Integer> ipRange = InetAddresses.parseCidr(value.toString());
return InetAddresses.toCidrString(ipRange.v1(), ipRange.v2());
}
Map<String, Object> range = (Map<String, Object>) value;
Map<String, Object> parsedRange = new HashMap<>();
for (Map.Entry<String, Object> entry : range.entrySet()) {
Object parsedValue = rangeType.parseValue(entry.getValue(), coerce.value(), fieldType().dateMathParser);
Object formattedValue = rangeType.formatValue(parsedValue, formatter);
parsedRange.put(entry.getKey(), formattedValue);
}
return parsedRange;
}
};
}
private static Range parseIpRangeFromCidr(final XContentParser parser) throws IOException {
final Tuple<InetAddress, Integer> cidr = InetAddresses.parseCidr(parser.text());
// create the lower value by zeroing out the host portion, upper value by filling it with all ones.
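
Editor's note: the range fetcher, moved onto RangeFieldType with coerce carried along as a constructor parameter, re-prints each bound of a source range in the requested format, and prints IP ranges (which arrive as CIDR strings rather than maps) in canonical CIDR form. A standalone sketch of the date-range case; the format patterns are illustrative:

import org.elasticsearch.common.time.DateFormatter;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.HashMap;
import java.util.Map;

// A date range from _source, re-printed with a "yyyy/MM/dd" fetch format.
Map<String, Object> range = new HashMap<>();
range.put("gte", "2020-01-01T00:00:00Z");
range.put("lte", "2020-12-31T00:00:00Z");
DateFormatter in = DateFormatter.forPattern("strict_date_time_no_millis");
DateFormatter out = DateFormatter.forPattern("yyyy/MM/dd");
Map<String, Object> formatted = new HashMap<>();
for (Map.Entry<String, Object> entry : range.entrySet()) {
    long millis = in.parseMillis(entry.getValue().toString());
    formatted.put(entry.getKey(), out.format(Instant.ofEpochMilli(millis).atZone(ZoneOffset.UTC)));
}
// formatted => {gte=2020/01/01, lte=2020/12/31}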

View File

@ -23,6 +23,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Collections;
import java.util.List;
@ -92,6 +93,11 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
public String typeName() {
return CONTENT_TYPE;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
}
private final boolean required;

View File

@ -122,6 +122,11 @@ public class SeqNoFieldMapper extends MetadataFieldMapper {
return Long.parseLong(value.toString());
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
@Override
public Query termQuery(Object value, @Nullable QueryShardContext context) {
long v = parse(value);

View File

@ -38,6 +38,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Arrays;
@ -110,6 +111,11 @@ public class SourceFieldMapper extends MetadataFieldMapper {
return CONTENT_TYPE;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
@Override
public Query existsQuery(QueryShardContext context) {
throw new QueryShardException(context, "The _source field is not searchable");

View File

@ -394,6 +394,11 @@ public class TextFieldMapper extends FieldMapper {
return "phrase";
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
public Query existsQuery(QueryShardContext context) {
throw new UnsupportedOperationException();
@ -415,6 +420,11 @@ public class TextFieldMapper extends FieldMapper {
this.hasPositions = hasPositions;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
static boolean canMerge(PrefixFieldType first, PrefixFieldType second) {
if (first == null) {
return second == null;
@ -510,11 +520,6 @@ public class TextFieldMapper extends FieldMapper {
throw new UnsupportedOperationException();
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
@ -541,11 +546,6 @@ public class TextFieldMapper extends FieldMapper {
throw new UnsupportedOperationException();
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
@ -640,6 +640,19 @@ public class TextFieldMapper extends FieldMapper {
return CONTENT_TYPE;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Object parseSourceValue(Object value) {
return value.toString();
}
};
}
@Override
public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, QueryShardContext context) {
if (prefixFieldType == null || prefixFieldType.accept(value.length()) == false) {
@ -836,19 +849,6 @@ public class TextFieldMapper extends FieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
return value.toString();
}
};
}
@Override
public Iterator<Mapper> iterator() {
List<Mapper> subIterators = new ArrayList<>();

View File

@ -99,6 +99,11 @@ public class TypeFieldMapper extends MetadataFieldMapper {
return new ConstantIndexFieldData.Builder(typeFunction, name(), CoreValuesSourceType.BYTES);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
@Override
public boolean isSearchable() {
return true;

View File

@ -27,8 +27,8 @@ import java.io.IOException;
import java.util.List;
/**
* A helper class for fetching field values during the {@link FetchFieldsPhase}. Each {@link FieldMapper}
* is in charge of defining a value fetcher through {@link FieldMapper#valueFetcher}.
* A helper class for fetching field values during the {@link FetchFieldsPhase}. Each {@link MappedFieldType}
* is in charge of defining a value fetcher through {@link MappedFieldType#valueFetcher}.
*/
public interface ValueFetcher {
/**

View File

@ -25,6 +25,7 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Collections;
@ -53,6 +54,11 @@ public class VersionFieldMapper extends MetadataFieldMapper {
public Query termQuery(Object value, QueryShardContext context) {
throw new QueryShardException(context, "The _version field is not searchable");
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
}
private VersionFieldMapper() {

View File

@ -21,11 +21,8 @@ package org.elasticsearch.search.fetch.subphase;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.index.mapper.FieldAliasMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
@ -46,7 +43,7 @@ public class FieldFetcher {
public static FieldFetcher create(MapperService mapperService,
SearchLookup searchLookup,
Collection<FieldAndFormat> fieldAndFormats) {
MappingLookup fieldMappers = mapperService.documentMapper().mappers();
List<FieldContext> fieldContexts = new ArrayList<>();
for (FieldAndFormat fieldAndFormat : fieldAndFormats) {
@ -55,19 +52,11 @@ public class FieldFetcher {
Collection<String> concreteFields = mapperService.simpleMatchToFullName(fieldPattern);
for (String field : concreteFields) {
Mapper mapper = fieldMappers.getMapper(field);
if (mapper == null || mapperService.isMetadataField(field)) {
MappedFieldType ft = mapperService.fieldType(field);
if (ft == null || mapperService.isMetadataField(field)) {
continue;
}
if (mapper instanceof FieldAliasMapper) {
String target = ((FieldAliasMapper) mapper).path();
mapper = fieldMappers.getMapper(target);
assert mapper instanceof FieldMapper;
}
FieldMapper fieldMapper = (FieldMapper) mapper;
ValueFetcher valueFetcher = fieldMapper.valueFetcher(mapperService, searchLookup, format);
ValueFetcher valueFetcher = ft.valueFetcher(mapperService, searchLookup, format);
fieldContexts.add(new FieldContext(field, valueFetcher));
}
}
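
Editor's note: here is the call-site payoff. mapperService.fieldType(field) resolves aliases to their concrete field type, so the FieldAliasMapper special case disappears and each field reduces to a null-and-metadata check followed by a valueFetcher call. The resulting per-field flow, as a sketch with the surrounding plumbing elided:

for (String field : concreteFields) {
    MappedFieldType ft = mapperService.fieldType(field); // alias-aware lookup
    if (ft == null || mapperService.isMetadataField(field)) {
        continue; // unmapped or internal field
    }
    fieldContexts.add(new FieldContext(field, ft.valueFetcher(mapperService, searchLookup, format)));
}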

View File

@ -47,6 +47,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParametrizedFieldMapper;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndexTemplateMissingException;
import org.elasticsearch.indices.IndicesService;
@ -54,6 +55,7 @@ import org.elasticsearch.indices.InvalidIndexTemplateException;
import org.elasticsearch.indices.SystemIndices;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
@ -1586,6 +1588,11 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
public MetadataTimestampFieldMapper(boolean enabled) {
super(new MappedFieldType("_data_stream_timestamp", false, false, false, TextSearchInfo.NONE, Collections.emptyMap()) {
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
public String typeName() {
return "_data_stream_timestamp";

View File

@ -312,17 +312,17 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
public void testRequireDocValuesOnLongs() {
doTestRequireDocValues(new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG));
doTestRequireDocValues(new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG,
true, false, false, Collections.emptyMap()));
true, false, false, false, null, Collections.emptyMap()));
}
public void testRequireDocValuesOnDoubles() {
doTestRequireDocValues(new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE));
doTestRequireDocValues(new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE,
true, false, false, Collections.emptyMap()));
true, false, false, false, null, Collections.emptyMap()));
}
public void testRequireDocValuesOnBools() {
doTestRequireDocValues(new BooleanFieldMapper.BooleanFieldType("field"));
doTestRequireDocValues(new BooleanFieldMapper.BooleanFieldType("field", true, false, false, Collections.emptyMap()));
doTestRequireDocValues(new BooleanFieldMapper.BooleanFieldType("field", true, false, false, null, Collections.emptyMap()));
}
}

View File

@ -27,19 +27,13 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.ParseContext.Document;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
public class BooleanFieldMapperTests extends MapperTestCase {
@ -198,20 +192,4 @@ public class BooleanFieldMapperTests extends MapperTestCase {
assertEquals(new BoostQuery(new TermQuery(new Term("field", "T")), 2.0f), ft.termQuery("true", null));
assertParseMaximalWarnings();
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
BooleanFieldMapper mapper = new BooleanFieldMapper.Builder("field").build(context);
assertEquals(List.of(true), fetchSourceValue(mapper, true));
assertEquals(List.of(false), fetchSourceValue(mapper, "false"));
assertEquals(List.of(false), fetchSourceValue(mapper, ""));
Map<String, Object> mapping = org.elasticsearch.common.collect.Map.of("type", "boolean", "null_value", true);
BooleanFieldMapper.Builder builder = new BooleanFieldMapper.Builder("field");
builder.parse("field", null, new HashMap<>(mapping));
BooleanFieldMapper nullValueMapper = builder.build(context);
assertEquals(List.of(true), fetchSourceValue(nullValueMapper, null));
}
}

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
import java.io.IOException;
import java.util.Collections;
public class BooleanFieldTypeTests extends FieldTypeTestCase {
@ -45,9 +46,22 @@ public class BooleanFieldTypeTests extends FieldTypeTestCase {
assertEquals(new TermQuery(new Term("field", "T")), ft.termQuery("true", null));
assertEquals(new TermQuery(new Term("field", "F")), ft.termQuery("false", null));
MappedFieldType unsearchable = new BooleanFieldMapper.BooleanFieldType("field", false, false, true, Collections.emptyMap());
MappedFieldType unsearchable = new BooleanFieldMapper.BooleanFieldType("field", false);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> unsearchable.termQuery("true", null));
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
}
public void testFetchSourceValue() throws IOException {
MappedFieldType fieldType = new BooleanFieldMapper.BooleanFieldType("field");
assertEquals(Collections.singletonList(true), fetchSourceValue(fieldType, true));
assertEquals(Collections.singletonList(false), fetchSourceValue(fieldType, "false"));
assertEquals(Collections.singletonList(false), fetchSourceValue(fieldType, ""));
MappedFieldType nullFieldType = new BooleanFieldMapper.BooleanFieldType(
"field", true, false, true, true, Collections.emptyMap()
);
assertEquals(Collections.singletonList(true), fetchSourceValue(nullFieldType, null));
}
}

View File

@ -33,11 +33,8 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -54,7 +51,6 @@ import org.hamcrest.core.CombinableMatcher;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
@ -751,22 +747,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
"The maximum allowed number of completion contexts in a mapping will be limited to [10] starting in version [8.0].");
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer());
CompletionFieldMapper mapper = new CompletionFieldMapper.Builder("completion", defaultAnalyzer, Version.CURRENT).build(context);
assertEquals(org.elasticsearch.common.collect.List.of("value"), fetchSourceValue(mapper, "value"));
List<String> list = org.elasticsearch.common.collect.List.of("first", "second");
assertEquals(list, fetchSourceValue(mapper, list));
Map<String, Object> object = org.elasticsearch.common.collect.Map.of(
"input", org.elasticsearch.common.collect.List.of("first", "second"), "weight", "2.718");
assertEquals(org.elasticsearch.common.collect.List.of(object), fetchSourceValue(mapper, object));
}
private Matcher<IndexableField> suggestField(String value) {
return Matchers.allOf(hasProperty(IndexableField::stringValue, equalTo(value)),
Matchers.instanceOf(SuggestField.class));

View File

@ -0,0 +1,50 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class CompletionFieldTypeTests extends FieldTypeTestCase {
public void testFetchSourceValue() throws IOException {
NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer());
MappedFieldType fieldType = new CompletionFieldMapper.CompletionFieldType("name", defaultAnalyzer, Collections.emptyMap());
assertEquals(Collections.singletonList("value"), fetchSourceValue(fieldType, "value"));
List<String> list = Arrays.asList("first", "second");
assertEquals(list, fetchSourceValue(fieldType, list));
Map<String, Object> object = new HashMap<>();
object.put("input", Arrays.asList("first", "second"));
object.put("weight", "2.718");
assertEquals(Collections.singletonList(object), fetchSourceValue(fieldType, object));
}
}

View File

@ -21,14 +21,10 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.bootstrap.JavaVersion;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.DateFieldMapper.Resolution;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.search.DocValueFormat;
@ -36,8 +32,6 @@ import java.io.IOException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.notNullValue;
@ -311,45 +305,7 @@ public class DateFieldMapperTests extends MapperTestCase {
assertThat(e.getMessage(), containsString("Error parsing [format] on field [field]: Invalid"));
}
public void testFetchSourceValue() throws IOException {
DateFieldMapper mapper = createMapper(Resolution.MILLISECONDS, null);
String date = "2020-05-15T21:33:02.000Z";
assertEquals(List.of(date), fetchSourceValue(mapper, date));
assertEquals(List.of(date), fetchSourceValue(mapper, 1589578382000L));
DateFieldMapper mapperWithFormat = createMapper(Resolution.MILLISECONDS, "yyyy/MM/dd||epoch_millis");
String dateInFormat = "1990/12/29";
assertEquals(List.of(dateInFormat), fetchSourceValue(mapperWithFormat, dateInFormat));
assertEquals(List.of(dateInFormat), fetchSourceValue(mapperWithFormat, 662428800000L));
DateFieldMapper mapperWithMillis = createMapper(Resolution.MILLISECONDS, "epoch_millis");
String dateInMillis = "662428800000";
assertEquals(List.of(dateInMillis), fetchSourceValue(mapperWithMillis, dateInMillis));
assertEquals(List.of(dateInMillis), fetchSourceValue(mapperWithMillis, 662428800000L));
String nullValueDate = "2020-05-15T21:33:02.000Z";
DateFieldMapper nullValueMapper = createMapper(Resolution.MILLISECONDS, null, nullValueDate);
assertEquals(List.of(nullValueDate), fetchSourceValue(nullValueMapper, null));
}
public void testParseSourceValueWithFormat() throws IOException {
DateFieldMapper mapper = createMapper(Resolution.NANOSECONDS, "strict_date_time", "1970-12-29T00:00:00.000Z");
String date = "1990-12-29T00:00:00.000Z";
assertEquals(List.of("1990/12/29"), fetchSourceValue(mapper, date, "yyyy/MM/dd"));
assertEquals(List.of("662428800000"), fetchSourceValue(mapper, date, "epoch_millis"));
assertEquals(List.of("1970/12/29"), fetchSourceValue(mapper, null, "yyyy/MM/dd"));
}
public void testParseSourceValueNanos() throws IOException {
DateFieldMapper mapper = createMapper(Resolution.NANOSECONDS, "strict_date_time||epoch_millis");
String date = "2020-05-15T21:33:02.123456789Z";
assertEquals(List.of("2020-05-15T21:33:02.123456789Z"), fetchSourceValue(mapper, date));
assertEquals(List.of("2020-05-15T21:33:02.123Z"), fetchSourceValue(mapper, 1589578382123L));
String nullValueDate = "2020-05-15T21:33:02.123456789Z";
DateFieldMapper nullValueMapper = createMapper(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", nullValueDate);
assertEquals(List.of(nullValueDate), fetchSourceValue(nullValueMapper, null));
}
public void testFetchDocValuesMillis() throws IOException {
MapperService mapperService = createMapperService(
@ -372,27 +328,4 @@ public class DateFieldMapperTests extends MapperTestCase {
assertEquals(List.of(date), fetchFromDocValues(mapperService, ft, format, date));
assertEquals(List.of("2020-05-15T21:33:02.123Z"), fetchFromDocValues(mapperService, ft, format, 1589578382123L));
}
private DateFieldMapper createMapper(Resolution resolution, String format) {
return createMapper(resolution, format, null);
}
private DateFieldMapper createMapper(Resolution resolution, String format, String nullValue) {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
Map<String, Object> mapping = new HashMap<>();
mapping.put("type", "date_nanos");
if (format != null) {
mapping.put("format", format);
}
if (nullValue != null) {
mapping.put("null_value", nullValue);
}
DateFieldMapper.Builder builder
= new DateFieldMapper.Builder("field", resolution, null, false, Version.CURRENT);
builder.parse("field", null, mapping);
return builder.build(context);
}
}

View File

@ -35,14 +35,15 @@ import org.apache.lucene.store.Directory;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData;
import org.elasticsearch.index.fielddata.LeafNumericFieldData;
import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData;
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
import org.elasticsearch.index.mapper.DateFieldMapper.Resolution;
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
@ -70,14 +71,12 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
}
public void testIsFieldWithinQueryDateMillis() throws IOException {
DateFieldType ft = new DateFieldType("my_date", true, false, true,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, Resolution.MILLISECONDS, Collections.emptyMap());
DateFieldType ft = new DateFieldType("my_date", Resolution.MILLISECONDS);
isFieldWithinRangeTestCase(ft);
}
public void testIsFieldWithinQueryDateNanos() throws IOException {
DateFieldType ft = new DateFieldType("my_date", true, false, true,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, Resolution.NANOSECONDS, Collections.emptyMap());
DateFieldType ft = new DateFieldType("my_date", Resolution.NANOSECONDS);
isFieldWithinRangeTestCase(ft);
}
@ -176,7 +175,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
assertEquals(expected, ft.termQuery(date, context));
MappedFieldType unsearchable = new DateFieldType("field", false, false, true, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
Resolution.MILLISECONDS, Collections.emptyMap());
Resolution.MILLISECONDS, null, Collections.emptyMap());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> unsearchable.termQuery(date, context));
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
@ -211,7 +210,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
ft.rangeQuery("now", instant2, true, true, null, null, null, context));
MappedFieldType unsearchable = new DateFieldType("field", false, false, true, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
Resolution.MILLISECONDS, Collections.emptyMap());
Resolution.MILLISECONDS, null, Collections.emptyMap());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> unsearchable.rangeQuery(date1, date2, true, true, null, null, null, context));
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
@ -276,4 +275,49 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
private Instant instant(String str) {
return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(str)).toInstant();
}
private static DateFieldType fieldType(Resolution resolution, String format, String nullValue) {
DateFormatter formatter = DateFormatter.forPattern(format);
return new DateFieldType("field", true, false, true, formatter, resolution, nullValue, Collections.emptyMap());
}
public void testFetchSourceValue() throws IOException {
MappedFieldType fieldType = new DateFieldType("field", Resolution.MILLISECONDS);
String date = "2020-05-15T21:33:02.000Z";
assertEquals(Collections.singletonList(date), fetchSourceValue(fieldType, date));
assertEquals(Collections.singletonList(date), fetchSourceValue(fieldType, 1589578382000L));
MappedFieldType fieldWithFormat = fieldType(Resolution.MILLISECONDS, "yyyy/MM/dd||epoch_millis", null);
String dateInFormat = "1990/12/29";
assertEquals(Collections.singletonList(dateInFormat), fetchSourceValue(fieldWithFormat, dateInFormat));
assertEquals(Collections.singletonList(dateInFormat), fetchSourceValue(fieldWithFormat, 662428800000L));
MappedFieldType millis = fieldType(Resolution.MILLISECONDS, "epoch_millis", null);
String dateInMillis = "662428800000";
assertEquals(Collections.singletonList(dateInMillis), fetchSourceValue(millis, dateInMillis));
assertEquals(Collections.singletonList(dateInMillis), fetchSourceValue(millis, 662428800000L));
String nullValueDate = "2020-05-15T21:33:02.000Z";
MappedFieldType nullFieldType = fieldType(Resolution.MILLISECONDS, "strict_date_time", nullValueDate);
assertEquals(Collections.singletonList(nullValueDate), fetchSourceValue(nullFieldType, null));
}
public void testParseSourceValueWithFormat() throws IOException {
MappedFieldType mapper = fieldType(Resolution.NANOSECONDS, "strict_date_time", "1970-12-29T00:00:00.000Z");
String date = "1990-12-29T00:00:00.000Z";
assertEquals(Collections.singletonList("1990/12/29"), fetchSourceValue(mapper, date, "yyyy/MM/dd"));
assertEquals(Collections.singletonList("662428800000"), fetchSourceValue(mapper, date, "epoch_millis"));
assertEquals(Collections.singletonList("1970/12/29"), fetchSourceValue(mapper, null, "yyyy/MM/dd"));
}
public void testParseSourceValueNanos() throws IOException {
MappedFieldType mapper = fieldType(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", null);
String date = "2020-05-15T21:33:02.123456789Z";
assertEquals(Collections.singletonList("2020-05-15T21:33:02.123456789Z"), fetchSourceValue(mapper, date));
assertEquals(Collections.singletonList("2020-05-15T21:33:02.123Z"), fetchSourceValue(mapper, 1589578382123L));
String nullValueDate = "2020-05-15T21:33:02.123456789Z";
MappedFieldType nullValueMapper = fieldType(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", nullValueDate);
assertEquals(Collections.singletonList(nullValueDate), fetchSourceValue(nullValueMapper, null));
}
}

View File

@ -70,6 +70,11 @@ public class DocumentFieldMapperTests extends LuceneTestCase {
super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap());
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
public String typeName() {
return "fake";
@ -86,11 +91,6 @@ public class DocumentFieldMapperTests extends LuceneTestCase {
protected void parseCreateField(ParseContext context) {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
protected String contentType() {
return null;

View File

@ -104,6 +104,16 @@ public class ExternalMapper extends ParametrizedFieldMapper {
public String typeName() {
return "faketype";
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
}
private final String generatedValue;
@ -165,16 +175,6 @@ public class ExternalMapper extends ParametrizedFieldMapper {
throw new UnsupportedOperationException();
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
@Override
public Iterator<Mapper> iterator() {
return Iterators.concat(super.iterator(), Arrays.asList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator());

View File

@ -92,6 +92,16 @@ public class FakeStringFieldMapper extends FieldMapper {
public String typeName() {
return CONTENT_TYPE;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected String parseSourceValue(Object value) {
return value.toString();
}
};
}
}
protected FakeStringFieldMapper(FieldType fieldType, MappedFieldType mappedFieldType,
@ -121,16 +131,6 @@ public class FakeStringFieldMapper extends FieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected String parseSourceValue(Object value) {
return value.toString();
}
};
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {

View File

@ -19,19 +19,13 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.lookup.SourceLookup;
import org.hamcrest.CoreMatchers;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.geometry.utils.Geohash.stringEncode;
@ -331,39 +325,6 @@ public class GeoPointFieldMapperTests extends FieldMapperTestCase2<GeoPointField
);
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
AbstractGeometryFieldMapper<?, ?> mapper = new GeoPointFieldMapper.Builder("field").build(context);
SourceLookup sourceLookup = new SourceLookup();
Map<String, Object> jsonPoint = org.elasticsearch.common.collect.Map.of("type", "Point", "coordinates", List.of(42.0, 27.1));
Map<String, Object> otherJsonPoint = org.elasticsearch.common.collect.Map.of("type", "Point", "coordinates", List.of(30.0, 50.0));
String wktPoint = "POINT (42.0 27.1)";
String otherWktPoint = "POINT (30.0 50.0)";
// Test a single point in [lon, lat] array format.
Object sourceValue = List.of(42.0, 27.1);
assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a single point in "lat, lon" string format.
sourceValue = "27.1,42.0";
assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a list of points in [lon, lat] array format.
sourceValue = List.of(List.of(42.0, 27.1), List.of(30.0, 50.0));
assertEquals(List.of(jsonPoint, otherJsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktPoint, otherWktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a single point in well-known text format.
sourceValue = "POINT (42.0 27.1)";
assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
}
@Override
protected GeoPointFieldMapper.Builder newBuilder() {
return new GeoPointFieldMapper.Builder("geo");

View File

@ -0,0 +1,69 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class GeoPointFieldTypeTests extends FieldTypeTestCase {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType mapper = new GeoPointFieldMapper.Builder("field").build(context).fieldType();
Map<String, Object> jsonPoint = new HashMap<>();
jsonPoint.put("type", "Point");
jsonPoint.put("coordinates", Arrays.asList(42.0, 27.1));
Map<String, Object> otherJsonPoint = new HashMap<>();
otherJsonPoint.put("type", "Point");
otherJsonPoint.put("coordinates", Arrays.asList(30.0, 50.0));
String wktPoint = "POINT (42.0 27.1)";
String otherWktPoint = "POINT (30.0 50.0)";
// Test a single point in [lon, lat] array format.
Object sourceValue = Arrays.asList(42.0, 27.1);
assertEquals(Collections.singletonList(jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Collections.singletonList(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a single point in "lat, lon" string format.
sourceValue = "27.1,42.0";
assertEquals(Collections.singletonList(jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Collections.singletonList(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a list of points in [lon, lat] array format.
sourceValue = Arrays.asList(Arrays.asList(42.0, 27.1), Arrays.asList(30.0, 50.0));
assertEquals(Arrays.asList(jsonPoint, otherJsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Arrays.asList(wktPoint, otherWktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a single point in well-known text format.
sourceValue = "POINT (42.0 27.1)";
assertEquals(Collections.singletonList(jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Collections.singletonList(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
}
}
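For orientation, this test feeds the same point through three accepted _source representations (a [lon, lat] array, a "lat, lon" string, and WKT) and checks both supported fetch formats. A minimal sketch of the two expected output shapes, using hypothetical helpers rather than Elasticsearch API:
// GeoJSON output (the default, format == null): lon-first coordinates.
static Map<String, Object> geoJsonPoint(double lon, double lat) {
    Map<String, Object> point = new HashMap<>();
    point.put("type", "Point");
    point.put("coordinates", Arrays.asList(lon, lat));
    return point;
}
// WKT output (format == "wkt"), also lon-first.
static String wktPoint(double lon, double lat) {
    return "POINT (" + lon + " " + lat + ")";
}
The axis-order pitfall these cases guard against: GeoJSON and WKT are longitude-first, while the plain string form "27.1,42.0" is latitude-first.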

View File

@ -18,13 +18,10 @@
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;
@ -34,7 +31,6 @@ import org.junit.Before;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import static org.hamcrest.Matchers.containsString;
@ -237,39 +233,6 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase2<GeoShapeField
assertThat(document.docs().get(0).getFields("field").length, equalTo(2));
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
GeoShapeFieldMapper mapper = new GeoShapeFieldMapper.Builder("field").build(context);
Map<String, Object> jsonLineString = org.elasticsearch.common.collect.Map.of("type", "LineString", "coordinates",
List.of(List.of(42.0, 27.1), List.of(30.0, 50.0)));
Map<String, Object> jsonPoint = org.elasticsearch.common.collect.Map.of("type", "Point", "coordinates", List.of(14.0, 15.0));
String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)";
String wktPoint = "POINT (14.0 15.0)";
// Test a single shape in geojson format.
Object sourceValue = jsonLineString;
assertEquals(List.of(jsonLineString), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a list of shapes in geojson format.
sourceValue = List.of(jsonLineString, jsonPoint);
assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a single shape in wkt format.
sourceValue = wktLineString;
assertEquals(List.of(jsonLineString), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a list of shapes in wkt format.
sourceValue = List.of(wktLineString, wktPoint);
assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
}
@Override
protected boolean supportsMeta() {
return false;

View File

@ -0,0 +1,68 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
public class GeoShapeFieldTypeTests extends FieldTypeTestCase {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType mapper = new GeoShapeFieldMapper.Builder("field").build(context).fieldType();
Map<String, Object> jsonLineString = org.elasticsearch.common.collect.Map.of(
"type", "LineString",
"coordinates", Arrays.asList(Arrays.asList(42.0, 27.1), Arrays.asList(30.0, 50.0)));
Map<String, Object> jsonPoint = org.elasticsearch.common.collect.Map.of(
"type", "Point",
"coordinates", Arrays.asList(14.0, 15.0));
String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)";
String wktPoint = "POINT (14.0 15.0)";
// Test a single shape in geojson format.
Object sourceValue = jsonLineString;
assertEquals(Collections.singletonList(jsonLineString), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Collections.singletonList(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a list of shapes in geojson format.
sourceValue = Arrays.asList(jsonLineString, jsonPoint);
assertEquals(Arrays.asList(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Arrays.asList(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a single shape in wkt format.
sourceValue = wktLineString;
assertEquals(Collections.singletonList(jsonLineString), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Collections.singletonList(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a list of shapes in wkt format.
sourceValue = Arrays.asList(wktLineString, wktPoint);
assertEquals(Arrays.asList(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Arrays.asList(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
}
}

View File

@ -26,11 +26,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.termvectors.TermVectorsService;
@ -208,19 +204,4 @@ public class IpFieldMapperTests extends MapperTestCase {
}));
assertWarnings("Error parsing [:1] as IP in [null_value] on field [field]); [null_value] will be ignored");
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
IpFieldMapper mapper = new IpFieldMapper.Builder("field", true, Version.CURRENT).build(context);
assertEquals(List.of("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8::2:1"));
assertEquals(List.of("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8:0:0:0:0:2:1"));
assertEquals(List.of("::1"), fetchSourceValue(mapper, "0:0:0:0:0:0:0:1"));
IpFieldMapper nullValueMapper = new IpFieldMapper.Builder("field", true, Version.CURRENT)
.nullValue("2001:db8:0:0:0:0:2:7")
.build(context);
assertEquals(List.of("2001:db8::2:7"), fetchSourceValue(nullValueMapper, null));
}
}

View File

@ -24,8 +24,12 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Arrays;
import java.util.Collections;
@ -71,7 +75,7 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
prefix = ip + "/16";
assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16), ft.termQuery(prefix, null));
MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, true, Collections.emptyMap());
MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, true, null, Collections.emptyMap());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> unsearchable.termQuery("::1", null));
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
@ -174,9 +178,26 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
InetAddresses.forString("2001:db8::")),
ft.rangeQuery("::ffff:c0a8:107", "2001:db8::", true, true, null, null, null, null));
MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, true, Collections.emptyMap());
MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, true, null, Collections.emptyMap());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> unsearchable.rangeQuery("::1", "2001::", true, true, null, null, null, null));
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType mapper
= new IpFieldMapper.Builder("field", true, Version.CURRENT).build(context).fieldType();
assertEquals(Collections.singletonList("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8::2:1"));
assertEquals(Collections.singletonList("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8:0:0:0:0:2:1"));
assertEquals(Collections.singletonList("::1"), fetchSourceValue(mapper, "0:0:0:0:0:0:0:1"));
MappedFieldType nullValueMapper = new IpFieldMapper.Builder("field", true, Version.CURRENT)
.nullValue("2001:db8:0:0:0:0:2:7")
.build(context)
.fieldType();
assertEquals(Collections.singletonList("2001:db8::2:7"), fetchSourceValue(nullValueMapper, null));
}
}
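These assertions depend on IPv6 canonicalization: both the expanded and the compressed spelling of an address fetch as the compressed RFC 5952 form. A minimal sketch of that round trip through org.elasticsearch.common.network.InetAddresses, which this test already imports:
InetAddress address = InetAddresses.forString("2001:db8:0:0:0:0:2:1");
String canonical = InetAddresses.toAddrString(address); // "2001:db8::2:1"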

View File

@ -20,14 +20,10 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@ -35,11 +31,9 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.index.mapper.FieldMapperTestCase.fetchSourceValue;
import static org.hamcrest.Matchers.containsString;
public class IpRangeFieldMapperTests extends ESSingleNodeTestCase {
@ -86,14 +80,4 @@ public class IpRangeFieldMapperTests extends ESSingleNodeTestCase {
assertThat(storedField.stringValue(), containsString(strVal));
}
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RangeFieldMapper mapper = new RangeFieldMapper.Builder("field", RangeType.IP, true).build(context);
Map<String, Object> range = org.elasticsearch.common.collect.Map.of("gte", "2001:db8:0:0:0:0:2:1");
assertEquals(List.of(org.elasticsearch.common.collect.Map.of("gte", "2001:db8::2:1")), fetchSourceValue(mapper, range));
assertEquals(List.of("2001:db8::2:1/32"), fetchSourceValue(mapper, "2001:db8:0:0:0:0:2:1/32"));
}
}

View File

@ -0,0 +1,42 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
public class IpRangeFieldTypeTests extends FieldTypeTestCase {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RangeFieldMapper mapper = new RangeFieldMapper.Builder("field", RangeType.IP, true).build(context);
Map<String, Object> range = org.elasticsearch.common.collect.Map.of("gte", "2001:db8:0:0:0:0:2:1");
assertEquals(Collections.singletonList(org.elasticsearch.common.collect.Map.of("gte", "2001:db8::2:1")),
fetchSourceValue(mapper.fieldType(), range));
assertEquals(Collections.singletonList("2001:db8::2:1/32"), fetchSourceValue(mapper.fieldType(), "2001:db8:0:0:0:0:2:1/32"));
}
}
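The CIDR assertion implies that the address part is canonicalized while the prefix length is preserved. A hedged sketch of that normalization, inferred only from the expected value and assuming org.elasticsearch.common.network.InetAddresses:
String cidr = "2001:db8:0:0:0:0:2:1/32";
int slash = cidr.indexOf('/');
String normalized = InetAddresses.toAddrString(InetAddresses.forString(cidr.substring(0, slash)))
        + cidr.substring(slash); // "2001:db8::2:1/32"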

View File

@ -31,10 +31,7 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalyzerScope;
@ -467,35 +464,4 @@ public class KeywordFieldMapperTests extends MapperTestCase {
new String[] { "hello world" }
);
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
KeywordFieldMapper mapper = new KeywordFieldMapper.Builder("field").build(context);
assertEquals(org.elasticsearch.common.collect.List.of("value"), fetchSourceValue(mapper, "value"));
assertEquals(org.elasticsearch.common.collect.List.of("42"), fetchSourceValue(mapper, 42L));
assertEquals(org.elasticsearch.common.collect.List.of("true"), fetchSourceValue(mapper, true));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> fetchSourceValue(mapper, "value", "format"));
assertEquals("Field [field] of type [keyword] doesn't support formats.", e.getMessage());
KeywordFieldMapper ignoreAboveMapper = new KeywordFieldMapper.Builder("field")
.ignoreAbove(4)
.build(context);
assertEquals(org.elasticsearch.common.collect.List.of(), fetchSourceValue(ignoreAboveMapper, "value"));
assertEquals(org.elasticsearch.common.collect.List.of("42"), fetchSourceValue(ignoreAboveMapper, 42L));
assertEquals(org.elasticsearch.common.collect.List.of("true"), fetchSourceValue(ignoreAboveMapper, true));
KeywordFieldMapper normalizerMapper = new KeywordFieldMapper.Builder("field", createIndexAnalyzers(null)).normalizer("lowercase")
.build(context);
assertEquals(org.elasticsearch.common.collect.List.of("value"), fetchSourceValue(normalizerMapper, "VALUE"));
assertEquals(org.elasticsearch.common.collect.List.of("42"), fetchSourceValue(normalizerMapper, 42L));
assertEquals(org.elasticsearch.common.collect.List.of("value"), fetchSourceValue(normalizerMapper, "value"));
KeywordFieldMapper nullValueMapper = new KeywordFieldMapper.Builder("field")
.nullValue("NULL")
.build(context);
assertEquals(org.elasticsearch.common.collect.List.of("NULL"), fetchSourceValue(nullValueMapper, null));
}
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
@ -36,11 +37,20 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.CustomAnalyzer;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.LowercaseNormalizer;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
@ -56,9 +66,9 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase {
KeywordFieldType ft = new KeywordFieldType("field");
// current impl ignores args and should always return INTERSECTS
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null,
RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5),
RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5),
randomBoolean(), randomBoolean(), null, null, null));
RandomStrings.randomAsciiLettersOfLengthBetween(random(), 0, 5),
RandomStrings.randomAsciiLettersOfLengthBetween(random(), 0, 5),
randomBoolean(), randomBoolean(), null, null, null));
}
public void testTermQuery() {
@ -110,8 +120,7 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase {
{
FieldType fieldType = new FieldType();
fieldType.setOmitNorms(false);
KeywordFieldType ft = new KeywordFieldType("field", false, fieldType, randomBoolean(), null, null, null, 1.0f,
Collections.emptyMap());
KeywordFieldType ft = new KeywordFieldType("field", fieldType);
assertEquals(new NormsFieldExistsQuery("field"), ft.existsQuery(null));
}
{
@ -170,4 +179,73 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase {
ft = new KeywordFieldType("field", Lucene.STANDARD_ANALYZER);
assertEquals(new TermQuery(new Term("field", new BytesRef("foo"))), ft.termQuery("FOO", null));
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType mapper = new KeywordFieldMapper.Builder("field").build(context).fieldType();
assertEquals(Collections.singletonList("value"), fetchSourceValue(mapper, "value"));
assertEquals(Collections.singletonList("42"), fetchSourceValue(mapper, 42L));
assertEquals(Collections.singletonList("true"), fetchSourceValue(mapper, true));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> fetchSourceValue(mapper, "value", "format"));
assertEquals("Field [field] of type [keyword] doesn't support formats.", e.getMessage());
MappedFieldType ignoreAboveMapper = new KeywordFieldMapper.Builder("field")
.ignoreAbove(4)
.build(context)
.fieldType();
assertEquals(Collections.emptyList(), fetchSourceValue(ignoreAboveMapper, "value"));
assertEquals(Collections.singletonList("42"), fetchSourceValue(ignoreAboveMapper, 42L));
assertEquals(Collections.singletonList("true"), fetchSourceValue(ignoreAboveMapper, true));
MappedFieldType normalizerMapper = new KeywordFieldMapper.Builder("field", createIndexAnalyzers()).normalizer("lowercase")
.build(context)
.fieldType();
assertEquals(Collections.singletonList("value"), fetchSourceValue(normalizerMapper, "VALUE"));
assertEquals(Collections.singletonList("42"), fetchSourceValue(normalizerMapper, 42L));
assertEquals(Collections.singletonList("value"), fetchSourceValue(normalizerMapper, "value"));
MappedFieldType nullValueMapper = new KeywordFieldMapper.Builder("field")
.nullValue("NULL")
.build(context)
.fieldType();
assertEquals(Collections.singletonList("NULL"), fetchSourceValue(nullValueMapper, null));
}
private static IndexAnalyzers createIndexAnalyzers() {
return new IndexAnalyzers(
org.elasticsearch.common.collect.Map.of("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer())),
org.elasticsearch.common.collect.Map.ofEntries(
org.elasticsearch.common.collect.Map.entry("lowercase",
new NamedAnalyzer("lowercase", AnalyzerScope.INDEX, new LowercaseNormalizer())),
org.elasticsearch.common.collect.Map.entry("other_lowercase",
new NamedAnalyzer("other_lowercase", AnalyzerScope.INDEX, new LowercaseNormalizer()))
),
org.elasticsearch.common.collect.Map.of(
"lowercase",
new NamedAnalyzer(
"lowercase",
AnalyzerScope.INDEX,
new CustomAnalyzer(
TokenizerFactory.newFactory("lowercase", WhitespaceTokenizer::new),
new CharFilterFactory[0],
new TokenFilterFactory[] { new TokenFilterFactory() {
@Override
public String name() {
return "lowercase";
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new org.apache.lucene.analysis.core.LowerCaseFilter(tokenStream);
}
} }
)
)
)
);
}
}
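The ignore_above expectations reduce to a length filter applied at fetch time. A sketch of that rule (not the actual implementation), using the test's threshold of 4:
int ignoreAbove = 4;
String tooLong = "value".length() > ignoreAbove ? null : "value"; // null: dropped, so the fetcher returns an empty list
String kept = "true".length() > ignoreAbove ? null : "true";      // "true" is short enough and survives, as asserted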

View File

@ -24,8 +24,6 @@ import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.List;
@ -33,7 +31,6 @@ import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.geometry.Point;
@ -43,7 +40,6 @@ import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.singletonMap;
@ -635,38 +631,4 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase2<LegacyG
assertThat(fields.length, equalTo(2));
assertFieldWarnings("tree", "strategy");
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
LegacyGeoShapeFieldMapper mapper = new LegacyGeoShapeFieldMapper.Builder("field").build(context);
Map<String, Object> jsonLineString = org.elasticsearch.common.collect.Map.of("type", "LineString", "coordinates",
List.of(List.of(42.0, 27.1), List.of(30.0, 50.0)));
Map<String, Object> jsonPoint = org.elasticsearch.common.collect.Map.of("type", "Point", "coordinates",
org.elasticsearch.common.collect.List.of(14.0, 15.0));
String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)";
String wktPoint = "POINT (14.0 15.0)";
// Test a single shape in geojson format.
Object sourceValue = jsonLineString;
assertEquals(List.of(jsonLineString), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a list of shapes in geojson format.
sourceValue = List.of(jsonLineString, jsonPoint);
assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a single shape in wkt format.
sourceValue = wktLineString;
assertEquals(List.of(jsonLineString), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a list of shapes in wkt format.
sourceValue = List.of(wktLineString, wktPoint);
assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
}
}

View File

@ -18,9 +18,17 @@
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper.GeoShapeFieldType;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
public class LegacyGeoShapeFieldTypeTests extends FieldTypeTestCase {
/**
@ -35,4 +43,39 @@ public class LegacyGeoShapeFieldTypeTests extends FieldTypeTestCase {
fieldType.setStrategy(SpatialStrategy.TERM);
assertTrue(fieldType.pointsOnly());
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType mapper = new LegacyGeoShapeFieldMapper.Builder("field").build(context).fieldType();
Map<String, Object> jsonLineString = org.elasticsearch.common.collect.Map.of("type", "LineString", "coordinates",
Arrays.asList(Arrays.asList(42.0, 27.1), Arrays.asList(30.0, 50.0)));
Map<String, Object> jsonPoint = org.elasticsearch.common.collect.Map.of(
"type", "Point",
"coordinates", Arrays.asList(14.0, 15.0));
String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)";
String wktPoint = "POINT (14.0 15.0)";
// Test a single shape in geojson format.
Object sourceValue = jsonLineString;
assertEquals(Collections.singletonList(jsonLineString), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Collections.singletonList(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a list of shapes in geojson format.
sourceValue = Arrays.asList(jsonLineString, jsonPoint);
assertEquals(Arrays.asList(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Arrays.asList(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a single shape in wkt format.
sourceValue = wktLineString;
assertEquals(Collections.singletonList(jsonLineString), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Collections.singletonList(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt"));
// Test a list of shapes in wkt format.
sourceValue = Arrays.asList(wktLineString, wktPoint);
assertEquals(Arrays.asList(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(Arrays.asList(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
}
}

View File

@ -21,11 +21,8 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
@ -229,21 +226,6 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
assertFalse(dvField.fieldType().stored());
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
NumberFieldMapper mapper = new NumberFieldMapper.Builder("field", NumberType.INTEGER, false, true).build(context);
assertEquals(org.elasticsearch.common.collect.List.of(3), fetchSourceValue(mapper, 3.14));
assertEquals(org.elasticsearch.common.collect.List.of(42), fetchSourceValue(mapper, "42.9"));
NumberFieldMapper nullValueMapper = new NumberFieldMapper.Builder("field", NumberType.FLOAT, false, true)
.nullValue(2.71f)
.build(context);
assertEquals(org.elasticsearch.common.collect.List.of(2.71f), fetchSourceValue(nullValueMapper, ""));
assertEquals(org.elasticsearch.common.collect.List.of(2.71f), fetchSourceValue(nullValueMapper, null));
}
public void testOutOfRangeValues() throws IOException {
final List<OutOfRangeSpec> inputs = Arrays.asList(
OutOfRangeSpec.of(NumberType.BYTE, "128", "is out of range for a byte"),

View File

@ -131,12 +131,15 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
assertTrue(ft.termQuery(42.1, null) instanceof MatchNoDocsQuery);
}
private static MappedFieldType unsearchable() {
return new NumberFieldType("field", NumberType.LONG, false, false, true, true, null, Collections.emptyMap());
}
public void testTermQuery() {
MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG);
assertEquals(LongPoint.newExactQuery("field", 42), ft.termQuery("42", null));
MappedFieldType unsearchable
= new NumberFieldType("field", NumberType.LONG, false, false, true, Collections.emptyMap());
MappedFieldType unsearchable = unsearchable();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> unsearchable.termQuery("42", null));
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
@ -253,7 +256,7 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
SortedNumericDocValuesField.newSlowRangeQuery("field", 1, 3));
assertEquals(expected, ft.rangeQuery("1", "3", true, true, null, null, null, MOCK_QSC));
MappedFieldType unsearchable = new NumberFieldType("field", NumberType.LONG, false, false, true, Collections.emptyMap());
MappedFieldType unsearchable = unsearchable();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> unsearchable.rangeQuery("1", "3", true, true, null, null, null, MOCK_QSC));
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
@ -639,4 +642,22 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
assertThat(NumberType.HALF_FLOAT.parsePoint(bytes), equalTo(value));
}
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType mapper = new NumberFieldMapper.Builder("field", NumberType.INTEGER, false, true)
.build(context)
.fieldType();
assertEquals(Collections.singletonList(3), fetchSourceValue(mapper, 3.14));
assertEquals(Collections.singletonList(42), fetchSourceValue(mapper, "42.9"));
MappedFieldType nullValueMapper = new NumberFieldMapper.Builder("field", NumberType.FLOAT, false, true)
.nullValue(2.71f)
.build(context)
.fieldType();
assertEquals(Collections.singletonList(2.71f), fetchSourceValue(nullValueMapper, ""));
assertEquals(Collections.singletonList(2.71f), fetchSourceValue(nullValueMapper, null));
}
}
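The INTEGER expectations document the coercion rule: fractional input is truncated, whether it arrives as a number or as a numeric string. In plain Java terms:
int fromDouble = (int) 3.14;                       // 3, matching fetchSourceValue(mapper, 3.14)
int fromString = (int) Double.parseDouble("42.9"); // 42, matching fetchSourceValue(mapper, "42.9")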

View File

@ -35,7 +35,6 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ParametrizedFieldMapper.Parameter;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Arrays;
@ -193,11 +192,6 @@ public class ParametrizedMapperTests extends MapperServiceTestCase {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
return null;
}
@Override
protected String contentType() {
return "test_mapper";

View File

@ -22,13 +22,9 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -36,7 +32,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD;
@ -367,39 +362,4 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
assertThat(e.getMessage(), containsString("Invalid format: [[test_format]]: Unknown pattern letter: t"));
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RangeFieldMapper longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build(context);
Map<String, Object> longRange = org.elasticsearch.common.collect.Map.of("gte", 3.14, "lt", "42.9");
assertEquals(List.of(org.elasticsearch.common.collect.Map.of("gte", 3L, "lt", 42L)),
fetchSourceValue(longMapper, longRange));
RangeFieldMapper dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true)
.format("yyyy/MM/dd||epoch_millis")
.build(context);
Map<String, Object> dateRange = org.elasticsearch.common.collect.Map.of("lt", "1990/12/29", "gte", 597429487111L);
assertEquals(List.of(org.elasticsearch.common.collect.Map.of("lt", "1990/12/29", "gte", "1988/12/06")),
fetchSourceValue(dateMapper, dateRange));
}
public void testParseSourceValueWithFormat() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RangeFieldMapper longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build(context);
Map<String, Object> longRange = org.elasticsearch.common.collect.Map.of("gte", 3.14, "lt", "42.9");
assertEquals(List.of(org.elasticsearch.common.collect.Map.of("gte", 3L, "lt", 42L)),
fetchSourceValue(longMapper, longRange));
RangeFieldMapper dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true)
.format("strict_date_time")
.build(context);
Map<String, Object> dateRange = org.elasticsearch.common.collect.Map.of("lt", "1990-12-29T00:00:00.000Z");
assertEquals(List.of(org.elasticsearch.common.collect.Map.of("lt", "1990/12/29")),
fetchSourceValue(dateMapper, dateRange, "yyy/MM/dd"));
assertEquals(List.of(org.elasticsearch.common.collect.Map.of("lt", "662428800000")),
fetchSourceValue(dateMapper, dateRange,"epoch_millis"));
}
}

View File

@ -47,8 +47,10 @@ import org.elasticsearch.test.IndexSettingsModule;
import org.joda.time.DateTime;
import org.junit.Before;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Collections;
import java.util.Map;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
@ -66,9 +68,9 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
private RangeFieldType createDefaultFieldType() {
if (type == RangeType.DATE) {
return new RangeFieldType("field", true, false, true, RangeFieldMapper.Defaults.DATE_FORMATTER, Collections.emptyMap());
return new RangeFieldType("field", RangeFieldMapper.Defaults.DATE_FORMATTER);
}
return new RangeFieldType("field", type, true, false, true, Collections.emptyMap());
return new RangeFieldType("field", type);
}
public void testRangeQuery() throws Exception {
@ -216,8 +218,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
public void testDateRangeQueryUsingMappingFormat() {
QueryShardContext context = createContext();
RangeFieldType strict
= new RangeFieldType("field", true, false, false, RangeFieldMapper.Defaults.DATE_FORMATTER, Collections.emptyMap());
RangeFieldType strict = new RangeFieldType("field", RangeFieldMapper.Defaults.DATE_FORMATTER);
// don't use DISJOINT here because it doesn't work on date fields which we want to compare bounds with
ShapeRelation relation = randomValueOtherThan(ShapeRelation.DISJOINT,() -> randomFrom(ShapeRelation.values()));
@ -236,13 +237,13 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
assertEquals(1465975790000L, formatter.parseMillis(from));
assertEquals(1466062190000L, formatter.parseMillis(to));
RangeFieldType fieldType = new RangeFieldType("field", true, false, true, formatter, Collections.emptyMap());
RangeFieldType fieldType = new RangeFieldType("field", formatter);
final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context);
assertEquals("field:<ranges:[1465975790000 : 1466062190999]>", query.toString());
// compare lower and upper bounds with what we would get on a `date` field
DateFieldType dateFieldType
= new DateFieldType("field", true, false, true, formatter, DateFieldMapper.Resolution.MILLISECONDS, Collections.emptyMap());
= new DateFieldType("field", DateFieldMapper.Resolution.MILLISECONDS, formatter);
final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context);
assertEquals("field:[1465975790000 TO 1466062190999]", queryOnDateField.toString());
}
@ -259,7 +260,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
long lower = randomLongBetween(formatter.parseMillis("2000-01-01T00:00"), formatter.parseMillis("2010-01-01T00:00"));
long upper = randomLongBetween(formatter.parseMillis("2011-01-01T00:00"), formatter.parseMillis("2020-01-01T00:00"));
RangeFieldType fieldType = new RangeFieldType("field", true, false, false, formatter, Collections.emptyMap());
RangeFieldType fieldType = new RangeFieldType("field", true, false, false, formatter, false, null);
String lowerAsString = formatter.formatMillis(lower);
String upperAsString = formatter.formatMillis(upper);
// also add date math rounding to days occasionally
@ -481,7 +482,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
assertEquals(getExpectedRangeQuery(relation, value, value, includeLower, includeUpper),
ft.termQuery(value, context));
}
public void testCaseInsensitiveQuery() throws Exception {
QueryShardContext context = createContext();
RangeFieldType ft = createDefaultFieldType();
@ -491,4 +492,46 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
() -> ft.termQueryCaseInsensitive(value, context));
assertTrue(ex.getMessage().contains("does not support case insensitive term queries"));
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true)
.build(context)
.fieldType();
Map<String, Object> longRange = org.elasticsearch.common.collect.Map.of("gte", 3.14, "lt", "42.9");
assertEquals(Collections.singletonList(org.elasticsearch.common.collect.Map.of("gte", 3L, "lt", 42L)),
fetchSourceValue(longMapper, longRange));
MappedFieldType dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true)
.format("yyyy/MM/dd||epoch_millis")
.build(context)
.fieldType();
Map<String, Object> dateRange = org.elasticsearch.common.collect.Map.of("lt", "1990/12/29", "gte", 597429487111L);
assertEquals(Collections.singletonList(org.elasticsearch.common.collect.Map.of("lt", "1990/12/29", "gte", "1988/12/06")),
fetchSourceValue(dateMapper, dateRange));
}
public void testParseSourceValueWithFormat() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true)
.build(context)
.fieldType();
Map<String, Object> longRange = org.elasticsearch.common.collect.Map.of("gte", 3.14, "lt", "42.9");
assertEquals(Collections.singletonList(org.elasticsearch.common.collect.Map.of("gte", 3L, "lt", 42L)),
fetchSourceValue(longMapper, longRange));
MappedFieldType dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true)
.format("strict_date_time")
.build(context)
.fieldType();
Map<String, Object> dateRange = org.elasticsearch.common.collect.Map.of("lt", "1990-12-29T00:00:00.000Z");
assertEquals(Collections.singletonList(org.elasticsearch.common.collect.Map.of("lt", "1990/12/29")),
fetchSourceValue(dateMapper, dateRange, "yyy/MM/dd"));
assertEquals(Collections.singletonList(org.elasticsearch.common.collect.Map.of("lt", "662428800000")),
fetchSourceValue(dateMapper, dateRange,"epoch_millis"));
}
}
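The date-range conversion relies on "||"-separated multi-format patterns: any listed pattern may parse an incoming value, and the first one is used for printing. A short sketch with the formatter from the test, using the same DateFormatter API exercised elsewhere in this file:
DateFormatter formatter = DateFormatter.forPattern("yyyy/MM/dd||epoch_millis");
long millis = formatter.parseMillis("597429487111"); // parsed by the epoch_millis branch
String printed = formatter.formatMillis(millis);     // "1988/12/06", printed with the first pattern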

View File

@ -50,11 +50,8 @@ import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -993,16 +990,4 @@ public class TextFieldMapperTests extends FieldMapperTestCase2<TextFieldMapper.B
assertThat(mapperService.documentMapper().mappers().getMapper("field"), instanceOf(TextFieldMapper.class));
assertThat(mapperService.documentMapper().mappers().getMapper("other_field"), instanceOf(KeywordFieldMapper.class));
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
FieldMapper fieldMapper = newBuilder().build(context);
TextFieldMapper mapper = (TextFieldMapper) fieldMapper;
assertEquals(org.elasticsearch.common.collect.List.of("value"), fetchSourceValue(mapper, "value"));
assertEquals(org.elasticsearch.common.collect.List.of("42"), fetchSourceValue(mapper, 42L));
assertEquals(org.elasticsearch.common.collect.List.of("true"), fetchSourceValue(mapper, true));
}
}

View File

@ -35,11 +35,15 @@ import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.search.AutomatonQueries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -160,4 +164,15 @@ public class TextFieldTypeTests extends FieldTypeTestCase {
assertThat(q, equalTo(expected));
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType mapper = new TextFieldMapper.Builder("field").build(context).fieldType();
assertEquals(Collections.singletonList("value"), fetchSourceValue(mapper, "value"));
assertEquals(Collections.singletonList("42"), fetchSourceValue(mapper, 42L));
assertEquals(Collections.singletonList("true"), fetchSourceValue(mapper, true));
}
}

View File

@ -98,7 +98,8 @@ public class AggregatorBaseTests extends ESSingleNodeTestCase {
boolean indexed,
QueryShardContext context
) {
MappedFieldType ft = new NumberFieldMapper.NumberFieldType(fieldName, numType, indexed, false, true, Collections.emptyMap());
MappedFieldType ft
= new NumberFieldMapper.NumberFieldType(fieldName, numType, indexed, false, true, false, null, Collections.emptyMap());
return ValuesSourceConfig.resolveFieldOnly(ft, context);
}
@ -109,7 +110,7 @@ public class AggregatorBaseTests extends ESSingleNodeTestCase {
QueryShardContext context
) {
MappedFieldType ft = new DateFieldMapper.DateFieldType(fieldName, indexed, false, true,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, resolution, Collections.emptyMap());
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, resolution, null, Collections.emptyMap());
return ValuesSourceConfig.resolveFieldOnly(ft, context);
}

View File

@ -99,6 +99,6 @@ public abstract class DateHistogramAggregatorTestCase extends AggregatorTestCase
return new DateFieldMapper.DateFieldType(AGGREGABLE_DATE, isSearchable, false, true,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
useNanosecondResolution ? DateFieldMapper.Resolution.NANOSECONDS : DateFieldMapper.Resolution.MILLISECONDS,
Collections.emptyMap());
null, Collections.emptyMap());
}
}

View File

@ -272,7 +272,7 @@ public class DateRangeAggregatorTests extends AggregatorTestCase {
Consumer<InternalRange<? extends InternalRange.Bucket, ? extends InternalRange>> verify,
DateFieldMapper.Resolution resolution) throws IOException {
DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD_NAME, true, false, true,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, resolution, Collections.emptyMap());
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, resolution, null, Collections.emptyMap());
DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("test_range_agg");
aggregationBuilder.field(DATE_FIELD_NAME);
aggregationBuilder.addRange("2015-01-01", "2015-12-31");

View File

@ -120,7 +120,7 @@ public class RangeAggregatorTests extends AggregatorTestCase {
@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/57651")
public void testDateFieldNanosecondResolution() throws IOException {
DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD_NAME, true, false, true,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, DateFieldMapper.Resolution.NANOSECONDS, Collections.emptyMap());
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, DateFieldMapper.Resolution.NANOSECONDS, null, Collections.emptyMap());
// These values should work because aggs scale nanosecond up to millisecond always.
long milli1 = ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
@ -144,7 +144,7 @@ public class RangeAggregatorTests extends AggregatorTestCase {
@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/57651")
public void testMissingDateWithDateField() throws IOException {
DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD_NAME, true, false, true,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, DateFieldMapper.Resolution.NANOSECONDS, Collections.emptyMap());
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, DateFieldMapper.Resolution.NANOSECONDS, null, Collections.emptyMap());
// These values should work because aggs scale nanosecond up to millisecond always.
long milli1 = ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC).toInstant().toEpochMilli();

View File

@ -32,12 +32,15 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.InnerHitBuilderTests;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -155,14 +158,14 @@ public class CollapseBuilderTests extends AbstractSerializingTestCase<CollapseBu
numberFieldType =
new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG, true, false,
false, Collections.emptyMap());
false, false, null, Collections.emptyMap());
when(shardContext.fieldMapper("field")).thenReturn(numberFieldType);
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> builder.build(shardContext));
assertEquals(exc.getMessage(), "cannot collapse on field `field` without `doc_values`");
numberFieldType =
new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG, false, false,
true, Collections.emptyMap());
true, false, null, Collections.emptyMap());
when(shardContext.fieldMapper("field")).thenReturn(numberFieldType);
builder.setInnerHits(new InnerHitBuilder());
exc = expectThrows(IllegalArgumentException.class, () -> builder.build(shardContext));
@ -205,6 +208,11 @@ public class CollapseBuilderTests extends AbstractSerializingTestCase<CollapseBu
return null;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
return null;

View File

@ -52,10 +52,13 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -122,6 +125,11 @@ public class SliceBuilderTests extends ESTestCase {
MappedFieldType fieldType = new MappedFieldType(fieldName, true, false, dvType != null,
TextSearchInfo.NONE, Collections.emptyMap()) {
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@Override
public String typeName() {
return null;

View File

@ -32,7 +32,6 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
@ -45,8 +44,6 @@ import java.util.function.BiConsumer;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Base class for testing {@link FieldMapper}s.
@ -253,21 +250,4 @@ public abstract class FieldMapperTestCase<T extends FieldMapper.Builder<?>> exte
x.endObject().endObject();
return Strings.toString(x);
}
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue) throws IOException {
return fetchSourceValue(mapper, sourceValue, null);
}
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue, String format) throws IOException {
String field = mapper.name();
MapperService mapperService = mock(MapperService.class);
when(mapperService.sourcePath(field)).thenReturn(org.elasticsearch.common.collect.Set.of(field));
ValueFetcher fetcher = mapper.valueFetcher(mapperService, null, format);
SourceLookup lookup = new SourceLookup();
lookup.setSource(Collections.singletonMap(field, sourceValue));
return fetcher.fetchValues(lookup);
}
}

View File

@ -19,8 +19,13 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ -39,4 +44,21 @@ public abstract class FieldTypeTestCase extends ESTestCase {
when(queryShardContext.allowExpensiveQueries()).thenReturn(allowExpensiveQueries);
return queryShardContext;
}
public static List<?> fetchSourceValue(MappedFieldType fieldType, Object sourceValue) throws IOException {
return fetchSourceValue(fieldType, sourceValue, null);
}
public static List<?> fetchSourceValue(MappedFieldType fieldType, Object sourceValue, String format) throws IOException {
String field = fieldType.name();
MapperService mapperService = mock(MapperService.class);
when(mapperService.sourcePath(field)).thenReturn(org.elasticsearch.common.collect.Set.of(field));
ValueFetcher fetcher = fieldType.valueFetcher(mapperService, null, format);
SourceLookup lookup = new SourceLookup();
lookup.setSource(Collections.singletonMap(field, sourceValue));
return fetcher.fetchValues(lookup);
}
}
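A typical call site, following the pattern used throughout this commit: build a mapper, unwrap its MappedFieldType, and fetch a raw source value through the helper (context being a Mapper.BuilderContext as in the tests above):
MappedFieldType ft = new KeywordFieldMapper.Builder("field").build(context).fieldType();
assertEquals(Collections.singletonList("value"), fetchSourceValue(ft, "value"));
The mocked MapperService answers sourcePath(field) with the field's own name, which is what lets a SourceValueFetcher resolve single-field lookups without a real mapping.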

View File

@ -43,7 +43,6 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.ArrayList;
@ -58,8 +57,6 @@ import java.util.function.Supplier;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Base class for testing {@link Mapper}s.
@ -265,24 +262,8 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
assertParseMinimalWarnings();
}
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue) throws IOException {
return fetchSourceValue(mapper, sourceValue, null);
}
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue, String format) throws IOException {
String field = mapper.name();
MapperService mapperService = mock(MapperService.class);
when(mapperService.sourcePath(field)).thenReturn(org.elasticsearch.common.collect.Set.of(field));
ValueFetcher fetcher = mapper.valueFetcher(mapperService, null, format);
SourceLookup lookup = new SourceLookup();
lookup.setSource(Collections.singletonMap(field, sourceValue));
return fetcher.fetchValues(lookup);
}
/**
* Use a {@linkplain FieldMapper} to extract values from doc values.
* Use a {@linkplain ValueFetcher} to extract values from doc values.
*/
protected final List<?> fetchFromDocValues(MapperService mapperService, MappedFieldType ft, DocValueFormat format, Object sourceValue)
throws IOException {

View File

@ -68,6 +68,11 @@ public class MockFieldMapper extends ParametrizedFieldMapper {
public String typeName() {
return "faketype";
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
}
@Override
@ -79,11 +84,6 @@ public class MockFieldMapper extends ParametrizedFieldMapper {
protected void parseCreateField(ParseContext context) {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
public static class Builder extends ParametrizedFieldMapper.Builder {
private final MappedFieldType fieldType;

View File

@ -125,19 +125,6 @@ public class HistogramFieldMapper extends ParametrizedFieldMapper {
throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
public static class HistogramFieldType extends MappedFieldType {
public HistogramFieldType(String name, Map<String, String> meta) {
@ -149,6 +136,19 @@ public class HistogramFieldMapper extends ParametrizedFieldMapper {
return CONTENT_TYPE;
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
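For illustration only, not part of the commit: with the FieldTypeTestCase helper shown above, the relocated fetcher can now be exercised directly on the field type, without building a mapper. The histogram source map below is invented for the sketch; it comes back unchanged because parseSourceValue is the identity.
public void testFetchSourceValue() throws IOException {
    MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap());
    Map<String, Object> histogram = new HashMap<>();
    histogram.put("values", Arrays.asList(0.1, 0.5));
    histogram.put("counts", Arrays.asList(3, 7));
    // The whole source map is returned as-is, wrapped in a single-element list.
    assertEquals(Collections.singletonList(histogram), fetchSourceValue(fieldType, histogram));
}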
@Override
public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
failIfNoDocValues();

View File

@ -432,6 +432,7 @@ public class RateAggregatorTests extends AggregatorTestCase {
true,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
DateFieldMapper.Resolution.MILLISECONDS,
null,
Collections.emptyMap()
);
}

View File

@ -17,12 +17,15 @@ import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParametrizedFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.io.UncheckedIOException;
@ -60,6 +63,11 @@ public class DataStreamTimestampFieldMapper extends MetadataFieldMapper {
public Query existsQuery(QueryShardContext context) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support exists queries");
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
}
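Metadata fields such as this one have no user-visible value in _source, so the new API point is deliberately unsupported rather than silently empty. A hedged illustration of what a caller sees (timestampFieldType stands in for an instance obtained from a mapper service; that wiring is not shown in this hunk):
// Fetching on the internal timestamp field type simply throws.
expectThrows(UnsupportedOperationException.class,
    () -> timestampFieldType.valueFetcher(null, null, null));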
private static DataStreamTimestampFieldMapper toType(FieldMapper in) {

View File

@ -9,11 +9,9 @@ package org.elasticsearch.xpack.constantkeyword.mapper;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
@ -21,18 +19,14 @@ import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.MapperTestCase;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xpack.constantkeyword.ConstantKeywordMapperPlugin;
import org.elasticsearch.xpack.constantkeyword.mapper.ConstantKeywordFieldMapper.ConstantKeywordFieldType;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import static java.util.Collections.singleton;
import static org.hamcrest.Matchers.instanceOf;
public class ConstantKeywordFieldMapperTests extends MapperTestCase {
@ -162,24 +156,4 @@ public class ConstantKeywordFieldMapperTests extends MapperTestCase {
b.field("value", "bar");
}));
}
public void testFetchValue() throws Exception {
MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "constant_keyword")));
FieldMapper fieldMapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
ValueFetcher fetcher = fieldMapper.valueFetcher(mapperService, null, null);
SourceLookup missingValueLookup = new SourceLookup();
SourceLookup nullValueLookup = new SourceLookup();
nullValueLookup.setSource(Collections.singletonMap("field", null));
assertTrue(fetcher.fetchValues(missingValueLookup).isEmpty());
assertTrue(fetcher.fetchValues(nullValueLookup).isEmpty());
merge(mapperService, fieldMapping(b -> b.field("type", "constant_keyword").field("value", "foo")));
fieldMapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
fetcher = fieldMapper.valueFetcher(mapperService, null, null);
assertEquals(List.of("foo"), fetcher.fetchValues(missingValueLookup));
assertEquals(List.of("foo"), fetcher.fetchValues(nullValueLookup));
}
}

View File

@ -131,6 +131,17 @@ public class ConstantKeywordFieldMapper extends ParametrizedFieldMapper {
return new ConstantIndexFieldData.Builder(mapperService -> value, name(), CoreValuesSourceType.BYTES);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return value == null
? lookup -> Collections.emptyList()
: lookup -> Collections.singletonList(value);
}
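A side note on the shape of this implementation: ValueFetcher is a lambda target here, so the constant-value case needs no SourceValueFetcher and no source access at all; both lambdas ignore the lookup entirely. A hedged sanity-check sketch, mirroring the field type test later in this diff:
ConstantKeywordFieldType ft = new ConstantKeywordFieldType("field", "foo");
ValueFetcher fetcher = ft.valueFetcher(null, null, null);
// The fetcher never reads the (empty) source; it always returns the mapped constant.
assertEquals(Collections.singletonList("foo"), fetcher.fetchValues(new SourceLookup()));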
@Override
protected boolean matches(String pattern, boolean caseInsensitive, QueryShardContext context) {
if (value == null) {
@ -251,17 +262,6 @@ public class ConstantKeywordFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return fieldType().value == null
? lookup -> org.elasticsearch.common.collect.List.of()
: lookup -> org.elasticsearch.common.collect.List.of(fieldType().value);
}
@Override
protected String contentType() {
return CONTENT_TYPE;

View File

@ -11,6 +11,9 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xpack.constantkeyword.mapper.ConstantKeywordFieldMapper.ConstantKeywordFieldType;
import java.util.Arrays;
@ -100,4 +103,22 @@ public class ConstantKeywordFieldTypeTests extends FieldTypeTestCase {
assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("f.o", RegExp.ALL, 0, 10, null, null));
assertEquals(new MatchNoDocsQuery(), ft.regexpQuery("f..o", RegExp.ALL, 0, 10, null, null));
}
public void testFetchValue() throws Exception {
MappedFieldType fieldType = new ConstantKeywordFieldMapper.ConstantKeywordFieldType("field", null);
ValueFetcher fetcher = fieldType.valueFetcher(null, null, null);
SourceLookup missingValueLookup = new SourceLookup();
SourceLookup nullValueLookup = new SourceLookup();
nullValueLookup.setSource(Collections.singletonMap("field", null));
assertTrue(fetcher.fetchValues(missingValueLookup).isEmpty());
assertTrue(fetcher.fetchValues(nullValueLookup).isEmpty());
MappedFieldType valued = new ConstantKeywordFieldMapper.ConstantKeywordFieldType("field", "foo");
fetcher = valued.valueFetcher(null, null, null);
assertEquals(Collections.singletonList("foo"), fetcher.fetchValues(missingValueLookup));
assertEquals(Collections.singletonList("foo"), fetcher.fetchValues(nullValueLookup));
}
}

View File

@ -9,24 +9,18 @@ package org.elasticsearch.xpack.flattened.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapperTestCase;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
@ -42,7 +36,6 @@ import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
@ -512,18 +505,4 @@ public class FlatObjectFieldMapperTests extends FieldMapperTestCase<FlatObjectFi
assertTokenStreamContents(keyedFieldType.getTextSearchInfo().getSearchAnalyzer().analyzer().tokenStream("", "Hello World"),
new String[] {"Hello", "World"});
}
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
Map<String, Object> sourceValue = org.elasticsearch.common.collect.Map.of("key", "value");
FlatObjectFieldMapper mapper = new FlatObjectFieldMapper.Builder("field").build(context);
assertEquals(List.of(sourceValue), fetchSourceValue(mapper, sourceValue));
FlatObjectFieldMapper nullValueMapper = new FlatObjectFieldMapper.Builder("field")
.nullValue("NULL")
.build(context);
assertEquals(List.of("NULL"), fetchSourceValue(nullValueMapper, null));
}
}

View File

@ -178,7 +178,8 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper {
@Override
public FlatObjectFieldMapper build(BuilderContext context) {
MappedFieldType ft = new RootFlatObjectFieldType(buildFullName(context), indexed, hasDocValues, meta, splitQueriesOnWhitespace);
MappedFieldType ft
= new RootFlatObjectFieldType(buildFullName(context), indexed, hasDocValues, meta, splitQueriesOnWhitespace, nullValue);
if (eagerGlobalOrdinals) {
ft.setEagerGlobalOrdinals(true);
}
@ -324,6 +325,11 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper {
failIfNoDocValues();
return new KeyedFlatObjectFieldData.Builder(name(), key, CoreValuesSourceType.BYTES);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException(); // TODO can we implement this?
}
}
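The TODO is a real gap: keyed flat object lookups have no fetch path yet, so callers hit an UnsupportedOperationException. A hedged negative sketch (keyedFieldType stands in for an instance resolved for a field.key path):
expectThrows(UnsupportedOperationException.class,
    () -> keyedFieldType.valueFetcher(null, null, null));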
/**
@ -437,13 +443,15 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper {
*/
public static final class RootFlatObjectFieldType extends StringFieldType {
private final boolean splitQueriesOnWhitespace;
private final String nullValue;
public RootFlatObjectFieldType(String name, boolean indexed, boolean hasDocValues, Map<String, String> meta,
boolean splitQueriesOnWhitespace) {
boolean splitQueriesOnWhitespace, String nullValue) {
super(name, indexed, false, hasDocValues,
splitQueriesOnWhitespace ? TextSearchInfo.WHITESPACE_MATCH_ONLY : TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.splitQueriesOnWhitespace = splitQueriesOnWhitespace;
setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
this.nullValue = nullValue;
}
@Override
@ -465,6 +473,19 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper {
failIfNoDocValues();
return new SortedSetOrdinalsIndexFieldData.Builder(name(), CoreValuesSourceType.BYTES);
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false, nullValue) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
}
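Note that the four-argument SourceValueFetcher constructor threads the mapper-level null_value through to fetch time, so an explicit null in _source comes back as the configured replacement. A hedged sketch of that path, mirroring FlatObjectFieldTypeTests at the end of this diff (context is a Mapper.BuilderContext built as in that test):
MappedFieldType ft = new FlatObjectFieldMapper.Builder("field")
    .nullValue("NULL")
    .build(context)
    .fieldType();
// {"field": null} in _source is fetched as the configured replacement value.
assertEquals(Collections.singletonList("NULL"), fetchSourceValue(ft, null));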
private FlatObjectFieldParser fieldParser;
@ -543,19 +564,6 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper {
}
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue(), nullValue) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);

View File

@ -0,0 +1,37 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.flattened.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
public class FlatObjectFieldTypeTests extends FieldTypeTestCase {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
Map<String, Object> sourceValue = org.elasticsearch.common.collect.Map.of("key", "value");
MappedFieldType mapper = new FlatObjectFieldMapper.Builder("field").build(context).fieldType();
assertEquals(Collections.singletonList(sourceValue), fetchSourceValue(mapper, sourceValue));
MappedFieldType nullValueMapper = new FlatObjectFieldMapper.Builder("field")
.nullValue("NULL")
.build(context)
.fieldType();
assertEquals(Collections.singletonList("NULL"), fetchSourceValue(nullValueMapper, null));
}
}

Some files were not shown because too many files have changed in this diff.