Add search fields parameter to support high-level field retrieval. (#60258)

This feature adds a new `fields` parameter to the search request, which
consults both the document `_source` and the mappings to fetch fields in a
consistent way. The PR merges the `field-retrieval` feature branch.

Addresses #49028 and #55363.
Julie Tibshirani 2020-07-28 10:58:20 -07:00 committed by GitHub
parent 025e7bee80
commit c7bfb5de41
117 changed files with 3234 additions and 398 deletions
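
Before the file-by-file diff, here is a minimal sketch of what the new parameter enables, driven through the Java client. It leans on the `addFetchField` helpers this commit adds to `SearchRequestBuilder` (that diff appears below); the `twitter` index, the query, and the `Client` wiring are illustrative, not part of the commit.

[source,java]
----
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;

public class FetchFieldsSketch {
    // Retrieve the "user" and "date" fields through the new fields API instead
    // of reading the raw _source; the date comes back in the requested format.
    static void printUsersAndDates(Client client) {
        SearchResponse response = client.prepareSearch("twitter")
            .setQuery(QueryBuilders.matchQuery("message", "elasticsearch"))
            .addFetchField("user")                 // loaded from _source via the mappings
            .addFetchField("date", "epoch_millis") // per-field format override
            .setFetchSource(false)                 // hits carry only the requested fields
            .get();
        for (SearchHit hit : response.getHits()) {
            DocumentField user = hit.field("user");
            DocumentField date = hit.field("date");
            System.out.println(user.getValue() + " @ " + date.getValue());
        }
    }
}
----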

@@ -140,23 +140,28 @@ Closure setupTwitter = { String name, int count ->
           type: date
         likes:
           type: long
+        location:
+          properties:
+            city:
+              type: keyword
+            country:
+              type: keyword
   - do:
       bulk:
         index: twitter
         refresh: true
         body: |'''
     for (int i = 0; i < count; i++) {
-        String user, text
+        String body
         if (i == 0) {
-            user = 'kimchy'
-            text = 'trying out Elasticsearch'
+            body = """{"user": "kimchy", "message": "trying out Elasticsearch", "date": "2009-11-15T14:12:12", "likes": 0,
+                "location": { "city": "Amsterdam", "country": "Netherlands" }}"""
         } else {
-            user = 'test'
-            text = "some message with the number $i"
+            body = """{"user": "test", "message": "some message with the number $i", "date": "2009-11-15T14:12:12", "likes": $i}"""
         }
         buildRestTests.setups[name] += """
{"index":{"_id": "$i"}}
-{"user": "$user", "message": "$text", "date": "2009-11-15T14:12:12", "likes": $i}"""
+$body"""
     }
 }
 setupTwitter('twitter', 5)

@@ -105,7 +105,8 @@ GET /twitter/_search?typed_keys
   "aggregations": {
     "top_users": {
       "top_hits": {
-        "size": 1
+        "size": 1,
+        "_source": ["user", "likes", "message"]
       }
     }
   }
@@ -142,9 +143,8 @@ In the response, the aggregations names will be changed to respectively `date_hi
         "_id": "0",
         "_score": 1.0,
         "_source": {
-          "date": "2009-11-15T14:12:12",
-          "message": "trying out Elasticsearch",
           "user": "kimchy",
+          "message": "trying out Elasticsearch",
           "likes": 0
         }
       }
@@ -168,12 +168,12 @@ request. This is the case for Terms, Significant Terms and Percentiles aggregati
also contains information about the type of the targeted field: `lterms` (for a terms aggregation on a Long field),
`sigsterms` (for a significant terms aggregation on a String field), `tdigest_percentiles` (for a percentile
aggregation based on the TDigest algorithm).

[[indexing-aggregation-results]]
== Indexing aggregation results with {transforms}

<<transforms,{transforms-cap}>> enable you to convert existing {es} indices
into summarized indices, which provide opportunities for new insights and
analytics. You can use {transforms} to persistently index your aggregation
results into entity-centric indices.

@@ -245,7 +245,11 @@ The API returns the following result:
     "user": "kimchy",
     "date": "2009-11-15T14:12:12",
     "likes": 0,
-    "message": "trying out Elasticsearch"
+    "message": "trying out Elasticsearch",
+    "location": {
+      "city": "Amsterdam",
+      "country": "Netherlands"
+    }
   }
 }
--------------------------------------------------

@@ -22,6 +22,7 @@ string:: <<text,`text`>>, <<keyword,`keyword`>> and <<wildcard,`wildcard
<<nested>>:: `nested` for arrays of JSON objects

[discrete]
+[[spatial_datatypes]]
=== Spatial data types

<<geo-point>>:: `geo_point` for lat/lon points

@@ -76,7 +76,8 @@ GET /cluster_one:twitter/_search
    "match": {
      "user": "kimchy"
    }
- }
+ },
+ "_source": ["user", "message", "likes"]
}
--------------------------------------------------
// TEST[continued]
@@ -114,7 +115,6 @@ The API returns the following response:
        "_score": 1,
        "_source": {
          "user": "kimchy",
-         "date": "2009-11-15T14:12:12",
          "message": "trying out Elasticsearch",
          "likes": 0
        }
@@ -148,7 +148,8 @@ GET /twitter,cluster_one:twitter,cluster_two:twitter/_search
    "match": {
      "user": "kimchy"
    }
- }
+ },
+ "_source": ["user", "message", "likes"]
}
--------------------------------------------------
// TEST[continued]
@@ -186,7 +187,6 @@ The API returns the following response:
        "_score": 2,
        "_source": {
          "user": "kimchy",
-         "date": "2009-11-15T14:12:12",
          "message": "trying out Elasticsearch",
          "likes": 0
        }
@@ -198,7 +198,6 @@ The API returns the following response:
        "_score": 1,
        "_source": {
          "user": "kimchy",
-         "date": "2009-11-15T14:12:12",
          "message": "trying out Elasticsearch",
          "likes": 0
        }
@@ -210,7 +209,6 @@ The API returns the following response:
        "_score": 1,
        "_source": {
          "user": "kimchy",
-         "date": "2009-11-15T14:12:12",
          "message": "trying out Elasticsearch",
          "likes": 0
        }

@@ -4,33 +4,225 @@
By default, each hit in the search response includes the document
<<mapping-source-field,`_source`>>, which is the entire JSON object that was
-provided when indexing the document. If you only need certain source fields in
-the search response, you can use the <<source-filtering,source filtering>> to
-restrict what parts of the source are returned.
+provided when indexing the document. To retrieve specific fields in the search
+response, you can use the `fields` parameter:

-Returning fields using only the document source has some limitations:
+[source,console]
+----
+POST twitter/_search
+{
+  "query": {
+    "match": {
+      "message": "elasticsearch"
+    }
+  },
+  "fields": ["user", "date"],
+  "_source": false
+}
+----
+// TEST[setup:twitter]

-* The `_source` field does not include <<multi-fields, multi-fields>> or
-<<alias, field aliases>>. Likewise, a field in the source does not contain
-values copied using the <<copy-to,`copy_to`>> mapping parameter.
-* Since the `_source` is stored as a single field in Lucene, the whole source
-object must be loaded and parsed, even if only a small number of fields are
-needed.
+The `fields` parameter consults both a document's `_source` and the index
+mappings to load and return values. Because it makes use of the mappings,
+`fields` has some advantages over referencing the `_source` directly: it
+accepts <<multi-fields, multi-fields>> and <<alias, field aliases>>, and
+also formats field values like dates in a consistent way.

-To avoid these limitations, you can:
+A document's `_source` is stored as a single field in Lucene. So the whole
+`_source` object must be loaded and parsed even if only a small number of
+fields are requested. To avoid this limitation, you can try another option for
+loading fields:

* Use the <<docvalue-fields, `docvalue_fields`>>
parameter to get values for selected fields. This can be a good
choice when returning a fairly small number of fields that support doc values,
such as keywords and dates.
-* Use the <<request-body-search-stored-fields, `stored_fields`>> parameter to get the values for specific stored fields. (Fields that use the <<mapping-store,`store`>> mapping option.)
+* Use the <<request-body-search-stored-fields, `stored_fields`>> parameter to
+get the values for specific stored fields (fields that use the
+<<mapping-store,`store`>> mapping option).

You can find more detailed information on each of these methods in the
following sections:

-* <<source-filtering>>
+* <<search-fields-param>>
* <<docvalue-fields>>
* <<stored-fields>>
+* <<source-filtering>>
+
+[discrete]
+[[search-fields-param]]
+=== Fields
+
+The `fields` parameter allows for retrieving a list of document fields in
+the search response. It consults both the document `_source` and the index
+mappings to return each value in a standardized way that matches its mapping
+type. By default, date fields are formatted according to the
+<<mapping-date-format,date format>> parameter in their mappings.
+
+The following search request uses the `fields` parameter to retrieve values
+for the `user` field, all fields starting with `location.`, and the
+`date` field:
+
+[source,console]
+----
+POST twitter/_search
+{
+  "query": {
+    "match": {
+      "message": "elasticsearch"
+    }
+  },
+  "fields": [
+    "user",
+    "location.*", <1>
+    {
+      "field": "date",
+      "format": "epoch_millis" <2>
+    }
+  ],
+  "_source": false
+}
+----
+// TEST[continued]
+
+<1> Both full field names and wildcard patterns are accepted.
+<2> Using object notation, you can pass a `format` parameter to apply a custom
+format for the field's values. The date fields
+<<date,`date`>> and <<date_nanos, `date_nanos`>> accept a
+<<mapping-date-format,date format>>. <<spatial_datatypes, Spatial fields>>
+accept either `geojson` for http://www.geojson.org[GeoJSON] (the default)
+or `wkt` for
+https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry[Well Known Text].
+Other field types do not support the `format` parameter.
+
+The values are returned as a flat list in the `fields` section in each hit:
+
+[source,console-result]
+----
+{
+  "took" : 2,
+  "timed_out" : false,
+  "_shards" : {
+    "total" : 1,
+    "successful" : 1,
+    "skipped" : 0,
+    "failed" : 0
+  },
+  "hits" : {
+    "total" : {
+      "value" : 1,
+      "relation" : "eq"
+    },
+    "max_score" : 1.0,
+    "hits" : [
+      {
+        "_index" : "twitter",
+        "_id" : "0",
+        "_score" : 1.0,
+        "_type" : "_doc",
+        "fields" : {
+          "user" : [
+            "kimchy"
+          ],
+          "date" : [
+            "1258294332000"
+          ],
+          "location.city": [
+            "Amsterdam"
+          ],
+          "location.country": [
+            "Netherlands"
+          ]
+        }
+      }
+    ]
+  }
+}
+----
+// TESTRESPONSE[s/"took" : 2/"took": $body.took/]
+// TESTRESPONSE[s/"max_score" : 1.0/"max_score" : $body.hits.max_score/]
+// TESTRESPONSE[s/"_score" : 1.0/"_score" : $body.hits.hits.0._score/]
+
+Only leaf fields are returned -- `fields` does not allow for fetching entire
+objects.
+
+The `fields` parameter handles field types like <<alias, field aliases>> and
+<<constant-keyword, `constant_keyword`>> whose values aren't always present in
+the `_source`. Other mapping options are also respected, including
+<<ignore-above, `ignore_above`>>, <<ignore-malformed, `ignore_malformed`>> and
+<<null-value, `null_value`>>.
+
+NOTE: The `fields` response always returns an array of values for each field,
+even when there is a single value in the `_source`. This is because {es} has
+no dedicated array type, and any field could contain multiple values. The
+`fields` parameter also does not guarantee that array values are returned in
+a specific order. See the mapping documentation on <<array, arrays>> for more
+background.
+
+[discrete]
+[[docvalue-fields]]
+=== Doc value fields
+
+You can use the <<docvalue-fields,`docvalue_fields`>> parameter to return
+<<doc-values,doc values>> for one or more fields in the search response.
+
+Doc values store the same values as the `_source` but in an on-disk,
+column-based structure that's optimized for sorting and aggregations. Since each
+field is stored separately, {es} only reads the field values that were requested
+and can avoid loading the whole document `_source`.
+
+Doc values are stored for supported fields by default. However, doc values are
+not supported for <<text,`text`>> or
+{plugins}/mapper-annotated-text-usage.html[`text_annotated`] fields.
+
+The following search request uses the `docvalue_fields` parameter to retrieve
+doc values for the `user` field, all fields starting with `location.`, and the
+`date` field:
+
+[source,console]
+----
+GET twitter/_search
+{
+  "query": {
+    "match": {
+      "message": "elasticsearch"
+    }
+  },
+  "docvalue_fields": [
+    "user",
+    "location.*", <1>
+    {
+      "field": "date",
+      "format": "epoch_millis" <2>
+    }
+  ]
+}
+----
+// TEST[continued]
+
+<1> Both full field names and wildcard patterns are accepted.
+<2> Using object notation, you can pass a `format` parameter to apply a custom
+format for the field's doc values. <<date,Date fields>> support a
+<<mapping-date-format,date `format`>>. <<number,Numeric fields>> support a
+https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html[DecimalFormat
+pattern]. Other field datatypes do not support the `format` parameter.
+
+TIP: You cannot use the `docvalue_fields` parameter to retrieve doc values for
+nested objects. If you specify a nested object, the search returns an empty
+array (`[ ]`) for the field. To access nested fields, use the
+<<request-body-search-inner-hits, `inner_hits`>> parameter's `docvalue_fields`
+property.
+
+[discrete]
+[[stored-fields]]
+=== Stored fields
+
+It's also possible to store an individual field's values by using the
+<<mapping-store,`store`>> mapping option. You can use the
+<<request-body-search-stored-fields, `stored_fields`>> parameter to include
+these stored values in the search response.

[discrete]
[[source-filtering]]
@@ -117,71 +309,3 @@ GET /_search
  }
}
----
-
-[discrete]
-[[docvalue-fields]]
-=== Doc value fields
-
-You can use the <<docvalue-fields,`docvalue_fields`>> parameter to return
-<<doc-values,doc values>> for one or more fields in the search response.
-
-Doc values store the same values as the `_source` but in an on-disk,
-column-based structure that's optimized for sorting and aggregations. Since each
-field is stored separately, {es} only reads the field values that were requested
-and can avoid loading the whole document `_source`.
-
-Doc values are stored for supported fields by default. However, doc values are
-not supported for <<text,`text`>> or
-{plugins}/mapper-annotated-text-usage.html[`text_annotated`] fields.
-
-The following search request uses the `docvalue_fields` parameter to
-retrieve doc values for the following fields:
-
-* Fields with names starting with `my_ip`
-* `my_keyword_field`
-* Fields with names ending with `_date_field`
-
-[source,console]
-----
-GET /_search
-{
-  "query": {
-    "match_all": {}
-  },
-  "docvalue_fields": [
-    "my_ip*", <1>
-    {
-      "field": "my_keyword_field" <2>
-    },
-    {
-      "field": "*_date_field",
-      "format": "epoch_millis" <3>
-    }
-  ]
-}
-----
-
-<1> Wildcard patten used to match field names, specified as a string.
-<2> Wildcard patten used to match field names, specified as an object.
-<3> With the object notation, you can use the `format` parameter to specify a
-format for the field's returned doc values. <<date,Date fields>> support a
-<<mapping-date-format,date `format`>>. <<number,Numeric fields>> support a
-https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html[DecimalFormat
-pattern]. Other field data types do not support the `format` parameter.
-
-TIP: You cannot use the `docvalue_fields` parameter to retrieve doc values for
-nested objects. If you specify a nested object, the search returns an empty
-array (`[ ]`) for the field. To access nested fields, use the
-<<request-body-search-inner-hits, `inner_hits`>> parameter's `docvalue_fields`
-property.
-
-[discrete]
-[[stored-fields]]
-=== Stored fields
-
-It's also possible to store an individual field's values by using the
-<<mapping-store,`store`>> mapping option. You can use the
-<<request-body-search-stored-fields, `stored_fields`>> parameter to include
-these stored values in the search response.

@@ -57,3 +57,26 @@ setup:
          field: location

  - match: {hits.total: 1}
+
+---
+"Test retrieve geo_shape field":
+  - do:
+      search:
+        index: test
+        body:
+          fields: [location]
+          _source: false
+
+  - match: { hits.hits.0.fields.location.0.type: "Point" }
+  - match: { hits.hits.0.fields.location.0.coordinates: [1.0, 1.0] }
+
+  - do:
+      search:
+        index: test
+        body:
+          fields:
+            - field: location
+              format: wkt
+          _source: false
+
+  - match: { hits.hits.0.fields.location.0: "POINT (1.0 1.0)" }

@@ -152,11 +152,7 @@ public class RankFeatureFieldMapper extends FieldMapper {
        float value;
        if (context.externalValueSet()) {
            Object v = context.externalValue();
-           if (v instanceof Number) {
-               value = ((Number) v).floatValue();
-           } else {
-               value = Float.parseFloat(v.toString());
-           }
+           value = objectToFloat(v);
        } else if (context.parser().currentToken() == Token.VALUE_NULL) {
            // skip
            return;
@@ -176,6 +172,22 @@ public class RankFeatureFieldMapper extends FieldMapper {
        context.doc().addWithKey(name(), new FeatureField("_feature", name(), value));
    }

+   private Float objectToFloat(Object value) {
+       if (value instanceof Number) {
+           return ((Number) value).floatValue();
+       } else {
+           return Float.parseFloat(value.toString());
+       }
+   }
+
+   @Override
+   protected Float parseSourceValue(Object value, String format) {
+       if (format != null) {
+           throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+       }
+       return objectToFloat(value);
+   }
+
    @Override
    protected String contentType() {
        return CONTENT_TYPE;

@@ -159,6 +159,14 @@ public class RankFeaturesFieldMapper extends FieldMapper {
        throw new AssertionError("parse is implemented directly");
    }

+   @Override
+   protected Object parseSourceValue(Object value, String format) {
+       if (format != null) {
+           throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+       }
+       return value;
+   }
+
    @Override
    protected boolean indexedByDefault() {
        return false;

@@ -356,6 +356,11 @@ public class ScaledFloatFieldMapper extends FieldMapper {
        return (ScaledFloatFieldMapper) super.clone();
    }

+   @Override
+   protected Double nullValue() {
+       return nullValue;
+   }
+
    @Override
    protected void parseCreateField(ParseContext context) throws IOException {
@@ -474,6 +479,26 @@ public class ScaledFloatFieldMapper extends FieldMapper {
        return doubleValue;
    }

+   @Override
+   protected Double parseSourceValue(Object value, String format) {
+       if (format != null) {
+           throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+       }
+
+       double doubleValue;
+       if (value.equals("")) {
+           if (nullValue == null) {
+               return null;
+           }
+           doubleValue = nullValue;
+       } else {
+           doubleValue = objectToDouble(value);
+       }
+
+       double scalingFactor = fieldType().getScalingFactor();
+       return Math.round(doubleValue * scalingFactor) / scalingFactor;
+   }
+
    private static class ScaledFloatIndexFieldData extends IndexNumericFieldData {
        private final IndexNumericFieldData scaledFieldData;

@@ -418,6 +418,11 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
        throw new UnsupportedOperationException();
    }

+   @Override
+   protected Object parseSourceValue(Object value, String format) {
+       throw new UnsupportedOperationException();
+   }
+
    @Override
    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
@@ -459,6 +464,11 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
    }

+   @Override
+   protected Object parseSourceValue(Object value, String format) {
+       throw new UnsupportedOperationException();
+   }
+
    @Override
    protected String contentType() {
        return "shingle";
@@ -577,6 +587,14 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
        }
    }

+   @Override
+   protected String parseSourceValue(Object value, String format) {
+       if (format != null) {
+           throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+       }
+       return value.toString();
+   }
+
    @Override
    protected String contentType() {
        return CONTENT_TYPE;

@@ -158,6 +158,15 @@ public class TokenCountFieldMapper extends FieldMapper {
        context.doc().addAll(NumberFieldMapper.NumberType.INTEGER.createFields(fieldType().name(), tokenCount, indexed, docValued, stored));
    }

+   @Override
+   protected String parseSourceValue(Object value, String format) {
+       if (format != null) {
+           throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+       }
+       return value.toString();
+   }
+
    /**
     * Count position increments in a token stream. Package private for testing.
     * @param analyzer analyzer to create token stream

@@ -23,9 +23,12 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermFrequencyAttribute;
import org.apache.lucene.document.FeatureField;
import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
@@ -186,4 +189,12 @@ public class RankFeatureFieldMapperTests extends FieldMapperTestCase<RankFeature
            e.getCause().getMessage());
    }

+   public void testParseSourceValue() {
+       Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
+       Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
+
+       RankFeatureFieldMapper mapper = new RankFeatureFieldMapper.Builder("field").build(context);
+       assertEquals(3.14f, mapper.parseSourceValue(3.14, null), 0.0001);
+       assertEquals(42.9f, mapper.parseSourceValue("42.9", null), 0.0001);
+   }
}

@@ -21,14 +21,18 @@ package org.elasticsearch.index.mapper;

import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
@@ -398,4 +402,26 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
            new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
        assertEquals(mapping3, mapper.mappingSource().toString());
    }

+   public void testParseSourceValue() {
+       Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
+       Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
+
+       ScaledFloatFieldMapper mapper = new ScaledFloatFieldMapper.Builder("field")
+           .scalingFactor(100)
+           .build(context);
+       assertEquals(3.14, mapper.parseSourceValue(3.1415926, null), 0.00001);
+       assertEquals(3.14, mapper.parseSourceValue("3.1415", null), 0.00001);
+       assertNull(mapper.parseSourceValue("", null));
+
+       ScaledFloatFieldMapper nullValueMapper = new ScaledFloatFieldMapper.Builder("field")
+           .scalingFactor(100)
+           .nullValue(2.71)
+           .build(context);
+       assertEquals(2.71, nullValueMapper.parseSourceValue("", null), 0.00001);
+
+       SourceLookup sourceLookup = new SourceLookup();
+       sourceLookup.setSource(Collections.singletonMap("field", null));
+       assertEquals(org.elasticsearch.common.collect.List.of(2.71), nullValueMapper.lookupValues(sourceLookup, null));
+   }
}

@@ -135,6 +135,11 @@ public class MetaJoinFieldMapper extends FieldMapper {
        throw new IllegalStateException("Should never be called");
    }

+   @Override
+   protected Object parseSourceValue(Object value, String format) {
+       throw new UnsupportedOperationException("The " + typeName() + " field is not stored in _source.");
+   }
+
    @Override
    protected String contentType() {
        return CONTENT_TYPE;

@@ -185,6 +185,11 @@ public final class ParentIdFieldMapper extends FieldMapper {
        context.doc().add(new SortedDocValuesField(fieldType().name(), binaryValue));
    }

+   @Override
+   protected Object parseSourceValue(Object value, String format) {
+       throw new UnsupportedOperationException("The " + typeName() + " field is not stored in _source.");
+   }
+
    @Override
    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
        ParentIdFieldMapper parentMergeWith = (ParentIdFieldMapper) other;

@@ -347,6 +347,14 @@ public final class ParentJoinFieldMapper extends FieldMapper {
        throw new UnsupportedOperationException("parsing is implemented in parse(), this method should NEVER be called");
    }

+   @Override
+   protected Object parseSourceValue(Object value, String format) {
+       if (format != null) {
+           throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+       }
+       return value;
+   }
+
    @Override
    public void parse(ParseContext context) throws IOException {
        context.path().add(simpleName());

@@ -371,6 +371,14 @@ public class PercolatorFieldMapper extends FieldMapper {
        processQuery(query, context);
    }

+   @Override
+   protected Object parseSourceValue(Object value, String format) {
+       if (format != null) {
+           throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+       }
+       return value;
+   }
+
    static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbField,
                                        QueryBuilder queryBuilder, ParseContext context) throws IOException {
        if (indexVersion.onOrAfter(Version.V_6_0_0_beta2)) {

@@ -577,6 +577,11 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
        return CONTENT_TYPE;
    }

+   @Override
+   protected String nullValue() {
+       return nullValue;
+   }
+
    @Override
    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
        ICUCollationKeywordFieldMapper icuMergeWith = (ICUCollationKeywordFieldMapper) other;
@@ -731,4 +736,17 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
            createFieldNamesField(context);
        }
    }

+   @Override
+   protected String parseSourceValue(Object value, String format) {
+       if (format != null) {
+           throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+       }
+
+       String keywordValue = value.toString();
+       if (keywordValue.length() > ignoreAbove) {
+           return null;
+       }
+       return keywordValue;
+   }
}

@@ -28,21 +28,27 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin;
import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
+import java.util.Collections;
import java.util.Set;

import static org.hamcrest.Matchers.containsString;
@@ -484,4 +490,26 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase<ICU
        indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
    }

+   public void testParseSourceValue() {
+       Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
+       Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
+
+       ICUCollationKeywordFieldMapper mapper = new ICUCollationKeywordFieldMapper.Builder("field").build(context);
+       assertEquals("42", mapper.parseSourceValue(42L, null));
+       assertEquals("true", mapper.parseSourceValue(true, null));
+
+       ICUCollationKeywordFieldMapper ignoreAboveMapper = new ICUCollationKeywordFieldMapper.Builder("field")
+           .ignoreAbove(4)
+           .build(context);
+       assertNull(ignoreAboveMapper.parseSourceValue("value", null));
+       assertEquals("42", ignoreAboveMapper.parseSourceValue(42L, null));
+       assertEquals("true", ignoreAboveMapper.parseSourceValue(true, null));
+
+       ICUCollationKeywordFieldMapper nullValueMapper = new ICUCollationKeywordFieldMapper.Builder("field")
+           .nullValue("NULL")
+           .build(context);
+       SourceLookup sourceLookup = new SourceLookup();
+       sourceLookup.setSource(Collections.singletonMap("field", null));
+       assertEquals(List.of("NULL"), nullValueMapper.lookupValues(sourceLookup, null));
+   }
}

@@ -583,6 +583,14 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
        }
    }

+   @Override
+   protected String parseSourceValue(Object value, String format) {
+       if (format != null) {
+           throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+       }
+       return value.toString();
+   }
+
    @Override
    protected String contentType() {
        return CONTENT_TYPE;

@@ -28,10 +28,12 @@ import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
@@ -44,8 +46,11 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
+import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ParsedDocument;
@@ -672,4 +677,19 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
        assertThat(e.getMessage(), containsString("name cannot be empty string"));
    }

+   public void testParseSourceValue() {
+       Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
+       Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
+
+       FieldMapper fieldMapper = new AnnotatedTextFieldMapper.Builder("field")
+           .indexAnalyzer(indexService.getIndexAnalyzers().getDefaultIndexAnalyzer())
+           .searchAnalyzer(indexService.getIndexAnalyzers().getDefaultSearchAnalyzer())
+           .searchQuoteAnalyzer(indexService.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer())
+           .build(context);
+       AnnotatedTextFieldMapper mapper = (AnnotatedTextFieldMapper) fieldMapper;
+
+       assertEquals("value", mapper.parseSourceValue("value", null));
+       assertEquals("42", mapper.parseSourceValue(42L, null));
+       assertEquals("true", mapper.parseSourceValue(true, null));
+   }
}

@@ -147,6 +147,14 @@ public class Murmur3FieldMapper extends FieldMapper {
        }
    }

+   @Override
+   protected String parseSourceValue(Object value, String format) {
+       if (format != null) {
+           throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+       }
+       return value.toString();
+   }
+
    @Override
    protected boolean indexedByDefault() {
        return false;
@@ -156,5 +164,4 @@ public class Murmur3FieldMapper extends FieldMapper {
    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
    }
-
}

@@ -0,0 +1,216 @@
setup:
- skip:
version: " - 7.9.99"
reason: "the 'fields' parameter was added in 7.10"
---
"Test basic field retrieval":
- do:
indices.create:
index: test
body:
mappings:
properties:
keyword:
type: keyword
integer_range:
type: integer_range
- do:
index:
index: test
id: 1
body:
keyword: [ "x", "y" ]
integer_range:
gte: 0
lte: 42
- do:
indices.refresh:
index: [ test ]
- do:
search:
index: test
body:
fields: [keyword, integer_range]
- is_true: hits.hits.0._id
- is_true: hits.hits.0._source
- match: { hits.hits.0.fields.keyword.0: x }
- match: { hits.hits.0.fields.keyword.1: y }
- match: { hits.hits.0.fields.integer_range.0.gte: 0 }
- match: { hits.hits.0.fields.integer_range.0.lte: 42 }
---
"Test date formatting":
- do:
indices.create:
index: test
body:
settings:
index.number_of_shards: 1
mappings:
properties:
keyword:
type: keyword
date:
type: date
- do:
index:
index: test
id: 1
body:
keyword: "value"
date: "1990-12-29T22:30:00.000Z"
- do:
indices.refresh:
index: [ test ]
- do:
search:
index: test
body:
fields:
- field: date
format: "yyyy/MM/dd"
- is_true: hits.hits.0._id
- is_true: hits.hits.0._source
- match: { hits.hits.0.fields.date.0: "1990/12/29" }
- do:
catch: bad_request
search:
index: test
body:
fields:
- field: keyword
format: "yyyy/MM/dd"
---
"Test disable source":
- do:
indices.create:
index: test
body:
settings:
number_of_shards: 1
mappings:
_source:
enabled: false
properties:
keyword:
type: keyword
- do:
index:
index: test
id: 1
body:
keyword: [ "x" ]
- do:
catch: bad_request
search:
index: test
body:
fields: [keyword]
- match: { error.root_cause.0.type: "illegal_argument_exception" }
- match: { error.root_cause.0.reason: "Unable to retrieve the requested [fields] since _source is disabled
in the mappings for index [test]" }
---
"Test ignore malformed":
- do:
indices.create:
index: test
body:
settings:
number_of_shards: 1
mappings:
properties:
keyword:
type: keyword
integer:
type: integer
ignore_malformed: true
- do:
index:
index: test
id: 1
body:
keyword: "x"
integer: 42
- do:
index:
index: test
id: 2
body:
keyword: "y"
integer: "not an integer"
- do:
indices.refresh:
index: [ test ]
- do:
search:
index: test
body:
sort: [ keyword ]
fields: [ integer ]
- match: { hits.hits.0.fields.integer.0: 42 }
- is_false: hits.hits.1.fields.integer
---
"Test disable _source loading":
- do:
indices.create:
index: test
body:
settings:
number_of_shards: 1
mappings:
properties:
keyword:
type: keyword
integer:
type: integer
store: true
- do:
index:
index: test
id: 1
refresh: true
body:
keyword: "x"
integer: 42
- do:
search:
index: test
body:
fields: [ keyword ]
_source: false
- match: { hits.hits.0.fields.keyword.0: "x" }
- do:
search:
index: test
body:
fields: [ keyword ]
stored_fields: [ integer ]
_source: false
- match: { hits.hits.0.fields.keyword.0: "x" }
- match: { hits.hits.0.fields.integer.0: 42 }
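
For readers following the YAML above in Java, a rough equivalent of the "Test date formatting" case can be built with `SearchSourceBuilder#fetchField`, the method the REST layer and the `SearchRequestBuilder` helpers in the next file delegate to. This is a sketch, not part of the commit; the two-argument `fetchField(name, format)` overload is assumed from that diff.

[source,java]
----
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class FieldsRequestSketch {
    // Ask for the "date" field rendered as yyyy/MM/dd, mirroring the YAML test.
    static SearchRequest dateFormattingRequest() {
        SearchSourceBuilder source = new SearchSourceBuilder();
        source.query(QueryBuilders.matchAllQuery());
        source.fetchField("date", "yyyy/MM/dd"); // assumed overload, per the SearchRequestBuilder diff
        return new SearchRequest("test").source(source);
    }
}
----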

@@ -314,6 +314,27 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
        return addDocValueField(name, null);
    }

+   /**
+    * Adds a field to load and return. The field must be present in the document _source.
+    *
+    * @param name The field to load
+    */
+   public SearchRequestBuilder addFetchField(String name) {
+       sourceBuilder().fetchField(name, null);
+       return this;
+   }
+
+   /**
+    * Adds a field to load and return. The field must be present in the document _source.
+    *
+    * @param name The field to load
+    * @param format TODO(jtibs): fill this in
+    */
+   public SearchRequestBuilder addFetchField(String name, String format) {
+       sourceBuilder().fetchField(name, format);
+       return this;
+   }
+
    /**
     * Adds a stored field to load and return (note, it must be stored) as part of the search request.
     */

@@ -104,10 +104,9 @@ public class DocumentField implements Writeable, ToXContentFragment, Iterable<Ob
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startArray(name);
        for (Object value : values) {
-           // this call doesn't really need to support writing any kind of object.
-           // Stored fields values are converted using MappedFieldType#valueForDisplay.
-           // As a result they can either be Strings, Numbers, or Booleans, that's
-           // all.
+           // This call doesn't really need to support writing any kind of object, since the values
+           // here are always serializable to xContent. Each value could be a leaf type like a string,
+           // number, or boolean, a list of such values, or a map of such values with string keys.
            builder.value(value);
        }
        builder.endArray();

@@ -609,5 +609,4 @@ public final class GeoJson {
            return builder;
        }
    }
-
}

@@ -0,0 +1,82 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.geo;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.geometry.Geometry;
import java.io.IOException;
import java.io.UncheckedIOException;
public class GeoJsonGeometryFormat implements GeometryFormat<Geometry> {
public static final String NAME = "geojson";
private final GeoJson geoJsonParser;
public GeoJsonGeometryFormat(GeoJson geoJsonParser) {
this.geoJsonParser = geoJsonParser;
}
@Override
public String name() {
return NAME;
}
@Override
public Geometry fromXContent(XContentParser parser) throws IOException {
if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
return null;
}
return geoJsonParser.fromXContent(parser);
}
@Override
public XContentBuilder toXContent(Geometry geometry, XContentBuilder builder, ToXContent.Params params) throws IOException {
if (geometry != null) {
return GeoJson.toXContent(geometry, builder, params);
} else {
return builder.nullValue();
}
}
@Override
public Object toXContentAsObject(Geometry geometry) {
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
GeoJson.toXContent(geometry, builder, ToXContent.EMPTY_PARAMS);
StreamInput input = BytesReference.bytes(builder).streamInput();
try (XContentParser parser = XContentType.JSON.xContent()
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, input)) {
return parser.map();
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}

@@ -31,6 +31,11 @@ import java.text.ParseException;
 */
public interface GeometryFormat<ParsedFormat> {

+   /**
+    * The name of the format, for example 'wkt'.
+    */
+   String name();
+
    /**
     * Parser JSON representation of a geometry
     */
@@ -41,4 +46,10 @@ public interface GeometryFormat<ParsedFormat> {
     */
    XContentBuilder toXContent(ParsedFormat geometry, XContentBuilder builder, ToXContent.Params params) throws IOException;

+   /**
+    * Serializes the geometry into a standard Java object.
+    *
+    * For example, the GeoJson format returns the geometry as a map, while WKT returns a string.
+    */
+   Object toXContentAsObject(ParsedFormat geometry);
}

@@ -22,15 +22,13 @@ package org.elasticsearch.common.geo;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.MapXContentParser;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.GeometryCollection;
import org.elasticsearch.geometry.Point;
-import org.elasticsearch.geometry.utils.StandardValidator;
import org.elasticsearch.geometry.utils.GeometryValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
import org.elasticsearch.geometry.utils.WellKnownText;

import java.io.IOException;
@@ -66,59 +64,31 @@ public final class GeometryParser {
    /**
     * Returns a geometry format object that can parse and then serialize the object back to the same format.
     */
-   public GeometryFormat<Geometry> geometryFormat(XContentParser parser) {
-       if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
-           return new GeometryFormat<Geometry>() {
-               @Override
-               public Geometry fromXContent(XContentParser parser) throws IOException {
-                   return null;
-               }
-
-               @Override
-               public XContentBuilder toXContent(Geometry geometry, XContentBuilder builder, ToXContent.Params params) throws IOException {
-                   if (geometry != null) {
-                       // We don't know the format of the original geometry - so going with default
-                       return GeoJson.toXContent(geometry, builder, params);
-                   } else {
-                       return builder.nullValue();
-                   }
-               }
-           };
-       } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
-           return new GeometryFormat<Geometry>() {
-               @Override
-               public Geometry fromXContent(XContentParser parser) throws IOException {
-                   return geoJsonParser.fromXContent(parser);
-               }
-
-               @Override
-               public XContentBuilder toXContent(Geometry geometry, XContentBuilder builder, ToXContent.Params params) throws IOException {
-                   if (geometry != null) {
-                       return GeoJson.toXContent(geometry, builder, params);
-                   } else {
-                       return builder.nullValue();
-                   }
-               }
-           };
-       } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
-           return new GeometryFormat<Geometry>() {
-               @Override
-               public Geometry fromXContent(XContentParser parser) throws IOException, ParseException {
-                   return wellKnownTextParser.fromWKT(parser.text());
-               }
-
-               @Override
-               public XContentBuilder toXContent(Geometry geometry, XContentBuilder builder, ToXContent.Params params) throws IOException {
-                   if (geometry != null) {
-                       return builder.value(wellKnownTextParser.toWKT(geometry));
-                   } else {
-                       return builder.nullValue();
-                   }
-               }
-           };
-       }
-       throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates");
+   public GeometryFormat<Geometry> geometryFormat(String format) {
+       if (format.equals(GeoJsonGeometryFormat.NAME)) {
+           return new GeoJsonGeometryFormat(geoJsonParser);
+       } else if (format.equals(WKTGeometryFormat.NAME)) {
+           return new WKTGeometryFormat(wellKnownTextParser);
+       } else {
+           throw new IllegalArgumentException("Unrecognized geometry format [" + format + "].");
+       }
+   }
+
+   /**
+    * Returns a geometry format object that can parse and then serialize the object back to the same format.
+    * This method automatically recognizes the format by examining the provided {@link XContentParser}.
+    */
+   public GeometryFormat<Geometry> geometryFormat(XContentParser parser) {
+       if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+           return new GeoJsonGeometryFormat(geoJsonParser);
+       } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
+           return new WKTGeometryFormat(wellKnownTextParser);
+       } else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
+           // We don't know the format of the original geometry - so going with default
+           return new GeoJsonGeometryFormat(geoJsonParser);
+       } else {
+           throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates");
+       }
    }

    /**
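
As a quick illustration of the explicit lookup (again a sketch, not part of the commit): the outputs below match the GeoJSON and WKT expectations in the geo_shape YAML test earlier. The three-boolean `GeometryParser` constructor (right orientation, coerce, ignore Z value) is an assumption about the existing class.

[source,java]
----
import org.elasticsearch.common.geo.GeometryParser;
import org.elasticsearch.geometry.Point;

public class GeometryFormatSketch {
    static void demo() {
        GeometryParser parser = new GeometryParser(true, true, true); // assumed constructor arguments
        Point point = new Point(1.0, 1.0);

        // GeoJSON format: a Map like {type=Point, coordinates=[1.0, 1.0]}
        Object geojson = parser.geometryFormat("geojson").toXContentAsObject(point);

        // WKT format: the String "POINT (1.0 1.0)"
        Object wkt = parser.geometryFormat("wkt").toXContentAsObject(point);

        System.out.println(geojson + " / " + wkt);
    }
}
----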

@@ -0,0 +1,66 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.geo;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.utils.WellKnownText;
import java.io.IOException;
import java.text.ParseException;
public class WKTGeometryFormat implements GeometryFormat<Geometry> {
public static final String NAME = "wkt";
private final WellKnownText wellKnownTextParser;
public WKTGeometryFormat(WellKnownText wellKnownTextParser) {
this.wellKnownTextParser = wellKnownTextParser;
}
@Override
public String name() {
return NAME;
}
@Override
public Geometry fromXContent(XContentParser parser) throws IOException, ParseException {
if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
return null;
}
return wellKnownTextParser.fromWKT(parser.text());
}
@Override
public XContentBuilder toXContent(Geometry geometry, XContentBuilder builder, ToXContent.Params params) throws IOException {
if (geometry != null) {
return builder.value(wellKnownTextParser.toWKT(geometry));
} else {
return builder.nullValue();
}
}
@Override
public String toXContentAsObject(Geometry geometry) {
return wellKnownTextParser.toWKT(geometry);
}
}

@@ -394,4 +394,17 @@ public class InetAddresses {
            throw new IllegalArgumentException("Expected [ip/prefix] but was [" + maskedAddress + "]");
        }
    }

+   /**
+    * Given an address and prefix length, returns the string representation of the range in CIDR notation.
+    *
+    * See {@link #toAddrString} for details on how the address is represented.
+    */
+   public static String toCidrString(InetAddress address, int prefixLength) {
+       return new StringBuilder()
+           .append(toAddrString(address))
+           .append("/")
+           .append(prefixLength)
+           .toString();
+   }
}
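
A trivial usage sketch of the new helper (the address is illustrative, not from the commit):

[source,java]
----
import java.net.InetAddress;
import java.net.UnknownHostException;

import org.elasticsearch.common.network.InetAddresses;

public class CidrSketch {
    public static void main(String[] args) throws UnknownHostException {
        InetAddress address = InetAddress.getByName("192.168.1.0");
        // Prints "192.168.1.0/24"; the address half uses the same
        // representation as InetAddresses.toAddrString.
        System.out.println(InetAddresses.toCidrString(address, 24));
    }
}
----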

View File

@@ -97,6 +97,16 @@ public class XContentMapValues {
         }
     }

+    /**
+     * For the provided path, return its value in the xContent map.
+     *
+     * Note that in contrast with {@link XContentMapValues#extractRawValues}, array and object values
+     * can be returned.
+     *
+     * @param path the value's path in the map.
+     *
+     * @return the value associated with the path in the map or 'null' if the path does not exist.
+     */
     public static Object extractValue(String path, Map<?, ?> map) {
         return extractValue(map, path.split("\\."));
     }

@@ -105,19 +115,51 @@ public class XContentMapValues {
         if (pathElements.length == 0) {
             return null;
         }
-        return extractValue(pathElements, 0, map);
+        return XContentMapValues.extractValue(pathElements, 0, map, null);
     }

-    @SuppressWarnings({"unchecked"})
-    private static Object extractValue(String[] pathElements, int index, Object currentValue) {
-        if (index == pathElements.length) {
-            return currentValue;
-        }
-        if (currentValue == null) {
-            return null;
-        }
+    /**
+     * For the provided path, return its value in the xContent map.
+     *
+     * Note that in contrast with {@link XContentMapValues#extractRawValues}, array and object values
+     * can be returned.
+     *
+     * @param path the value's path in the map.
+     * @param nullValue a value to return if the path exists, but the value is 'null'. This helps
+     *                  in distinguishing between a path that doesn't exist vs. a value of 'null'.
+     *
+     * @return the value associated with the path in the map or 'null' if the path does not exist.
+     */
+    public static Object extractValue(String path, Map<?, ?> map, Object nullValue) {
+        String[] pathElements = path.split("\\.");
+        if (pathElements.length == 0) {
+            return null;
+        }
+        return extractValue(pathElements, 0, map, nullValue);
+    }
+
+    private static Object extractValue(String[] pathElements,
+                                       int index,
+                                       Object currentValue,
+                                       Object nullValue) {
+        if (currentValue instanceof List) {
+            List<?> valueList = (List<?>) currentValue;
+            List<Object> newList = new ArrayList<>(valueList.size());
+            for (Object o : valueList) {
+                Object listValue = extractValue(pathElements, index, o, nullValue);
+                if (listValue != null) {
+                    newList.add(listValue);
+                }
+            }
+            return newList;
+        }
+
+        if (index == pathElements.length) {
+            return currentValue != null ? currentValue : nullValue;
+        }
+
         if (currentValue instanceof Map) {
-            Map map = (Map) currentValue;
+            Map<?, ?> map = (Map<?, ?>) currentValue;
             String key = pathElements[index];
             Object mapValue = map.get(key);
             int nextIndex = index + 1;

@@ -126,18 +168,12 @@ public class XContentMapValues {
                 mapValue = map.get(key);
                 nextIndex++;
             }
-            return extractValue(pathElements, nextIndex, mapValue);
-        }
-        if (currentValue instanceof List) {
-            List valueList = (List) currentValue;
-            List newList = new ArrayList(valueList.size());
-            for (Object o : valueList) {
-                Object listValue = extractValue(pathElements, index, o);
-                if (listValue != null) {
-                    newList.add(listValue);
-                }
-            }
-            return newList;
+            if (map.containsKey(key) == false) {
+                return null;
+            }
+            return extractValue(pathElements, nextIndex, mapValue, nullValue);
         }
         return null;
     }
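To see why the new nullValue parameter matters, here is a self-contained sketch of the extraction semantics (plain Java, illustrative names): a missing path yields null, while a path that exists with a null value yields the caller-supplied sentinel.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ExtractValueDemo {
    // Simplified version of extractValue above; it omits the handling of
    // dotted keys that the real implementation supports.
    static Object extract(String[] path, int i, Object current, Object nullValue) {
        if (current instanceof List) {
            List<Object> out = new ArrayList<>();
            for (Object o : (List<?>) current) {
                Object v = extract(path, i, o, nullValue);
                if (v != null) {
                    out.add(v);
                }
            }
            return out;
        }
        if (i == path.length) {
            return current != null ? current : nullValue;
        }
        if (current instanceof Map) {
            Map<?, ?> map = (Map<?, ?>) current;
            if (map.containsKey(path[i]) == false) {
                return null; // the path does not exist
            }
            return extract(path, i + 1, map.get(path[i]), nullValue);
        }
        return null;
    }

    public static void main(String[] args) {
        Map<String, Object> source = new HashMap<>();
        source.put("likes", null); // field present, value explicitly null
        Object sentinel = "NULL_PLACEHOLDER";
        System.out.println(extract("likes".split("\\."), 0, source, sentinel));   // NULL_PLACEHOLDER
        System.out.println(extract("missing".split("\\."), 0, source, sentinel)); // null
    }
}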

View File

@@ -27,19 +27,25 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.geo.GeoJsonGeometryFormat;
 import org.elasticsearch.common.geo.ShapeRelation;
 import org.elasticsearch.common.geo.SpatialStrategy;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.common.xcontent.support.MapXContentParser;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.geometry.Geometry;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.QueryShardException;

 import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.text.ParseException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;

@@ -77,10 +83,45 @@ public abstract class AbstractGeometryFieldMapper<Parsed, Processed> extends Fie
     }

     /**
-     * interface representing parser in geometry indexing pipeline
+     * Interface representing parser in geometry indexing pipeline.
      */
-    public interface Parser<Parsed> {
-        Parsed parse(XContentParser parser, AbstractGeometryFieldMapper mapper) throws IOException, ParseException;
+    public abstract static class Parser<Parsed> {
+        /**
+         * Parse the given xContent value to an object of type {@link Parsed}. The value can be
+         * in any supported format.
+         */
+        public abstract Parsed parse(XContentParser parser, AbstractGeometryFieldMapper mapper) throws IOException, ParseException;
+
+        /**
+         * Given a parsed value and a format string, formats the value into a plain Java object.
+         *
+         * Supported formats include 'geojson' and 'wkt'. The different formats are defined
+         * as subclasses of {@link org.elasticsearch.common.geo.GeometryFormat}.
+         */
+        public abstract Object format(Parsed value, String format);
+
+        /**
+         * Parses the given value, then formats it according to the 'format' string.
+         *
+         * By default, this method simply parses the value using {@link Parser#parse}, then formats
+         * it with {@link Parser#format}. However some {@link Parser} implementations override this
+         * as they can avoid parsing the value if it is already in the right format.
+         */
+        public Object parseAndFormatObject(Object value, AbstractGeometryFieldMapper mapper, String format) {
+            Parsed geometry;
+            try (XContentParser parser = new MapXContentParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE,
+                    Collections.singletonMap("dummy_field", value), XContentType.JSON)) {
+                parser.nextToken(); // start object
+                parser.nextToken(); // field name
+                parser.nextToken(); // field value
+                geometry = parse(parser, mapper);
+            } catch (IOException e) {
+                throw new UncheckedIOException(e);
+            } catch (ParseException e) {
+                throw new RuntimeException(e);
+            }
+            return format(geometry, format);
+        }
     }

     public abstract static class Builder<T extends Builder<T, FT>, FT extends AbstractGeometryFieldType>

@@ -142,6 +183,17 @@ public abstract class AbstractGeometryFieldMapper<Parsed, Processed> extends Fie
         }
     }

+    @Override
+    protected Object parseSourceValue(Object value, String format) {
+        if (format == null) {
+            format = GeoJsonGeometryFormat.NAME;
+        }
+
+        AbstractGeometryFieldType<Parsed, Processed> mappedFieldType = fieldType();
+        Parser<Parsed> geometryParser = mappedFieldType.geometryParser();
+        return geometryParser.parseAndFormatObject(value, this, format);
+    }
+
     public abstract static class TypeParser<T extends Builder> implements Mapper.TypeParser {
         protected abstract T newBuilder(String name, Map<String, Object> params);

View File

@@ -23,9 +23,13 @@ import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.geo.GeoPoint;
+import org.elasticsearch.common.geo.GeometryFormat;
+import org.elasticsearch.common.geo.GeometryParser;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.geometry.Geometry;
+import org.elasticsearch.geometry.Point;

 import java.io.IOException;
 import java.text.ParseException;

@@ -147,7 +151,8 @@ public abstract class AbstractPointGeometryFieldMapper<Parsed, Processed> extend
         }
     }

-    public ParsedPoint getNullValue() {
+    @Override
+    public ParsedPoint nullValue() {
         return nullValue;
     }

@@ -158,6 +163,7 @@ public abstract class AbstractPointGeometryFieldMapper<Parsed, Processed> extend
         void validate(String fieldName);
         void normalize(String fieldName);
         void resetCoords(double x, double y);
+        Point asGeometry();
         default boolean isNormalizable(double coord) {
             return Double.isNaN(coord) == false && Double.isInfinite(coord) == false;
         }

@@ -178,7 +184,15 @@ public abstract class AbstractPointGeometryFieldMapper<Parsed, Processed> extend
     }

     /** A parser implementation that can parse the various point formats */
-    public static class PointParser<P extends ParsedPoint> implements Parser<List<P>> {
+    public static class PointParser<P extends ParsedPoint> extends Parser<List<P>> {
+        /**
+         * Note that this parser is only used for formatting values.
+         */
+        private final GeometryParser geometryParser;
+
+        public PointParser() {
+            this.geometryParser = new GeometryParser(true, true, true);
+        }

         @Override
         public List<P> parse(XContentParser parser, AbstractGeometryFieldMapper geometryMapper) throws IOException, ParseException {

@@ -238,5 +252,16 @@ public abstract class AbstractPointGeometryFieldMapper<Parsed, Processed> extend
             return points;
         }

+        @Override
+        public Object format(List<P> points, String format) {
+            List<Object> result = new ArrayList<>();
+            GeometryFormat<Geometry> geometryFormat = geometryParser.geometryFormat(format);
+            for (ParsedPoint point : points) {
+                Geometry geometry = point.asGeometry();
+                result.add(geometryFormat.toXContentAsObject(geometry));
+            }
+            return result;
+        }
     }
 }

View File

@@ -188,7 +188,14 @@ public class BinaryFieldMapper extends ParametrizedFieldMapper {
            // no doc values
            createFieldNamesField(context);
        }
    }

    @Override
    protected Object parseSourceValue(Object value, String format) {
        if (format != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
        }
        return value;
    }

    @Override

View File

@@ -30,6 +30,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;

@@ -249,6 +250,20 @@ public class BooleanFieldMapper extends ParametrizedFieldMapper {
        }
    }

    @Override
    public Boolean parseSourceValue(Object value, String format) {
        if (format != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
        }
        if (value instanceof Boolean) {
            return (Boolean) value;
        } else {
            String textValue = value.toString();
            return Booleans.parseBoolean(textValue.toCharArray(), 0, textValue.length(), false);
        }
    }

    @Override
    public ParametrizedFieldMapper.Builder getMergeBuilder() {
        return new Builder(simpleName()).init(this);

@@ -259,4 +274,8 @@ public class BooleanFieldMapper extends ParametrizedFieldMapper {
        return CONTENT_TYPE;
    }

    @Override
    protected Object nullValue() {
        return nullValue;
    }
}
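A quick sketch of the standardization this buys (plain Java stand-in for the Elasticsearch Booleans helper): whether _source holds a real boolean or its string form, the fetched value comes back as a Boolean.

public class BooleanSourceDemo {
    static Boolean parseSourceValue(Object value) {
        if (value instanceof Boolean) {
            return (Boolean) value;
        }
        String text = value.toString();
        if (text.equals("true")) {
            return Boolean.TRUE;
        }
        if (text.equals("false")) {
            return Boolean.FALSE;
        }
        throw new IllegalArgumentException("Failed to parse value [" + text + "]: only [true] or [false] are allowed.");
    }

    public static void main(String[] args) {
        System.out.println(parseSourceValue(Boolean.TRUE)); // true
        System.out.println(parseSourceValue("false"));      // false
    }
}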

View File

@@ -528,6 +528,19 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
        }
    }

    @Override
    protected List<?> parseSourceValue(Object value, String format) {
        if (format != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
        }
        if (value instanceof List) {
            return (List<?>) value;
        } else {
            return org.elasticsearch.common.collect.List.of(value);
        }
    }

    static class CompletionInputMetadata {
        public final String input;
        public final Map<String, Set<String>> contexts;

View File

@@ -56,6 +56,7 @@ import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.time.ZonedDateTime;
import java.util.Collections;
import java.util.List;
import java.util.Locale;

@@ -279,6 +280,7 @@ public final class DateFieldMapper extends ParametrizedFieldMapper {
        return dateMathParser;
    }

    // Visible for testing.
    public long parse(String value) {
        return resolution.convert(DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant());
    }

@@ -509,6 +511,11 @@ public final class DateFieldMapper extends ParametrizedFieldMapper {
        return (DateFieldMapper) super.clone();
    }

    @Override
    protected String nullValue() {
        return nullValueAsString;
    }

    @Override
    protected void parseCreateField(ParseContext context) throws IOException {
        String dateAsString;

@@ -555,6 +562,18 @@ public final class DateFieldMapper extends ParametrizedFieldMapper {
        }
    }

    @Override
    public String parseSourceValue(Object value, String format) {
        String date = value.toString();
        long timestamp = fieldType().parse(date);
        ZonedDateTime dateTime = fieldType().resolution().toInstant(timestamp).atZone(ZoneOffset.UTC);

        DateFormatter dateTimeFormatter = fieldType().dateTimeFormatter();
        if (format != null) {
            dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale());
        }
        return dateTimeFormatter.format(dateTime);
    }

    public boolean getIgnoreMalformed() {
        return ignoreMalformed;
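The retrieval flow above round-trips the _source string through the mapper: parse to a timestamp, then print at UTC with either the mapping's own format or the format passed on the request. A sketch using only java.time (the pattern strings are illustrative stand-ins for the mapper's configured format):

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class DateSourceDemo {
    public static void main(String[] args) {
        String source = "2009-11-15T14:12:12"; // value pulled from _source
        DateTimeFormatter mappingFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss");

        // Parse to a timestamp, as fieldType().parse(date) does above.
        long timestamp = ZonedDateTime
            .parse(source, mappingFormat.withZone(ZoneOffset.UTC))
            .toInstant().toEpochMilli();
        ZonedDateTime dateTime = Instant.ofEpochMilli(timestamp).atZone(ZoneOffset.UTC);

        // No request format: fall back to the mapping's format.
        System.out.println(mappingFormat.format(dateTime)); // 2009-11-15T14:12:12

        // An explicit request format overrides it, as in parseSourceValue.
        System.out.println(DateTimeFormatter.ofPattern("yyyy/MM/dd").format(dateTime)); // 2009/11/15
    }
}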

View File

@@ -24,6 +24,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;

@@ -33,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.AbstractXContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper.FieldNamesFieldType;
import org.elasticsearch.search.lookup.SourceLookup;

import java.io.IOException;
import java.util.ArrayList;

@@ -222,6 +224,13 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
        return copyTo;
    }

    /**
     * A value to use in place of a {@code null} value in the document source.
     */
    protected Object nullValue() {
        return null;
    }

    /**
     * Whether this mapper can handle an array value during document parsing. If true,
     * when an array is encountered during parsing, the document parser will pass the

@@ -269,6 +278,54 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
     */
    protected abstract void parseCreateField(ParseContext context) throws IOException;

    /**
     * Given access to a document's _source, return this field's values.
     *
     * In addition to pulling out the values, mappers can parse them into a standard form. This
     * method delegates to {@link #parseSourceValue} for parsing. Most mappers will choose
     * to override {@link #parseSourceValue} -- for example numeric field mappers make sure to
     * parse the source value into a number of the right type. Some mappers may need more
     * flexibility and can override this entire method instead.
     *
     * Note that for array values, the order in which values are returned is undefined and should
     * not be relied on.
     *
     * @param lookup a lookup structure over the document's source.
     * @param format an optional format string used when formatting values, for example a date format.
     * @return a list of standardized field values.
     */
    public List<?> lookupValues(SourceLookup lookup, @Nullable String format) {
        Object sourceValue = lookup.extractValue(name(), nullValue());
        if (sourceValue == null) {
            return org.elasticsearch.common.collect.List.of();
        }

        List<Object> values = new ArrayList<>();
        if (parsesArrayValue()) {
            return (List<?>) parseSourceValue(sourceValue, format);
        } else {
            List<?> sourceValues = sourceValue instanceof List
                ? (List<?>) sourceValue
                : org.elasticsearch.common.collect.List.of(sourceValue);
            for (Object value : sourceValues) {
                Object parsedValue = parseSourceValue(value, format);
                if (parsedValue != null) {
                    values.add(parsedValue);
                }
            }
        }
        return values;
    }

    /**
     * Given a value that has been extracted from a document's source, parse it into a standard
     * format. This parsing logic should closely mirror the value parsing in
     * {@link #parseCreateField} or {@link #parse}.
     *
     * Note that when overriding this method, {@link #lookupValues} should *not* be overridden.
     */
    protected abstract Object parseSourceValue(Object value, @Nullable String format);

    protected void createFieldNamesField(ParseContext context) {
        FieldNamesFieldType fieldNamesFieldType = context.docMapper().metadataMapper(FieldNamesFieldMapper.class).fieldType();
        if (fieldNamesFieldType != null && fieldNamesFieldType.isEnabled()) {

@@ -292,6 +349,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
        }
    }

    @Override
    public FieldMapper merge(Mapper mergeWith) {
        FieldMapper merged = clone();
View File

@@ -37,6 +37,15 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
    private final Map<String, MappedFieldType> fullNameToFieldType = new HashMap<>();
    private final Map<String, String> aliasToConcreteName = new HashMap<>();

    /**
     * A map from field name to all fields whose content has been copied into it
     * through copy_to. A field will only be present in the map if some other field
     * has listed it as a target of copy_to.
     *
     * For convenience, the set of copied fields includes the field itself.
     */
    private final Map<String, Set<String>> fieldToCopiedFields = new HashMap<>();

    private final DynamicKeyFieldTypeLookup dynamicKeyLookup;

    FieldTypeLookup() {

@@ -45,7 +54,6 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
    FieldTypeLookup(Collection<FieldMapper> fieldMappers,
                    Collection<FieldAliasMapper> fieldAliasMappers) {
        Map<String, DynamicKeyFieldMapper> dynamicKeyMappers = new HashMap<>();
        for (FieldMapper fieldMapper : fieldMappers) {

@@ -55,6 +63,17 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
            if (fieldMapper instanceof DynamicKeyFieldMapper) {
                dynamicKeyMappers.put(fieldName, (DynamicKeyFieldMapper) fieldMapper);
            }

            for (String targetField : fieldMapper.copyTo().copyToFields()) {
                Set<String> sourcePath = fieldToCopiedFields.get(targetField);
                if (sourcePath == null) {
                    fieldToCopiedFields.put(targetField, org.elasticsearch.common.collect.Set.of(targetField, fieldName));
                } else if (sourcePath.contains(fieldName) == false) {
                    Set<String> newSourcePath = new HashSet<>(sourcePath);
                    newSourcePath.add(fieldName);
                    fieldToCopiedFields.put(targetField, Collections.unmodifiableSet(newSourcePath));
                }
            }
        }

        for (FieldAliasMapper fieldAliasMapper : fieldAliasMappers) {

@@ -99,6 +118,31 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
        return fields;
    }

    /**
     * Given a field, returns its possible paths in the _source.
     *
     * For most fields, the source path is the same as the field itself. However
     * there are some exceptions:
     *   - The 'source path' for a field alias is its target field.
     *   - For a multi-field, the source path is the parent field.
     *   - One field's content could have been copied to another through copy_to.
     */
    public Set<String> sourcePaths(String field) {
        String resolvedField = aliasToConcreteName.getOrDefault(field, field);

        int lastDotIndex = resolvedField.lastIndexOf('.');
        if (lastDotIndex > 0) {
            String parentField = resolvedField.substring(0, lastDotIndex);
            if (fullNameToFieldType.containsKey(parentField)) {
                resolvedField = parentField;
            }
        }

        return fieldToCopiedFields.containsKey(resolvedField)
            ? fieldToCopiedFields.get(resolvedField)
            : org.elasticsearch.common.collect.Set.of(resolvedField);
    }

    @Override
    public Iterator<MappedFieldType> iterator() {
        Iterator<MappedFieldType> concreteFieldTypes = fullNameToFieldType.values().iterator();
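A hand-built sketch of the resolution order in sourcePaths (the maps below are illustrative stand-ins for the lookup's internal state): first resolve aliases, then strip a multi-field suffix when its parent is mapped, then expand copy_to sources.

import java.util.Collections;
import java.util.Map;
import java.util.Set;

public class SourcePathsDemo {
    static final Map<String, String> aliasToConcreteName = Map.of("nickname", "user");
    static final Set<String> mappedFields = Set.of("user", "message");
    // user's content is copied into message through copy_to.
    static final Map<String, Set<String>> fieldToCopiedFields = Map.of("message", Set.of("message", "user"));

    static Set<String> sourcePaths(String field) {
        String resolved = aliasToConcreteName.getOrDefault(field, field);
        int lastDot = resolved.lastIndexOf('.');
        if (lastDot > 0) {
            String parent = resolved.substring(0, lastDot);
            if (mappedFields.contains(parent)) {
                resolved = parent; // multi-field: read the parent's value
            }
        }
        return fieldToCopiedFields.getOrDefault(resolved, Collections.singleton(resolved));
    }

    public static void main(String[] args) {
        System.out.println(sourcePaths("nickname"));     // [user]
        System.out.println(sourcePaths("user.keyword")); // [user]
        System.out.println(sourcePaths("message"));      // [message, user] (set order may vary)
    }
}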

View File

@@ -29,10 +29,11 @@ import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.geometry.Point;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.plain.AbstractLatLonPointIndexFieldData;
-import org.elasticsearch.index.query.VectorGeoPointShapeQueryProcessor;
 import org.elasticsearch.index.mapper.GeoPointFieldMapper.ParsedGeoPoint;
+import org.elasticsearch.index.query.VectorGeoPointShapeQueryProcessor;
 import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;

 import java.io.IOException;

@@ -49,6 +50,7 @@ import java.util.Map;
 public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<List<ParsedGeoPoint>, List<? extends GeoPoint>> {
     public static final String CONTENT_TYPE = "geo_point";
     public static final FieldType FIELD_TYPE = new FieldType();
     static {
         FIELD_TYPE.setStored(false);
         FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);

@@ -218,6 +220,10 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<List<P
             this.reset(y, x);
         }

+        public Point asGeometry() {
+            return new Point(lon(), lat());
+        }
+
         @Override
         public boolean equals(Object other) {
             double oLat;

View File

@@ -71,7 +71,7 @@ public class GeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<Geomet
         GeoShapeFieldType ft = new GeoShapeFieldType(buildFullName(context), indexed, hasDocValues, meta);
         GeometryParser geometryParser = new GeometryParser(ft.orientation.getAsBoolean(), coerce().value(),
             ignoreZValue().value());
-        ft.setGeometryParser((parser, mapper) -> geometryParser.parse(parser));
+        ft.setGeometryParser(new GeoShapeParser(geometryParser));
         ft.setGeometryIndexer(new GeoShapeIndexer(orientation().value().getAsBoolean(), buildFullName(context)));
         ft.setGeometryQueryBuilder(new VectorGeoShapeQueryProcessor());
         ft.setOrientation(orientation == null ? Defaults.ORIENTATION.value() : orientation);

View File

@@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.geo.GeometryFormat;
import org.elasticsearch.common.geo.GeometryParser;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.MapXContentParser;
import org.elasticsearch.geometry.Geometry;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.text.ParseException;
import java.util.Collections;
public class GeoShapeParser extends AbstractGeometryFieldMapper.Parser<Geometry> {
private final GeometryParser geometryParser;
public GeoShapeParser(GeometryParser geometryParser) {
this.geometryParser = geometryParser;
}
@Override
public Geometry parse(XContentParser parser, AbstractGeometryFieldMapper mapper) throws IOException, ParseException {
return geometryParser.parse(parser);
}
@Override
public Object format(Geometry value, String format) {
return geometryParser.geometryFormat(format).toXContentAsObject(value);
}
@Override
public Object parseAndFormatObject(Object value, AbstractGeometryFieldMapper mapper, String format) {
try (XContentParser parser = new MapXContentParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE,
Collections.singletonMap("dummy_field", value), XContentType.JSON)) {
parser.nextToken(); // start object
parser.nextToken(); // field name
parser.nextToken(); // field value
GeometryFormat<Geometry> geometryFormat = geometryParser.geometryFormat(parser);
if (geometryFormat.name().equals(format)) {
return value;
}
Geometry geometry = geometryFormat.fromXContent(parser);
return format(geometry, format);
} catch (IOException e) {
throw new UncheckedIOException(e);
} catch (ParseException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -348,6 +348,11 @@ public class IpFieldMapper extends FieldMapper {
        return fieldType().typeName();
    }

    @Override
    protected Object nullValue() {
        return nullValue;
    }

    @Override
    protected IpFieldMapper clone() {
        return (IpFieldMapper) super.clone();

@@ -400,6 +405,21 @@ public class IpFieldMapper extends FieldMapper {
        }
    }

    @Override
    protected String parseSourceValue(Object value, String format) {
        if (format != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
        }

        InetAddress address;
        if (value instanceof InetAddress) {
            address = (InetAddress) value;
        } else {
            address = InetAddresses.forString(value.toString());
        }
        return InetAddresses.toAddrString(address);
    }

    @Override
    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
        IpFieldMapper mergeWith = (IpFieldMapper) other;
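The effect is that fetched IP values come back in one canonical shape regardless of how they were written in _source. A java.net-only sketch (java.net lowercases IPv6 but does not apply the RFC 5952 '::' compression that Elasticsearch's InetAddresses.toAddrString performs):

import java.net.InetAddress;
import java.net.UnknownHostException;

public class IpSourceDemo {
    static String parseSourceValue(Object value) throws UnknownHostException {
        InetAddress address = value instanceof InetAddress
            ? (InetAddress) value
            : InetAddress.getByName(value.toString()); // literal IPs only; no DNS lookup happens
        return address.getHostAddress();
    }

    public static void main(String[] args) throws UnknownHostException {
        System.out.println(parseSourceValue("192.168.1.1"));          // 192.168.1.1
        System.out.println(parseSourceValue("2001:DB8:0:0:0:0:0:1")); // 2001:db8:0:0:0:0:0:1
    }
}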

View File

@@ -47,6 +47,7 @@ import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;

 import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;

@@ -361,25 +362,9 @@ public final class KeywordFieldMapper extends FieldMapper {
             return;
         }

-        final NamedAnalyzer normalizer = fieldType().normalizer();
+        NamedAnalyzer normalizer = fieldType().normalizer();
         if (normalizer != null) {
-            try (TokenStream ts = normalizer.tokenStream(name(), value)) {
-                final CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
-                ts.reset();
-                if (ts.incrementToken() == false) {
-                    throw new IllegalStateException("The normalization token stream is "
-                        + "expected to produce exactly 1 token, but got 0 for analyzer "
-                        + normalizer + " and input \"" + value + "\"");
-                }
-                final String newValue = termAtt.toString();
-                if (ts.incrementToken()) {
-                    throw new IllegalStateException("The normalization token stream is "
-                        + "expected to produce exactly 1 token, but got 2+ for analyzer "
-                        + normalizer + " and input \"" + value + "\"");
-                }
-                ts.end();
-                value = newValue;
-            }
+            value = normalizeValue(normalizer, value);
         }

         // convert to utf8 only once before feeding postings/dv/stored fields

@@ -397,11 +382,60 @@ public final class KeywordFieldMapper extends FieldMapper {
             context.doc().add(new SortedSetDocValuesField(fieldType().name(), binaryValue));
         }
     }

+    private String normalizeValue(NamedAnalyzer normalizer, String value) throws IOException {
+        try (TokenStream ts = normalizer.tokenStream(name(), value)) {
+            final CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
+            ts.reset();
+            if (ts.incrementToken() == false) {
+                throw new IllegalStateException("The normalization token stream is "
+                    + "expected to produce exactly 1 token, but got 0 for analyzer "
+                    + normalizer + " and input \"" + value + "\"");
+            }
+            final String newValue = termAtt.toString();
+            if (ts.incrementToken()) {
+                throw new IllegalStateException("The normalization token stream is "
+                    + "expected to produce exactly 1 token, but got 2+ for analyzer "
+                    + normalizer + " and input \"" + value + "\"");
+            }
+            ts.end();
+            return newValue;
+        }
+    }
+
+    @Override
+    protected String parseSourceValue(Object value, String format) {
+        if (format != null) {
+            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
+        }
+
+        String keywordValue = value.toString();
+        if (keywordValue.length() > ignoreAbove) {
+            return null;
+        }
+
+        NamedAnalyzer normalizer = fieldType().normalizer();
+        if (normalizer == null) {
+            return keywordValue;
+        }
+
+        try {
+            return normalizeValue(normalizer, keywordValue);
+        } catch (IOException e) {
+            throw new UncheckedIOException(e);
+        }
+    }
+
     @Override
     protected String contentType() {
         return CONTENT_TYPE;
     }

+    @Override
+    protected String nullValue() {
+        return nullValue;
+    }
+
     @Override
     protected void mergeOptions(FieldMapper other, List<String> conflicts) {
         KeywordFieldMapper k = (KeywordFieldMapper) other;
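In sketch form, the keyword retrieval rules are: drop values longer than ignore_above (they were never indexed), and run the rest through the configured normalizer so fetched values match indexed terms. Here a lowercasing function stands in for the Lucene analyzer, and IGNORE_ABOVE is an illustrative constant:

import java.util.Locale;

public class KeywordSourceDemo {
    static final int IGNORE_ABOVE = 8;

    static String parseSourceValue(Object value) {
        String keyword = value.toString();
        if (keyword.length() > IGNORE_ABOVE) {
            return null; // skipped, mirroring index-time behavior
        }
        return keyword.toLowerCase(Locale.ROOT); // stand-in for the normalizer
    }

    public static void main(String[] args) {
        System.out.println(parseSourceValue("Kimchy"));            // kimchy
        System.out.println(parseSourceValue("a very long value")); // null
    }
}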

View File

@@ -34,6 +34,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.geo.GeoUtils;
+import org.elasticsearch.common.geo.GeometryParser;
 import org.elasticsearch.common.geo.ShapesAvailability;
 import org.elasticsearch.common.geo.SpatialStrategy;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;

@@ -44,11 +45,14 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.geometry.Geometry;
 import org.elasticsearch.index.query.LegacyGeoShapeQueryProcessor;
 import org.locationtech.spatial4j.shape.Shape;

 import java.io.IOException;
+import java.text.ParseException;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;

@@ -260,7 +264,7 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
         setupFieldTypeDeprecatedParameters(context, ft);
         setupPrefixTrees(ft);
         ft.setGeometryIndexer(new LegacyGeoShapeIndexer(ft));
-        ft.setGeometryParser(ShapeParser::parse);
+        ft.setGeometryParser(new LegacyGeoShapeParser());
         ft.setGeometryQueryBuilder(new LegacyGeoShapeQueryProcessor(ft));
         ft.setOrientation(orientation == null ? Defaults.ORIENTATION.value() : orientation);
         return ft;

@@ -282,6 +286,28 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
         }
     }

+    private static class LegacyGeoShapeParser extends Parser<ShapeBuilder<?, ?, ?>> {
+        /**
+         * Note that this parser is only used for formatting values.
+         */
+        private final GeometryParser geometryParser;
+
+        private LegacyGeoShapeParser() {
+            this.geometryParser = new GeometryParser(true, true, true);
+        }
+
+        @Override
+        public ShapeBuilder<?, ?, ?> parse(XContentParser parser, AbstractGeometryFieldMapper mapper) throws IOException, ParseException {
+            return ShapeParser.parse(parser);
+        }
+
+        @Override
+        public Object format(ShapeBuilder<?, ?, ?> value, String format) {
+            Geometry geometry = value.buildGeometry();
+            return geometryParser.geometryFormat(format).toXContentAsObject(geometry);
+        }
+    }
+
     public static final class GeoShapeFieldType extends AbstractShapeGeometryFieldType<ShapeBuilder<?, ?, ?>, Shape> {

         private String tree = DeprecatedParameters.Defaults.TREE;

View File

@@ -752,6 +752,14 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
        return fieldTypes.simpleMatchToFullName(pattern);
    }

    /**
     * Given a field name, returns its possible paths in the _source. For example,
     * the 'source path' for a multi-field is the path to its parent field.
     */
    public Set<String> sourcePath(String fullName) {
        return fieldTypes.sourcePaths(fullName);
    }

    /**
     * Returns all mapped field types.
     */

View File

@@ -89,6 +89,11 @@ public abstract class MetadataFieldMapper extends FieldMapper {
        // do nothing
    }

    @Override
    protected Object parseSourceValue(Object value, String format) {
        throw new UnsupportedOperationException("The " + typeName() + " field is not stored in _source.");
    }

    @Override
    protected void mergeOptions(FieldMapper other, List<String> conflicts) { }

View File

@@ -1036,6 +1036,11 @@ public class NumberFieldMapper extends FieldMapper {
        return (NumberFieldMapper) super.clone();
    }

    @Override
    protected Number nullValue() {
        return nullValue;
    }

    @Override
    protected void parseCreateField(ParseContext context) throws IOException {
        XContentParser parser = context.parser();

@@ -1085,6 +1090,19 @@ public class NumberFieldMapper extends FieldMapper {
        }
    }

    @Override
    protected Number parseSourceValue(Object value, String format) {
        if (format != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
        }

        if (value.equals("")) {
            return nullValue;
        }
        return fieldType().type.parse(value, coerce.value());
    }

    @Override
    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
        NumberFieldMapper m = (NumberFieldMapper) other;

View File

@@ -53,6 +53,7 @@ import java.net.UnknownHostException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

@@ -380,6 +381,31 @@ public class RangeFieldMapper extends FieldMapper {
        }
    }

    @Override
    @SuppressWarnings("unchecked")
    protected Object parseSourceValue(Object value, String format) {
        RangeType rangeType = fieldType().rangeType();
        if (!(value instanceof Map)) {
            assert rangeType == RangeType.IP;
            Tuple<InetAddress, Integer> ipRange = InetAddresses.parseCidr(value.toString());
            return InetAddresses.toCidrString(ipRange.v1(), ipRange.v2());
        }

        DateFormatter dateTimeFormatter = fieldType().dateTimeFormatter();
        if (format != null) {
            dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale());
        }

        Map<String, Object> range = (Map<String, Object>) value;
        Map<String, Object> parsedRange = new HashMap<>();
        for (Map.Entry<String, Object> entry : range.entrySet()) {
            Object parsedValue = rangeType.parseValue(entry.getValue(), coerce.value(), fieldType().dateMathParser);
            Object formattedValue = rangeType.formatValue(parsedValue, dateTimeFormatter);
            parsedRange.put(entry.getKey(), formattedValue);
        }
        return parsedRange;
    }

    @Override
    protected void mergeOptions(FieldMapper other, List<String> conflicts) {
        RangeFieldMapper mergeWith = (RangeFieldMapper) other;
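For a date_range field this means each endpoint is parsed to a timestamp and printed back through the requested format, while the gte/lte keys pass through untouched. A java.time-only sketch with illustrative patterns:

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.LinkedHashMap;
import java.util.Map;

public class RangeSourceDemo {
    public static void main(String[] args) {
        Map<String, Object> range = new LinkedHashMap<>();
        range.put("gte", "2009-11-15T14:12:12");
        range.put("lte", "2009-11-16T14:12:12");

        DateTimeFormatter in = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(ZoneOffset.UTC);
        DateTimeFormatter out = DateTimeFormatter.ofPattern("yyyy/MM/dd").withZone(ZoneOffset.UTC);

        Map<String, Object> formatted = new LinkedHashMap<>();
        for (Map.Entry<String, Object> entry : range.entrySet()) {
            // Parse each endpoint to millis, then reformat, as parseSourceValue does above.
            long millis = Instant.from(in.parse(entry.getValue().toString())).toEpochMilli();
            formatted.put(entry.getKey(), out.format(Instant.ofEpochMilli(millis)));
        }
        System.out.println(formatted); // {gte=2009/11/15, lte=2009/11/16}
    }
}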

View File

@@ -37,14 +37,17 @@ import org.apache.lucene.util.FutureArrays;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.geo.ShapeRelation;
 import org.elasticsearch.common.network.InetAddresses;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.time.DateMathParser;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryShardContext;

 import java.io.IOException;
 import java.net.InetAddress;
+import java.time.Instant;
 import java.time.ZoneId;
 import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;

@@ -69,8 +72,9 @@ public enum RangeType {
             InetAddress address = InetAddresses.forString(parser.text());
             return included ? address : nextDown(address);
         }

         @Override
-        public InetAddress parse(Object value, boolean coerce) {
+        public InetAddress parseValue(Object value, boolean coerce, @Nullable DateMathParser dateMathParser) {
             if (value instanceof InetAddress) {
                 return (InetAddress) value;
             } else {

@@ -80,6 +84,12 @@ public enum RangeType {
                 return InetAddresses.forString(value.toString());
             }
         }

+        @Override
+        public Object formatValue(Object value, DateFormatter dateFormatter) {
+            return InetAddresses.toAddrString((InetAddress) value);
+        }
+
         @Override
         public InetAddress minValue() {
             return InetAddressPoint.MIN_VALUE;

@@ -170,22 +180,34 @@ public enum RangeType {
         public Field getRangeField(String name, RangeFieldMapper.Range r) {
             return new LongRange(name, new long[] {((Number)r.from).longValue()}, new long[] {((Number)r.to).longValue()});
         }

-        private Number parse(DateMathParser dateMathParser, String dateStr) {
-            return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");})
-                .toEpochMilli();
-        }
-
         @Override
         public Number parseFrom(RangeFieldMapper.RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included)
                 throws IOException {
-            Number value = parse(fieldType.dateMathParser, parser.text());
+            Number value = parseValue(parser.text(), coerce, fieldType.dateMathParser);
             return included ? value : nextUp(value);
         }

         @Override
         public Number parseTo(RangeFieldMapper.RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included)
                 throws IOException{
-            Number value = parse(fieldType.dateMathParser, parser.text());
+            Number value = parseValue(parser.text(), coerce, fieldType.dateMathParser);
             return included ? value : nextDown(value);
         }

+        @Override
+        public Long parseValue(Object dateStr, boolean coerce, @Nullable DateMathParser dateMathParser) {
+            assert dateMathParser != null;
+            return dateMathParser.parse(dateStr.toString(), () -> {
+                throw new IllegalArgumentException("now is not used at indexing time");
+            }).toEpochMilli();
+        }
+
+        @Override
+        public Object formatValue(Object value, DateFormatter dateFormatter) {
+            long timestamp = (long) value;
+            ZonedDateTime dateTime = Instant.ofEpochMilli(timestamp).atZone(ZoneOffset.UTC);
+            return dateFormatter.format(dateTime);
+        }
+
         @Override
         public Long minValue() {
             return Long.MIN_VALUE;

@@ -243,6 +265,7 @@ public enum RangeType {
             return createRangeQuery(field, hasDocValues, low, high, includeLower, includeUpper, relation);
         }

         @Override
         public Query withinQuery(String field, Object from, Object to, boolean includeLower, boolean includeUpper) {
             return LONG.withinQuery(field, from, to, includeLower, includeUpper);

@@ -598,6 +621,15 @@ public enum RangeType {
         }
         return fields;
     }

+    public Object parseValue(Object value, boolean coerce, @Nullable DateMathParser dateMathParser) {
+        return numberType.parse(value, coerce);
+    }
+
+    public Object formatValue(Object value, DateFormatter formatter) {
+        return value;
+    }
+
     /** parses from value. rounds according to included flag */
     public Object parseFrom(RangeFieldMapper.RangeFieldType fieldType, XContentParser parser, boolean coerce,
                             boolean included) throws IOException {

@@ -618,15 +650,12 @@ public enum RangeType {
     public abstract Query withinQuery(String field, Object from, Object to, boolean includeFrom, boolean includeTo);
     public abstract Query containsQuery(String field, Object from, Object to, boolean includeFrom, boolean includeTo);
     public abstract Query intersectsQuery(String field, Object from, Object to, boolean includeFrom, boolean includeTo);

-    public Object parse(Object value, boolean coerce) {
-        return numberType.parse(value, coerce);
-    }
-
     public Query rangeQuery(String field, boolean hasDocValues, Object from, Object to, boolean includeFrom, boolean includeTo,
                             ShapeRelation relation, @Nullable ZoneId timeZone, @Nullable DateMathParser dateMathParser,
                             QueryShardContext context) {
-        Object lower = from == null ? minValue() : parse(from, false);
-        Object upper = to == null ? maxValue() : parse(to, false);
+        Object lower = from == null ? minValue() : parseValue(from, false, dateMathParser);
+        Object upper = to == null ? maxValue() : parseValue(to, false, dateMathParser);
         return createRangeQuery(field, hasDocValues, lower, upper, includeFrom, includeTo, relation);
     }

View File

@@ -500,6 +500,11 @@ public class TextFieldMapper extends FieldMapper {
        throw new UnsupportedOperationException();
    }

    @Override
    protected Object parseSourceValue(Object value, String format) {
        throw new UnsupportedOperationException();
    }

    @Override
    protected void mergeOptions(FieldMapper other, List<String> conflicts) {

@@ -526,6 +531,11 @@ public class TextFieldMapper extends FieldMapper {
        throw new UnsupportedOperationException();
    }

    @Override
    protected Object parseSourceValue(Object value, String format) {
        throw new UnsupportedOperationException();
    }

    @Override
    protected void mergeOptions(FieldMapper other, List<String> conflicts) {

@@ -828,6 +838,14 @@ public class TextFieldMapper extends FieldMapper {
        }
    }

    @Override
    protected String parseSourceValue(Object value, String format) {
        if (format != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
        }
        return value.toString();
    }

    @Override
    public Iterator<Mapper> iterator() {
        List<Mapper> subIterators = new ArrayList<>();

View File

@@ -34,8 +34,8 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
 import org.elasticsearch.search.collapse.CollapseBuilder;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
-import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
+import org.elasticsearch.search.fetch.subphase.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.SortBuilder;

View File

@@ -58,6 +58,7 @@ import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;

@@ -114,6 +115,7 @@ final class DefaultSearchContext extends SearchContext {
    private ScriptFieldsContext scriptFields;
    private FetchSourceContext fetchSourceContext;
    private FetchDocValuesContext docValuesContext;
    private FetchFieldsContext fetchFieldsContext;
    private int from = -1;
    private int size = -1;
    private SortAndFormats sort;

@@ -476,6 +478,17 @@ final class DefaultSearchContext extends SearchContext {
        return this;
    }

    @Override
    public FetchFieldsContext fetchFieldsContext() {
        return fetchFieldsContext;
    }

    @Override
    public SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext) {
        this.fetchFieldsContext = fetchFieldsContext;
        return this;
    }

    @Override
    public ContextIndexSearcher searcher() {
        return this.searcher;

View File

@@ -248,6 +248,8 @@ import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesPhase;
import org.elasticsearch.search.fetch.subphase.ExplainPhase;
import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase;
import org.elasticsearch.search.fetch.subphase.FetchScorePhase;
import org.elasticsearch.search.fetch.subphase.FetchSourcePhase;
import org.elasticsearch.search.fetch.subphase.MatchedQueriesPhase;
import org.elasticsearch.search.fetch.subphase.FetchScorePhase;

@@ -821,6 +823,7 @@ public class SearchModule {
        registerFetchSubPhase(new FetchDocValuesPhase());
        registerFetchSubPhase(new ScriptFieldsPhase());
        registerFetchSubPhase(new FetchSourcePhase());
        registerFetchSubPhase(new FetchFieldsPhase());
        registerFetchSubPhase(new FetchVersionPhase());
        registerFetchSubPhase(new SeqNoPrimaryTermPhase());
        registerFetchSubPhase(new MatchedQueriesPhase());

View File

@@ -86,6 +86,7 @@ import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.AliasFilter;

@@ -918,6 +919,12 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
            FetchDocValuesContext docValuesContext = FetchDocValuesContext.create(context.mapperService(), source.docValueFields());
            context.docValuesContext(docValuesContext);
        }
        if (source.fetchFields() != null) {
            String indexName = context.indexShard().shardId().getIndexName();
            FetchFieldsContext fetchFieldsContext = FetchFieldsContext.create(
                indexName, context.mapperService(), source.fetchFields());
            context.fetchFieldsContext(fetchFieldsContext);
        }
        if (source.highlighter() != null) {
            HighlightBuilder highlightBuilder = source.highlighter();
            try {

View File

@@ -39,8 +39,8 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
-import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
+import org.elasticsearch.search.fetch.subphase.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.ScoreSortBuilder;


@@ -26,8 +26,8 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.FieldAndFormat;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.SearchContext;


@@ -48,8 +48,8 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.collapse.CollapseBuilder;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.FieldAndFormat;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.rescore.RescorerBuilder;
@@ -96,6 +96,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
public static final ParseField _SOURCE_FIELD = new ParseField("_source");
public static final ParseField STORED_FIELDS_FIELD = new ParseField("stored_fields");
public static final ParseField DOCVALUE_FIELDS_FIELD = new ParseField("docvalue_fields");
public static final ParseField FETCH_FIELDS_FIELD = new ParseField("fields");
public static final ParseField SCRIPT_FIELDS_FIELD = new ParseField("script_fields");
public static final ParseField SCRIPT_FIELD = new ParseField("script");
public static final ParseField IGNORE_FAILURE_FIELD = new ParseField("ignore_failure");
@@ -172,6 +173,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
private List<FieldAndFormat> docValueFields;
private List<ScriptField> scriptFields;
private FetchSourceContext fetchSourceContext;
private List<FieldAndFormat> fetchFields;
private AggregatorFactories.Builder aggregations;
@@ -264,6 +266,11 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
} else {
trackTotalHitsUpTo = in.readBoolean() ? TRACK_TOTAL_HITS_ACCURATE : TRACK_TOTAL_HITS_DISABLED;
}
if (in.getVersion().onOrAfter(Version.V_7_10_0)) {
if (in.readBoolean()) {
fetchFields = in.readList(FieldAndFormat::new);
}
}
}
@Override
@@ -330,6 +337,12 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
} else {
out.writeBoolean(trackTotalHitsUpTo == null ? true : trackTotalHitsUpTo > SearchContext.TRACK_TOTAL_HITS_DISABLED);
}
if (out.getVersion().onOrAfter(Version.V_7_10_0)) {
out.writeBoolean(fetchFields != null);
if (fetchFields != null) {
out.writeList(fetchFields);
}
}
}
/**
@@ -856,6 +869,33 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
return docValueField(name, null);
}
/**
* Gets the fields to load and return as part of the search request.
*/
public List<FieldAndFormat> fetchFields() {
return fetchFields;
}
/**
* Adds a field to load and return as part of the search request.
*/
public SearchSourceBuilder fetchField(String name) {
return fetchField(name, null);
}
/**
* Adds a field to load and return as part of the search request.
* @param name the field name.
* @param format an optional format string used when formatting values, for example a date format.
*/
public SearchSourceBuilder fetchField(String name, @Nullable String format) {
if (fetchFields == null) {
fetchFields = new ArrayList<>();
}
fetchFields.add(new FieldAndFormat(name, format));
return this;
}
/**
* Adds a script field under the given name with the provided script.
*
@@ -1162,6 +1202,11 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
docValueFields.add(FieldAndFormat.fromXContent(parser));
}
} else if (FETCH_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
fetchFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
fetchFields.add(FieldAndFormat.fromXContent(parser));
}
} else if (INDICES_BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
indexBoosts.add(new IndexBoost(parser));
@@ -1259,12 +1304,15 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
if (docValueFields != null) {
builder.startArray(DOCVALUE_FIELDS_FIELD.getPreferredName());
for (FieldAndFormat docValueField : docValueFields) {
builder.startObject()
.field("field", docValueField.field);
if (docValueField.format != null) {
builder.field("format", docValueField.format);
}
builder.endObject();
docValueField.toXContent(builder, params);
}
builder.endArray();
}

if (fetchFields != null) {
builder.startArray(FETCH_FIELDS_FIELD.getPreferredName());
for (FieldAndFormat docValueField : fetchFields) {
docValueField.toXContent(builder, params);
}
builder.endArray();
}


@@ -106,7 +106,8 @@ public class FetchPhase implements SearchPhase {
if (!context.hasScriptFields() && !context.hasFetchSourceContext()) {
context.fetchSourceContext(new FetchSourceContext(true));
}
fieldsVisitor = new FieldsVisitor(context.sourceRequested());
boolean loadSource = context.sourceRequested() || context.fetchFieldsContext() != null;
fieldsVisitor = new FieldsVisitor(loadSource);
} else if (storedFieldsContext.fetchFields() == false) {
// disable stored fields entirely
fieldsVisitor = null;
@@ -135,7 +136,7 @@
}
}
}
boolean loadSource = context.sourceRequested();
boolean loadSource = context.sourceRequested() || context.fetchFieldsContext() != null;
if (storedToRequestedFields.isEmpty()) {
// empty list specified, default to disable _source if no explicit indication
fieldsVisitor = new FieldsVisitor(loadSource);


@@ -18,103 +18,17 @@
*/
package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
/**
* All the required context to pull a field from the doc values.
*/
public class FetchDocValuesContext {
/**
* Wrapper around a field name and the format that should be used to
* display values of this field.
*/
public static final class FieldAndFormat implements Writeable {
private static final ConstructingObjectParser<FieldAndFormat, Void> PARSER = new ConstructingObjectParser<>("docvalues_field",
a -> new FieldAndFormat((String) a[0], (String) a[1]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("field"));
PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("format"));
}
/**
* Parse a {@link FieldAndFormat} from some {@link XContent}.
*/
public static FieldAndFormat fromXContent(XContentParser parser) throws IOException {
Token token = parser.currentToken();
if (token.isValue()) {
return new FieldAndFormat(parser.text(), null);
} else {
return PARSER.apply(parser, null);
}
}
/** The name of the field. */
public final String field;
/** The format of the field, or {@code null} if defaults should be used. */
public final String format;
/** Sole constructor. */
public FieldAndFormat(String field, @Nullable String format) {
this.field = Objects.requireNonNull(field);
this.format = format;
}
/** Serialization constructor. */
public FieldAndFormat(StreamInput in) throws IOException {
this.field = in.readString();
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
format = in.readOptionalString();
} else {
format = null;
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(field);
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
out.writeOptionalString(format);
}
}
@Override
public int hashCode() {
int h = field.hashCode();
h = 31 * h + Objects.hashCode(format);
return h;
}
@Override
public boolean equals(Object obj) {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
FieldAndFormat other = (FieldAndFormat) obj;
return field.equals(other.field) && Objects.equals(format, other.format);
}
}
private final List<FieldAndFormat> fields;
public static FetchDocValuesContext create(MapperService mapperService,


@@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;


@@ -0,0 +1,53 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import java.util.List;
/**
* The context needed to retrieve fields.
*/
public class FetchFieldsContext {
private FieldValueRetriever fieldValueRetriever;
public static FetchFieldsContext create(String indexName,
MapperService mapperService,
List<FieldAndFormat> fields) {
DocumentMapper documentMapper = mapperService.documentMapper();
if (documentMapper.sourceMapper().enabled() == false) {
throw new IllegalArgumentException("Unable to retrieve the requested [fields] since _source is " +
"disabled in the mappings for index [" + indexName + "]");
}
FieldValueRetriever fieldValueRetriever = FieldValueRetriever.create(mapperService, fields);
return new FetchFieldsContext(fieldValueRetriever);
}
private FetchFieldsContext(FieldValueRetriever fieldValueRetriever) {
this.fieldValueRetriever = fieldValueRetriever;
}
public FieldValueRetriever fieldValueRetriever() {
return fieldValueRetriever;
}
}
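Because the retrieved values come out of _source, the context refuses to be created at all when _source is disabled, rather than silently returning empty fields per hit. A test-style sketch of the failure, assuming a mapperService fixture for an index mapped with "_source": {"enabled": false}:

    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
        () -> FetchFieldsContext.create("my-index", mapperService,
            Collections.singletonList(new FieldAndFormat("user", null))));
    assertThat(e.getMessage(), containsString("disabled in the mappings for index [my-index]"));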


@@ -0,0 +1,69 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* A fetch sub-phase for high-level field retrieval. Given a list of fields, it
* retrieves the field values from _source and returns them as document fields.
*/
public final class FetchFieldsPhase implements FetchSubPhase {
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
FetchFieldsContext fetchFieldsContext = context.fetchFieldsContext();
if (fetchFieldsContext == null) {
return;
}
SearchHit hit = hitContext.hit();
SourceLookup sourceLookup = context.lookup().source();
FieldValueRetriever fieldValueRetriever = fetchFieldsContext.fieldValueRetriever();
Set<String> ignoredFields = getIgnoredFields(hit);
Map<String, DocumentField> documentFields = fieldValueRetriever.retrieve(sourceLookup, ignoredFields);
for (Map.Entry<String, DocumentField> entry : documentFields.entrySet()) {
hit.setDocumentField(entry.getKey(), entry.getValue());
}
}
private Set<String> getIgnoredFields(SearchHit hit) {
DocumentField field = hit.field(IgnoredFieldMapper.NAME);
if (field == null) {
return org.elasticsearch.common.collect.Set.of();
}
Set<String> ignoredFields = new HashSet<>();
for (Object value : field.getValues()) {
ignoredFields.add((String) value);
}
return ignoredFields;
}
}
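The document fields set here surface on each hit of the response. A sketch of reading them back, assuming an initialized high-level REST client and the request built earlier:

    SearchResponse response = client.search(searchRequest, RequestOptions.DEFAULT);
    for (SearchHit hit : response.getHits()) {
        DocumentField user = hit.field("user");
        if (user != null) {                    // null when the field was missing or ignored
            Object first = user.getValue();    // first value
            List<Object> all = user.getValues();
        }
    }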


@@ -0,0 +1,121 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
* Wrapper around a field name and the format that should be used to
* display values of this field.
*/
public final class FieldAndFormat implements Writeable, ToXContentObject {
private static final ParseField FIELD_FIELD = new ParseField("field");
private static final ParseField FORMAT_FIELD = new ParseField("format");
private static final ConstructingObjectParser<FieldAndFormat, Void> PARSER = new ConstructingObjectParser<>("docvalues_field",
a -> new FieldAndFormat((String) a[0], (String) a[1]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("field"));
PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("format"));
}
/**
* Parse a {@link FieldAndFormat} from some {@link XContent}.
*/
public static FieldAndFormat fromXContent(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken();
if (token.isValue()) {
return new FieldAndFormat(parser.text(), null);
} else {
return PARSER.apply(parser, null);
}
}
/** The name of the field. */
public final String field;
/** The format of the field, or {@code null} if defaults should be used. */
public final String format;
/** Sole constructor. */
public FieldAndFormat(String field, @Nullable String format) {
this.field = Objects.requireNonNull(field);
this.format = format;
}
/** Serialization constructor. */
public FieldAndFormat(StreamInput in) throws IOException {
this.field = in.readString();
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
format = in.readOptionalString();
} else {
format = null;
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(field);
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
out.writeOptionalString(format);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FieldAndFormat that = (FieldAndFormat) o;
return Objects.equals(field, that.field) &&
Objects.equals(format, that.format);
}
@Override
public int hashCode() {
int h = field.hashCode();
h = 31 * h + Objects.hashCode(format);
return h;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(FIELD_FIELD.getPreferredName(), field);
if (format != null) {
builder.field(FORMAT_FIELD.getPreferredName(), format);
}
builder.endObject();
return builder;
}
}
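fromXContent accepts either a bare field name or an object, so "fields": ["user", {"field": "date", "format": "epoch_millis"}] parses into two instances. A parsing sketch (the registry and deprecation-handler choices here are illustrative):

    String json = "{\"field\": \"date\", \"format\": \"epoch_millis\"}";
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(
            NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        parser.nextToken();  // advance onto START_OBJECT before parsing
        FieldAndFormat fieldAndFormat = FieldAndFormat.fromXContent(parser);
        assert "date".equals(fieldAndFormat.field) && "epoch_millis".equals(fieldAndFormat.format);
    }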


@@ -0,0 +1,107 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.lookup.SourceLookup;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* A helper class to {@link FetchFieldsPhase} that's initialized with a list of field patterns to fetch.
* Then given a specific document, it can retrieve the corresponding fields from the document's source.
*/
public class FieldValueRetriever {
private final DocumentFieldMappers fieldMappers;
private final List<FieldContext> fieldContexts;
public static FieldValueRetriever create(MapperService mapperService,
Collection<FieldAndFormat> fieldAndFormats) {
DocumentFieldMappers fieldMappers = mapperService.documentMapper().mappers();
List<FieldContext> fields = new ArrayList<>();
for (FieldAndFormat fieldAndFormat : fieldAndFormats) {
String fieldPattern = fieldAndFormat.field;
String format = fieldAndFormat.format;
Collection<String> concreteFields = mapperService.simpleMatchToFullName(fieldPattern);
for (String field : concreteFields) {
if (fieldMappers.getMapper(field) != null && mapperService.isMetadataField(field) == false) {
Set<String> sourcePath = mapperService.sourcePath(field);
fields.add(new FieldContext(field, sourcePath, format));
}
}
}
return new FieldValueRetriever(fieldMappers, fields);
}
private FieldValueRetriever(DocumentFieldMappers fieldMappers,
List<FieldContext> fieldContexts) {
this.fieldMappers = fieldMappers;
this.fieldContexts = fieldContexts;
}
public Map<String, DocumentField> retrieve(SourceLookup sourceLookup, Set<String> ignoredFields) {
Map<String, DocumentField> documentFields = new HashMap<>();
for (FieldContext context : fieldContexts) {
String field = context.fieldName;
if (ignoredFields.contains(field)) {
continue;
}
List<Object> parsedValues = new ArrayList<>();
for (String path : context.sourcePath) {
FieldMapper fieldMapper = (FieldMapper) fieldMappers.getMapper(path);
List<?> values = fieldMapper.lookupValues(sourceLookup, context.format);
parsedValues.addAll(values);
}
if (parsedValues.isEmpty() == false) {
documentFields.put(field, new DocumentField(field, parsedValues));
}
}
return documentFields;
}
private static class FieldContext {
final String fieldName;
final Set<String> sourcePath;
final @Nullable String format;
FieldContext(String fieldName,
Set<String> sourcePath,
@Nullable String format) {
this.fieldName = fieldName;
this.sourcePath = sourcePath;
this.format = format;
}
}
}
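create expands every requested pattern through MapperService#simpleMatchToFullName, so wildcards work, and unmapped or metadata fields are silently dropped. A hypothetical end-to-end sketch, assuming a mapperService fixture with a mapped "user" field:

    FieldValueRetriever retriever = FieldValueRetriever.create(mapperService,
        Collections.singletonList(new FieldAndFormat("user*", null)));

    SourceLookup sourceLookup = new SourceLookup();
    sourceLookup.setSource(Collections.singletonMap("user", "kimchy"));

    Map<String, DocumentField> fields = retriever.retrieve(sourceLookup, Collections.emptySet());
    // fields.get("user").getValues() -> ["kimchy"]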


@@ -50,6 +50,7 @@ import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
@@ -201,6 +202,16 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
public abstract SearchContext docValuesContext(FetchDocValuesContext docValuesContext);
/**
* The context related to retrieving fields.
*/
public abstract FetchFieldsContext fetchFieldsContext();
/**
* Sets the context related to retrieving fields.
*/
public abstract SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext);
public abstract ContextIndexSearcher searcher();
public abstract IndexShard indexShard();


@@ -26,6 +26,7 @@ import org.elasticsearch.search.collapse.CollapseContext;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
@@ -59,6 +60,7 @@ public class SubSearchContext extends FilteredSearchContext {
private ScriptFieldsContext scriptFields;
private FetchSourceContext fetchSourceContext;
private FetchDocValuesContext docValuesContext;
private FetchFieldsContext fetchFieldsContext;
private SearchContextHighlight highlight;
private boolean explain;
@@ -160,6 +162,17 @@
return this;
}
@Override
public FetchFieldsContext fetchFieldsContext() {
return fetchFieldsContext;
}
@Override
public SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext) {
this.fetchFieldsContext = fetchFieldsContext;
return this;
}
@Override
public void timeout(TimeValue timeout) {
throw new UnsupportedOperationException("Not supported");


@@ -21,6 +21,7 @@ package org.elasticsearch.search.lookup;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -132,6 +133,22 @@ public class SourceLookup implements Map {
return XContentMapValues.extractRawValues(path, loadSourceIfNeeded());
}
/**
* For the provided path, return its value in the source.
*
* Note that in contrast with {@link SourceLookup#extractRawValues}, array and object values
* can be returned.
*
* @param path the value's path in the source.
* @param nullValue a value to return if the path exists, but the value is 'null'. This helps
* in distinguishing between a path that doesn't exist vs. a value of 'null'.
*
* @return the value associated with the path in the source or 'null' if the path does not exist.
*/
public Object extractValue(String path, @Nullable Object nullValue) {
return XContentMapValues.extractValue(path, loadSourceIfNeeded(), nullValue);
}
public Object filter(FetchSourceContext context) {
return context.getFilter().apply(loadSourceIfNeeded());
}
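The nullValue sentinel is what lets field retrieval distinguish an explicit null in the document (which should be replaced by the mapper's null_value) from a path that is simply absent. A short sketch, mirroring the tests below:

    SourceLookup lookup = new SourceLookup();
    lookup.setSource(Collections.singletonMap("field", null));

    Object explicitNull = lookup.extractValue("field", "NULL");  // "NULL": path exists, value is null
    Object missing = lookup.extractValue("missing", "NULL");     // null: path does not exist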


@@ -164,6 +164,37 @@ public class XContentMapValuesTests extends AbstractFilteringTestCase {
assertThat(XContentMapValues.extractValue("path1.xxx.path2.yyy.test", map).toString(), equalTo("value"));
}
public void testExtractValueWithNullValue() throws Exception {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
.field("field", "value")
.nullField("other_field")
.array("array", "value1", null, "value2")
.startObject("object1")
.startObject("object2").nullField("field").endObject()
.endObject()
.startArray("object_array")
.startObject().nullField("field").endObject()
.startObject().field("field", "value").endObject()
.endArray()
.endObject();
Map<String, Object> map;
try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) {
map = parser.map();
}
assertEquals("value", XContentMapValues.extractValue("field", map, "NULL"));
assertNull(XContentMapValues.extractValue("missing", map, "NULL"));
assertNull(XContentMapValues.extractValue("field.missing", map, "NULL"));
assertNull(XContentMapValues.extractValue("object1.missing", map, "NULL"));
assertEquals("NULL", XContentMapValues.extractValue("other_field", map, "NULL"));
assertEquals(org.elasticsearch.common.collect.List.of("value1", "NULL", "value2"),
XContentMapValues.extractValue("array", map, "NULL"));
assertEquals(org.elasticsearch.common.collect.List.of("NULL", "value"),
XContentMapValues.extractValue("object_array.field", map, "NULL"));
assertEquals("NULL", XContentMapValues.extractValue("object1.object2.field", map, "NULL"));
}
public void testExtractRawValue() throws Exception {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
.field("test", "value")


@@ -21,6 +21,7 @@ package org.elasticsearch.index.get;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Map;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -121,11 +122,27 @@ public class DocumentFieldTests extends ESTestCase {
}
return Tuple.tuple(documentField, documentField);
} else {
String fieldName = randomAlphaOfLengthBetween(3, 10);
Tuple<List<Object>, List<Object>> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType);
DocumentField input = new DocumentField(fieldName, tuple.v1());
DocumentField expected = new DocumentField(fieldName, tuple.v2());
return Tuple.tuple(input, expected);
switch (randomIntBetween(0, 2)) {
case 0:
String fieldName = randomAlphaOfLengthBetween(3, 10);
Tuple<List<Object>, List<Object>> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType);
DocumentField input = new DocumentField(fieldName, tuple.v1());
DocumentField expected = new DocumentField(fieldName, tuple.v2());
return Tuple.tuple(input, expected);
case 1:
List<Object> listValues = randomList(1, 5, () -> randomList(1, 5, ESTestCase::randomInt));
DocumentField listField = new DocumentField(randomAlphaOfLength(5), listValues);
return Tuple.tuple(listField, listField);
case 2:
List<Object> objectValues = randomList(1, 5, () ->
Map.of(randomAlphaOfLength(5), randomInt(),
randomAlphaOfLength(5), randomBoolean(),
randomAlphaOfLength(5), randomAlphaOfLength(10)));
DocumentField objectField = new DocumentField(randomAlphaOfLength(5), objectValues);
return Tuple.tuple(objectField, objectField);
default:
throw new IllegalStateException();
}
}
}
}


@@ -33,9 +33,13 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@@ -44,6 +48,7 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
@@ -51,6 +56,8 @@ import org.junit.Before;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.containsString;
@@ -293,4 +300,22 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(new BoostQuery(new TermQuery(new Term("field", "T")), 2.0f), ft.termQuery("true", null));
}
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
BooleanFieldMapper mapper = new BooleanFieldMapper.Builder("field").build(context);
assertTrue(mapper.parseSourceValue(true, null));
assertFalse(mapper.parseSourceValue("false", null));
assertFalse(mapper.parseSourceValue("", null));
Map<String, Object> mapping = org.elasticsearch.common.collect.Map.of("type", "boolean", "null_value", true);
BooleanFieldMapper.Builder builder = new BooleanFieldMapper.Builder("field");
builder.parse("field", null, new HashMap<>(mapping));
BooleanFieldMapper nullValueMapper = builder.build(context);
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(Collections.singletonMap("field", null));
assertEquals(List.of(true), nullValueMapper.lookupValues(sourceLookup, null));
}
}


@@ -18,6 +18,7 @@
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
@@ -31,6 +32,8 @@ import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
@@ -42,6 +45,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.hamcrest.FeatureMatcher;
@@ -51,6 +55,7 @@ import org.hamcrest.core.CombinableMatcher;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
@@ -935,6 +940,22 @@
"[" + COMPLETION_CONTEXTS_LIMIT + "] starting in version [8.0].");
}
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer());
CompletionFieldMapper mapper = new CompletionFieldMapper.Builder("completion", defaultAnalyzer).build(context);
assertEquals(org.elasticsearch.common.collect.List.of("value"), mapper.parseSourceValue("value", null));
List<String> list = org.elasticsearch.common.collect.List.of("first", "second");
assertEquals(list, mapper.parseSourceValue(list, null));
Map<String, Object> object = org.elasticsearch.common.collect.Map.of("input",
org.elasticsearch.common.collect.List.of("first", "second"), "weight", "2.718");
assertEquals(org.elasticsearch.common.collect.List.of(object), mapper.parseSourceValue(object, null));
}
private Matcher<IndexableField> suggestField(String value) {
return Matchers.allOf(hasProperty(IndexableField::stringValue, equalTo(value)),
Matchers.instanceOf(SuggestField.class));


@@ -21,17 +21,22 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.bootstrap.JavaVersion;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DateFieldMapper.Resolution;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
@@ -42,6 +47,8 @@ import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.notNullValue;
@@ -446,4 +453,72 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(mapping3, mapper.mappingSource().toString());
}
public void testParseSourceValue() {
DateFieldMapper mapper = createMapper(Resolution.MILLISECONDS, null);
String date = "2020-05-15T21:33:02.000Z";
assertEquals(date, mapper.parseSourceValue(date, null));
assertEquals(date, mapper.parseSourceValue(1589578382000L, null));
DateFieldMapper mapperWithFormat = createMapper(Resolution.MILLISECONDS, "yyyy/MM/dd||epoch_millis");
String dateInFormat = "1990/12/29";
assertEquals(dateInFormat, mapperWithFormat.parseSourceValue(dateInFormat, null));
assertEquals(dateInFormat, mapperWithFormat.parseSourceValue(662428800000L, null));
DateFieldMapper mapperWithMillis = createMapper(Resolution.MILLISECONDS, "epoch_millis");
String dateInMillis = "662428800000";
assertEquals(dateInMillis, mapperWithMillis.parseSourceValue(dateInMillis, null));
assertEquals(dateInMillis, mapperWithMillis.parseSourceValue(662428800000L, null));
String nullValueDate = "2020-05-15T21:33:02.000Z";
DateFieldMapper nullValueMapper = createMapper(Resolution.MILLISECONDS, null, nullValueDate);
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(Collections.singletonMap("field", null));
assertEquals(org.elasticsearch.common.collect.List.of(nullValueDate), nullValueMapper.lookupValues(sourceLookup, null));
}
public void testParseSourceValueWithFormat() {
DateFieldMapper mapper = createMapper(Resolution.NANOSECONDS, "strict_date_time", "1970-12-29T00:00:00.000Z");
String date = "1990-12-29T00:00:00.000Z";
assertEquals("1990/12/29", mapper.parseSourceValue(date, "yyyy/MM/dd"));
assertEquals("662428800000", mapper.parseSourceValue(date, "epoch_millis"));
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(Collections.singletonMap("field", null));
assertEquals(org.elasticsearch.common.collect.List.of("1970/12/29"), mapper.lookupValues(sourceLookup, "yyyy/MM/dd"));
}
public void testParseSourceValueNanos() {
DateFieldMapper mapper = createMapper(Resolution.NANOSECONDS, "strict_date_time||epoch_millis");
String date = "2020-05-15T21:33:02.123456789Z";
assertEquals("2020-05-15T21:33:02.123456789Z", mapper.parseSourceValue(date, null));
assertEquals("2020-05-15T21:33:02.123Z", mapper.parseSourceValue(1589578382123L, null));
String nullValueDate = "2020-05-15T21:33:02.123456789Z";
DateFieldMapper nullValueMapper = createMapper(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", nullValueDate);
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(Collections.singletonMap("field", null));
assertEquals(org.elasticsearch.common.collect.List.of(nullValueDate), nullValueMapper.lookupValues(sourceLookup, null));
}
private DateFieldMapper createMapper(Resolution resolution, String format) {
return createMapper(resolution, format, null);
}
private DateFieldMapper createMapper(Resolution resolution, String format, String nullValue) {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
Map<String, Object> mapping = new HashMap<>();
mapping.put("type", "date_nanos");
if (format != null) {
mapping.put("format", format);
}
if (nullValue != null) {
mapping.put("null_value", nullValue);
}
DateFieldMapper.Builder builder = new DateFieldMapper.Builder("field", Version.CURRENT, resolution, null, false);
builder.parse("field", null, mapping);
return builder.build(context);
}
}


@@ -102,6 +102,11 @@ public class DocumentFieldMapperTests extends LuceneTestCase {
protected void parseCreateField(ParseContext context) throws IOException {
}
@Override
protected Object parseSourceValue(Object value, String format) {
throw new UnsupportedOperationException();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {


@@ -204,6 +204,11 @@ public class ExternalMapper extends FieldMapper {
throw new UnsupportedOperationException();
}
@Override
protected Object parseSourceValue(Object value, String format) {
return value;
}
@Override
public Iterator<Mapper> iterator() {
return Iterators.concat(super.iterator(), Arrays.asList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator());


@@ -133,6 +133,11 @@ public class FakeStringFieldMapper extends FieldMapper {
}
}
@Override
protected String parseSourceValue(Object value, String format) {
return value.toString();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {


@@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.collect.Set;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
@@ -27,6 +28,7 @@ import java.util.Collections;
import java.util.Iterator;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
public class FieldTypeLookupTests extends ESTestCase {
@@ -77,6 +79,59 @@
assertTrue(names.contains("barometer"));
}
public void testSourcePathWithMultiFields() {
Mapper.BuilderContext context = new Mapper.BuilderContext(
MockFieldMapper.DEFAULT_SETTINGS, new ContentPath());
MockFieldMapper field = new MockFieldMapper.Builder("field")
.addMultiField(new MockFieldMapper.Builder("field.subfield1"))
.addMultiField(new MockFieldMapper.Builder("field.subfield2"))
.build(context);
FieldTypeLookup lookup = new FieldTypeLookup(singletonList(field), emptyList());
assertEquals(Set.of("field"), lookup.sourcePaths("field"));
assertEquals(Set.of("field"), lookup.sourcePaths("field.subfield1"));
assertEquals(Set.of("field"), lookup.sourcePaths("field.subfield2"));
}
public void testSourcePathWithAliases() {
Mapper.BuilderContext context = new Mapper.BuilderContext(
MockFieldMapper.DEFAULT_SETTINGS, new ContentPath());
MockFieldMapper field = new MockFieldMapper.Builder("field")
.addMultiField(new MockFieldMapper.Builder("field.subfield"))
.build(context);
FieldAliasMapper alias1 = new FieldAliasMapper("alias1", "alias1", "field");
FieldAliasMapper alias2 = new FieldAliasMapper("alias2", "alias2", "field.subfield");
FieldTypeLookup lookup = new FieldTypeLookup(singletonList(field), Arrays.asList(alias1, alias2));
assertEquals(Set.of("field"), lookup.sourcePaths("alias1"));
assertEquals(Set.of("field"), lookup.sourcePaths("alias2"));
}
public void testSourcePathsWithCopyTo() {
Mapper.BuilderContext context = new Mapper.BuilderContext(
MockFieldMapper.DEFAULT_SETTINGS, new ContentPath());
MockFieldMapper field = new MockFieldMapper.Builder("field")
.addMultiField(new MockFieldMapper.Builder("field.subfield1"))
.build(context);
MockFieldMapper otherField = new MockFieldMapper.Builder("other_field")
.copyTo(new FieldMapper.CopyTo.Builder()
.add("field")
.build())
.build(context);
FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(field, otherField), emptyList());
assertEquals(Set.of("other_field", "field"), lookup.sourcePaths("field"));
assertEquals(Set.of("other_field", "field"), lookup.sourcePaths("field.subfield1"));
}
public void testIteratorImmutable() {
MockFieldMapper f1 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup(Collections.singletonList(f1), emptyList());


@@ -20,23 +20,30 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.geo.RandomGeoGenerator;
import org.hamcrest.CoreMatchers;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
@@ -586,6 +593,39 @@ public class GeoPointFieldMapperTests extends FieldMapperTestCase<GeoPointFieldM
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
}
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
AbstractGeometryFieldMapper<?, ?> mapper = new GeoPointFieldMapper.Builder("field").build(context);
SourceLookup sourceLookup = new SourceLookup();
Map<String, Object> jsonPoint = org.elasticsearch.common.collect.Map.of("type", "Point", "coordinates", List.of(42.0, 27.1));
Map<String, Object> otherJsonPoint = org.elasticsearch.common.collect.Map.of("type", "Point", "coordinates", List.of(30.0, 50.0));
String wktPoint = "POINT (42.0 27.1)";
String otherWktPoint = "POINT (30.0 50.0)";
// Test a single point in [lon, lat] array format.
sourceLookup.setSource(Collections.singletonMap("field", List.of(42.0, 27.1)));
assertEquals(List.of(jsonPoint), mapper.lookupValues(sourceLookup, null));
assertEquals(List.of(wktPoint), mapper.lookupValues(sourceLookup, "wkt"));
// Test a single point in "lat, lon" string format.
sourceLookup.setSource(Collections.singletonMap("field", "27.1,42.0"));
assertEquals(List.of(jsonPoint), mapper.lookupValues(sourceLookup, null));
assertEquals(List.of(wktPoint), mapper.lookupValues(sourceLookup, "wkt"));
// Test a list of points in [lon, lat] array format.
sourceLookup.setSource(Collections.singletonMap("field", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))));
assertEquals(List.of(jsonPoint, otherJsonPoint), mapper.lookupValues(sourceLookup, null));
assertEquals(List.of(wktPoint, otherWktPoint), mapper.lookupValues(sourceLookup, "wkt"));
// Test a single point in well-known text format.
sourceLookup.setSource(Collections.singletonMap("field", "POINT (42.0 27.1)"));
assertEquals(List.of(jsonPoint), mapper.lookupValues(sourceLookup, null));
assertEquals(List.of(wktPoint), mapper.lookupValues(sourceLookup, "wkt"));
}
@Override @Override
protected GeoPointFieldMapper.Builder newBuilder() { protected GeoPointFieldMapper.Builder newBuilder() {
return new GeoPointFieldMapper.Builder("geo"); return new GeoPointFieldMapper.Builder("geo");
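The geo_point assertions above pin down the retrieval contract: whatever form the point takes in _source (a [lon, lat] array, a "lat,lon" string, or WKT), lookupValues normalizes the output to GeoJSON by default, and the "wkt" format switches it to well-known text. A condensed sketch of that flow, reusing the test's own scaffolding (Mapper.BuilderContext, SourceLookup):

Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
AbstractGeometryFieldMapper<?, ?> mapper = new GeoPointFieldMapper.Builder("field").build(context);

SourceLookup lookup = new SourceLookup();
lookup.setSource(Collections.singletonMap("field", List.of(42.0, 27.1))); // [lon, lat] array form

mapper.lookupValues(lookup, null);  // [{type=Point, coordinates=[42.0, 27.1]}] -- GeoJSON map
mapper.lookupValues(lookup, "wkt"); // ["POINT (42.0 27.1)"]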

View File

@ -19,16 +19,21 @@
package org.elasticsearch.index.mapper; package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin;
import org.junit.Before; import org.junit.Before;
@ -36,6 +41,7 @@ import org.junit.Before;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.Map;
import java.util.Set; import java.util.Set;
import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE; import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE;
@ -358,4 +364,38 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldM
public String toXContentString(GeoShapeFieldMapper mapper) throws IOException { public String toXContentString(GeoShapeFieldMapper mapper) throws IOException {
return toXContentString(mapper, true); return toXContentString(mapper, true);
} }
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
GeoShapeFieldMapper mapper = new GeoShapeFieldMapper.Builder("field").build(context);
SourceLookup sourceLookup = new SourceLookup();
Map<String, Object> jsonLineString = org.elasticsearch.common.collect.Map.of("type", "LineString", "coordinates",
List.of(List.of(42.0, 27.1), List.of(30.0, 50.0)));
Map<String, Object> jsonPoint = org.elasticsearch.common.collect.Map.of("type", "Point", "coordinates", List.of(14.0, 15.0));
String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)";
String wktPoint = "POINT (14.0 15.0)";
// Test a single shape in geojson format.
sourceLookup.setSource(Collections.singletonMap("field", jsonLineString));
assertEquals(List.of(jsonLineString), mapper.lookupValues(sourceLookup, null));
assertEquals(List.of(wktLineString), mapper.lookupValues(sourceLookup, "wkt"));
// Test a list of shapes in geojson format.
sourceLookup.setSource(Collections.singletonMap("field", List.of(jsonLineString, jsonPoint)));
assertEquals(List.of(jsonLineString, jsonPoint), mapper.lookupValues(sourceLookup, null));
assertEquals(List.of(wktLineString, wktPoint), mapper.lookupValues(sourceLookup, "wkt"));
// Test a single shape in wkt format.
sourceLookup.setSource(Collections.singletonMap("field", wktLineString));
assertEquals(List.of(jsonLineString), mapper.lookupValues(sourceLookup, null));
assertEquals(List.of(wktLineString), mapper.lookupValues(sourceLookup, "wkt"));
// Test a list of shapes in wkt format.
sourceLookup.setSource(Collections.singletonMap("field", List.of(wktLineString, wktPoint)));
assertEquals(List.of(jsonLineString, jsonPoint), mapper.lookupValues(sourceLookup, null));
assertEquals(List.of(wktLineString, wktPoint), mapper.lookupValues(sourceLookup, "wkt"));
}
} }
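The same symmetry holds for geo_shape: _source may hold GeoJSON maps or WKT strings, and either input form round-trips to either output format. Under the same setup as testParseSourceValue above:

sourceLookup.setSource(Collections.singletonMap("field", "LINESTRING (42.0 27.1, 30.0 50.0)"));
mapper.lookupValues(sourceLookup, null);  // GeoJSON: [{type=LineString, coordinates=[[42.0, 27.1], [30.0, 50.0]]}]
mapper.lookupValues(sourceLookup, "wkt"); // ["LINESTRING (42.0 27.1, 30.0 50.0)"]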

View File

@ -26,10 +26,14 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -37,12 +41,14 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before; import org.junit.Before;
import java.io.IOException; import java.io.IOException;
import java.net.InetAddress; import java.net.InetAddress;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.Set; import java.util.Set;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
@ -297,6 +303,23 @@ public class IpFieldMapperTests extends FieldMapperTestCase<IpFieldMapper.Builde
assertThat(e.getMessage(), containsString("name cannot be empty string")); assertThat(e.getMessage(), containsString("name cannot be empty string"));
} }
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
IpFieldMapper mapper = new IpFieldMapper.Builder("field").build(context);
assertEquals("2001:db8::2:1", mapper.parseSourceValue("2001:db8::2:1", null));
assertEquals("2001:db8::2:1", mapper.parseSourceValue("2001:db8:0:0:0:0:2:1", null));
assertEquals("::1", mapper.parseSourceValue("0:0:0:0:0:0:0:1", null));
IpFieldMapper nullValueMapper = new IpFieldMapper.Builder("field")
.nullValue(InetAddresses.forString("2001:db8:0:0:0:0:2:7"))
.build(context);
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(Collections.singletonMap("field", null));
assertEquals(List.of("2001:db8::2:7"), nullValueMapper.lookupValues(sourceLookup, null));
}
@Override @Override
protected IpFieldMapper.Builder newBuilder() { protected IpFieldMapper.Builder newBuilder() {
return new IpFieldMapper.Builder("ip"); return new IpFieldMapper.Builder("ip");
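Two behaviors are worth calling out in the IP assertions: addresses are always rendered in compact form no matter how they were spelled in _source, and an explicit JSON null is replaced by the mapper's configured null_value, itself normalized. A minimal sketch, assuming the same builder context as the test:

IpFieldMapper mapper = new IpFieldMapper.Builder("field")
    .nullValue(InetAddresses.forString("2001:db8:0:0:0:0:2:7"))
    .build(context);

SourceLookup lookup = new SourceLookup();
lookup.setSource(Collections.singletonMap("field", null));
mapper.lookupValues(lookup, null); // ["2001:db8::2:7"] -- the null_value, compacted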

View File

@ -20,10 +20,13 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
@ -80,4 +83,14 @@ public class IpRangeFieldMapperTests extends ESSingleNodeTestCase {
assertThat(storedField.stringValue(), containsString(strVal)); assertThat(storedField.stringValue(), containsString(strVal));
} }
} }
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RangeFieldMapper mapper = new RangeFieldMapper.Builder("field", RangeType.IP).build(context);
Map<String, Object> range = org.elasticsearch.common.collect.Map.of("gte", "2001:db8:0:0:0:0:2:1");
assertEquals(org.elasticsearch.common.collect.Map.of("gte", "2001:db8::2:1"), mapper.parseSourceValue(range, null));
assertEquals("2001:db8::2:1/32", mapper.parseSourceValue("2001:db8:0:0:0:0:2:1/32", null));
}
} }

View File

@ -29,6 +29,8 @@ import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.search.similarities.BooleanSimilarity; import org.apache.lucene.search.similarities.BooleanSimilarity;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
@ -44,6 +46,7 @@ import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before; import org.junit.Before;
@ -166,6 +169,9 @@ public class KeywordFieldMapperTests extends FieldMapperTestCase<KeywordFieldMap
// used by TermVectorsService // used by TermVectorsService
assertArrayEquals(new String[] { "1234" }, TermVectorsService.getValues(doc.rootDoc().getFields("field"))); assertArrayEquals(new String[] { "1234" }, TermVectorsService.getValues(doc.rootDoc().getFields("field")));
FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field");
assertEquals("1234", fieldMapper.parseSourceValue("1234", null));
} }
public void testIgnoreAbove() throws IOException { public void testIgnoreAbove() throws IOException {
@ -624,4 +630,38 @@ public class KeywordFieldMapperTests extends FieldMapperTestCase<KeywordFieldMap
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
assertEquals(mapping3, mapper.mappingSource().toString()); assertEquals(mapping3, mapper.mappingSource().toString());
} }
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
KeywordFieldMapper mapper = new KeywordFieldMapper.Builder("field").build(context);
assertEquals("value", mapper.parseSourceValue("value", null));
assertEquals("42", mapper.parseSourceValue(42L, null));
assertEquals("true", mapper.parseSourceValue(true, null));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapper.parseSourceValue(true, "format"));
assertEquals("Field [field] of type [keyword] doesn't support formats.", e.getMessage());
KeywordFieldMapper ignoreAboveMapper = new KeywordFieldMapper.Builder("field")
.ignoreAbove(4)
.build(context);
assertNull(ignoreAboveMapper.parseSourceValue("value", null));
assertEquals("42", ignoreAboveMapper.parseSourceValue(42L, null));
assertEquals("true", ignoreAboveMapper.parseSourceValue(true, null));
KeywordFieldMapper normalizerMapper = new KeywordFieldMapper.Builder("field")
.normalizer(indexService.getIndexAnalyzers(), "lowercase")
.build(context);
assertEquals("value", normalizerMapper.parseSourceValue("VALUE", null));
assertEquals("42", normalizerMapper.parseSourceValue(42L, null));
assertEquals("value", normalizerMapper.parseSourceValue("value", null));
KeywordFieldMapper nullValueMapper = new KeywordFieldMapper.Builder("field")
.nullValue("NULL")
.build(context);
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(Collections.singletonMap("field", null));
assertEquals(org.elasticsearch.common.collect.List.of("NULL"), nullValueMapper.lookupValues(sourceLookup, null));
}
} }
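The keyword test exercises every knob that affects retrieval: plain values are stringified, ignore_above suppresses a value entirely rather than truncating it, the configured normalizer is applied on the way out, null_value fills in for explicit nulls, and any requested format is rejected with an IllegalArgumentException. The ignore_above behavior is the easiest to miss; a condensed sketch under the test's builder context:

KeywordFieldMapper ignoreAboveMapper = new KeywordFieldMapper.Builder("field").ignoreAbove(4).build(context);
ignoreAboveMapper.parseSourceValue("value", null); // null -- five characters, over the limit, dropped
ignoreAboveMapper.parseSourceValue(42L, null);     // "42" -- within the limit, kept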

View File

@ -25,14 +25,18 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -40,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Point;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin;
import org.junit.Before; import org.junit.Before;
@ -47,6 +52,7 @@ import org.junit.Before;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.Map;
import java.util.Set; import java.util.Set;
import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE; import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE;
@ -841,4 +847,39 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
public String toXContentString(LegacyGeoShapeFieldMapper mapper) throws IOException { public String toXContentString(LegacyGeoShapeFieldMapper mapper) throws IOException {
return toXContentString(mapper, true); return toXContentString(mapper, true);
} }
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
LegacyGeoShapeFieldMapper mapper = new LegacyGeoShapeFieldMapper.Builder("field").build(context);
SourceLookup sourceLookup = new SourceLookup();
Map<String, Object> jsonLineString = org.elasticsearch.common.collect.Map.of("type", "LineString", "coordinates",
List.of(List.of(42.0, 27.1), List.of(30.0, 50.0)));
Map<String, Object> jsonPoint = org.elasticsearch.common.collect.Map.of("type", "Point", "coordinates",
org.elasticsearch.common.collect.List.of(14.0, 15.0));
String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)";
String wktPoint = "POINT (14.0 15.0)";
// Test a single shape in geojson format.
sourceLookup.setSource(Collections.singletonMap("field", jsonLineString));
assertEquals(org.elasticsearch.common.collect.List.of(jsonLineString), mapper.lookupValues(sourceLookup, null));
assertEquals(org.elasticsearch.common.collect.List.of(wktLineString), mapper.lookupValues(sourceLookup, "wkt"));
// Test a list of shapes in geojson format.
sourceLookup.setSource(Collections.singletonMap("field", org.elasticsearch.common.collect.List.of(jsonLineString, jsonPoint)));
assertEquals(org.elasticsearch.common.collect.List.of(jsonLineString, jsonPoint), mapper.lookupValues(sourceLookup, null));
assertEquals(org.elasticsearch.common.collect.List.of(wktLineString, wktPoint), mapper.lookupValues(sourceLookup, "wkt"));
// Test a single shape in wkt format.
sourceLookup.setSource(Collections.singletonMap("field", wktLineString));
assertEquals(org.elasticsearch.common.collect.List.of(jsonLineString), mapper.lookupValues(sourceLookup, null));
assertEquals(org.elasticsearch.common.collect.List.of(wktLineString), mapper.lookupValues(sourceLookup, "wkt"));
// Test a list of shapes in wkt format.
sourceLookup.setSource(Collections.singletonMap("field", org.elasticsearch.common.collect.List.of(wktLineString, wktPoint)));
assertEquals(org.elasticsearch.common.collect.List.of(jsonLineString, jsonPoint), mapper.lookupValues(sourceLookup, null));
assertEquals(org.elasticsearch.common.collect.List.of(wktLineString, wktPoint), mapper.lookupValues(sourceLookup, "wkt"));
}
} }

View File

@ -22,11 +22,14 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.randomizedtesting.annotations.Timeout; import com.carrotsearch.randomizedtesting.annotations.Timeout;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -35,11 +38,13 @@ import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.mapper.NumberFieldTypeTests.OutOfRangeSpec; import org.elasticsearch.index.mapper.NumberFieldTypeTests.OutOfRangeSpec;
import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.IOException; import java.io.IOException;
import java.math.BigInteger; import java.math.BigInteger;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@ -401,6 +406,24 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase<N
} }
} }
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
NumberFieldMapper mapper = new NumberFieldMapper.Builder("field", NumberType.INTEGER).build(context);
assertEquals(3, mapper.parseSourceValue(3.14, null));
assertEquals(42, mapper.parseSourceValue("42.9", null));
NumberFieldMapper nullValueMapper = new NumberFieldMapper.Builder("field", NumberType.FLOAT)
.nullValue(2.71f)
.build(context);
assertEquals(2.71f, (float) nullValueMapper.parseSourceValue("", null), 0.00001);
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(Collections.singletonMap("field", null));
assertEquals(org.elasticsearch.common.collect.List.of(2.71f), nullValueMapper.lookupValues(sourceLookup, null));
}
@Timeout(millis = 30000) @Timeout(millis = 30000)
public void testOutOfRangeValues() throws IOException { public void testOutOfRangeValues() throws IOException {
final List<OutOfRangeSpec<Object>> inputs = Arrays.asList( final List<OutOfRangeSpec<Object>> inputs = Arrays.asList(

View File

@ -185,6 +185,11 @@ public class ParametrizedMapperTests extends ESSingleNodeTestCase {
} }
@Override
protected Object parseSourceValue(Object value, String format) {
return null;
}
@Override @Override
protected String contentType() { protected String contentType() {
return "test_mapper"; return "test_mapper";

View File

@ -22,10 +22,13 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -40,6 +43,7 @@ import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.HashSet; import java.util.HashSet;
import java.util.Locale; import java.util.Locale;
import java.util.Map;
import java.util.Set; import java.util.Set;
import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD; import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD;
@ -49,6 +53,7 @@ import static org.elasticsearch.index.query.RangeQueryBuilder.LT_FIELD;
import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase<RangeFieldMapper.Builder> { public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase<RangeFieldMapper.Builder> {
@Override @Override
@ -486,4 +491,35 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase<Ra
assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage()); assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage());
} }
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RangeFieldMapper longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG).build(context);
Map<String, Object> longRange = org.elasticsearch.common.collect.Map.of("gte", 3.14, "lt", "42.9");
assertEquals(org.elasticsearch.common.collect.Map.of("gte", 3L, "lt", 42L), longMapper.parseSourceValue(longRange, null));
RangeFieldMapper dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE)
.format("yyyy/MM/dd||epoch_millis")
.build(context);
Map<String, Object> dateRange = org.elasticsearch.common.collect.Map.of("lt", "1990/12/29", "gte", 597429487111L);
assertEquals(org.elasticsearch.common.collect.Map.of("lt", "1990/12/29", "gte", "1988/12/06"),
dateMapper.parseSourceValue(dateRange, null));
}
public void testParseSourceValueWithFormat() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RangeFieldMapper longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG).build(context);
Map<String, Object> longRange = org.elasticsearch.common.collect.Map.of("gte", 3.14, "lt", "42.9");
assertEquals(org.elasticsearch.common.collect.Map.of("gte", 3L, "lt", 42L), longMapper.parseSourceValue(longRange, null));
RangeFieldMapper dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE)
.format("strict_date_time")
.build(context);
Map<String, Object> dateRange = org.elasticsearch.common.collect.Map.of("lt", "1990-12-29T00:00:00.000Z");
assertEquals(org.elasticsearch.common.collect.Map.of("lt", "1990/12/29"), dateMapper.parseSourceValue(dateRange, "yyy/MM/dd"));
assertEquals(org.elasticsearch.common.collect.Map.of("lt", "662428800000"), dateMapper.parseSourceValue(dateRange, "epoch_millis"));
}
} }
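Range retrieval works bound by bound: each gte/lt entry is coerced by the range type (doubles and numeric strings become longs above), and date bounds are re-rendered with the mapper's own format unless the caller passes one. A condensed sketch, same scaffolding as the tests:

RangeFieldMapper dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE)
    .format("strict_date_time")
    .build(context);
Map<String, Object> dateRange = org.elasticsearch.common.collect.Map.of("lt", "1990-12-29T00:00:00.000Z");

dateMapper.parseSourceValue(dateRange, "yyyy/MM/dd");   // {lt=1990/12/29}
dateMapper.parseSourceValue(dateRange, "epoch_millis"); // {lt=662428800000}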

View File

@ -237,13 +237,13 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
assertEquals(1466062190000L, formatter.parseMillis(to)); assertEquals(1466062190000L, formatter.parseMillis(to));
RangeFieldType fieldType = new RangeFieldType(FIELDNAME, true, true, formatter, Collections.emptyMap()); RangeFieldType fieldType = new RangeFieldType(FIELDNAME, true, true, formatter, Collections.emptyMap());
final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, null, context); final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context);
assertEquals("field:<ranges:[1465975790000 : 1466062190999]>", query.toString()); assertEquals("field:<ranges:[1465975790000 : 1466062190999]>", query.toString());
// compare lower and upper bounds with what we would get on a `date` field // compare lower and upper bounds with what we would get on a `date` field
DateFieldType dateFieldType DateFieldType dateFieldType
= new DateFieldType(FIELDNAME, true, true, formatter, DateFieldMapper.Resolution.MILLISECONDS, Collections.emptyMap()); = new DateFieldType(FIELDNAME, true, true, formatter, DateFieldMapper.Resolution.MILLISECONDS, Collections.emptyMap());
final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, null, context); final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context);
assertEquals("field:[1465975790000 TO 1466062190999]", queryOnDateField.toString()); assertEquals("field:[1465975790000 TO 1466062190999]", queryOnDateField.toString());
} }
@ -464,9 +464,9 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
} }
public void testParseIp() { public void testParseIp() {
assertEquals(InetAddresses.forString("::1"), RangeType.IP.parse(InetAddresses.forString("::1"), randomBoolean())); assertEquals(InetAddresses.forString("::1"), RangeType.IP.parseValue(InetAddresses.forString("::1"), randomBoolean(), null));
assertEquals(InetAddresses.forString("::1"), RangeType.IP.parse("::1", randomBoolean())); assertEquals(InetAddresses.forString("::1"), RangeType.IP.parseValue("::1", randomBoolean(), null));
assertEquals(InetAddresses.forString("::1"), RangeType.IP.parse(new BytesRef("::1"), randomBoolean())); assertEquals(InetAddresses.forString("::1"), RangeType.IP.parseValue(new BytesRef("::1"), randomBoolean(), null));
} }
public void testTermQuery() throws Exception { public void testTermQuery() throws Exception {

View File

@ -51,6 +51,7 @@ import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
@ -1344,4 +1345,16 @@ public class TextFieldMapperTests extends FieldMapperTestCase<TextFieldMapper.Bu
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
assertEquals(mapping3, mapper.mappingSource().toString()); assertEquals(mapping3, mapper.mappingSource().toString());
} }
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
FieldMapper fieldMapper = newBuilder().build(context);
TextFieldMapper mapper = (TextFieldMapper) fieldMapper;
assertEquals("value", mapper.parseSourceValue("value", null));
assertEquals("42", mapper.parseSourceValue(42L, null));
assertEquals("true", mapper.parseSourceValue(true, null));
}
} }

View File

@ -32,8 +32,8 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.FieldAndFormat;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests;
import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilder;

View File

@ -185,8 +185,8 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
XContentType xContentType = randomFrom(XContentType.values()); XContentType xContentType = randomFrom(XContentType.values());
SearchHit searchHit = createTestItem(xContentType, true, true); SearchHit searchHit = createTestItem(xContentType, true, true);
BytesReference originalBytes = toXContent(searchHit, xContentType, true); BytesReference originalBytes = toXContent(searchHit, xContentType, true);
Predicate<String> pathsToExclude = path -> (path.endsWith("highlight") || path.endsWith("fields") || path.contains("_source") Predicate<String> pathsToExclude = path -> path.endsWith("highlight") || path.contains("fields") || path.contains("_source")
|| path.contains("inner_hits") || path.isEmpty()); || path.contains("inner_hits") || path.isEmpty();
BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random()); BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random());
SearchHit parsed; SearchHit parsed;

View File

@ -0,0 +1,366 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItems;
public class FieldValueRetrieverTests extends ESSingleNodeTestCase {
public void testLeafValues() throws IOException {
MapperService mapperService = createMapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.array("field", "first", "second")
.startObject("object")
.field("field", "third")
.endObject()
.endObject();
List<FieldAndFormat> fieldAndFormats = org.elasticsearch.common.collect.List.of(
new FieldAndFormat("field", null),
new FieldAndFormat("object.field", null));
Map<String, DocumentField> fields = retrieveFields(mapperService, source, fieldAndFormats);
assertThat(fields.size(), equalTo(2));
DocumentField field = fields.get("field");
assertNotNull(field);
assertThat(field.getValues().size(), equalTo(2));
assertThat(field.getValues(), hasItems("first", "second"));
DocumentField objectField = fields.get("object.field");
assertNotNull(objectField);
assertThat(objectField.getValues().size(), equalTo(1));
assertThat(objectField.getValues(), hasItems("third"));
}
public void testObjectValues() throws IOException {
MapperService mapperService = createMapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.startObject("float_range")
.field("gte", 0.0f)
.field("lte", 2.718f)
.endObject()
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "float_range");
assertThat(fields.size(), equalTo(1));
DocumentField rangeField = fields.get("float_range");
assertNotNull(rangeField);
assertThat(rangeField.getValues().size(), equalTo(1));
assertThat(rangeField.getValue(), equalTo(org.elasticsearch.common.collect.Map.of("gte", 0.0f, "lte", 2.718f)));
}
public void testNonExistentField() throws IOException {
MapperService mapperService = createMapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.field("field", "value")
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "non-existent");
assertThat(fields.size(), equalTo(0));
}
public void testMetadataFields() throws IOException {
MapperService mapperService = createMapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.field("field", "value")
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "_routing");
assertTrue(fields.isEmpty());
}
public void testRetrieveAllFields() throws IOException {
MapperService mapperService = createMapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.field("field", "value")
.startObject("object")
.field("field", "other-value")
.endObject()
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "*");
assertThat(fields.size(), equalTo(2));
}
public void testArrayValueMappers() throws IOException {
MapperService mapperService = createMapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.array("geo_point", 27.1, 42.0)
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "geo_point");
assertThat(fields.size(), equalTo(1));
DocumentField field = fields.get("geo_point");
assertNotNull(field);
assertThat(field.getValues().size(), equalTo(1));
// Test a field with multiple geo-points.
source = XContentFactory.jsonBuilder().startObject()
.startArray("geo_point")
.startArray().value(27.1).value(42.0).endArray()
.startArray().value(31.4).value(42.0).endArray()
.endArray()
.endObject();
fields = retrieveFields(mapperService, source, "geo_point");
assertThat(fields.size(), equalTo(1));
field = fields.get("geo_point");
assertNotNull(field);
assertThat(field.getValues().size(), equalTo(2));
}
public void testFieldNamesWithWildcard() throws IOException {
MapperService mapperService = createMapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.array("field", "first", "second")
.field("integer_field", 333)
.startObject("object")
.field("field", "fourth")
.endObject()
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "*field");
assertThat(fields.size(), equalTo(3));
DocumentField field = fields.get("field");
assertNotNull(field);
assertThat(field.getValues().size(), equalTo(2));
assertThat(field.getValues(), hasItems("first", "second"));
DocumentField otherField = fields.get("integer_field");
assertNotNull(otherField);
assertThat(otherField.getValues().size(), equalTo(1));
assertThat(otherField.getValues(), hasItems(333));
DocumentField objectField = fields.get("object.field");
assertNotNull(objectField);
assertThat(objectField.getValues().size(), equalTo(1));
assertThat(objectField.getValues(), hasItems("fourth"));
}
public void testDateFormat() throws IOException {
MapperService mapperService = createMapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.field("field", "value")
.field("date_field", "1990-12-29T00:00:00.000Z")
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, org.elasticsearch.common.collect.List.of(
new FieldAndFormat("field", null),
new FieldAndFormat("date_field", "yyyy/MM/dd")));
assertThat(fields.size(), equalTo(2));
DocumentField field = fields.get("field");
assertNotNull(field);
DocumentField dateField = fields.get("date_field");
assertNotNull(dateField);
assertThat(dateField.getValues().size(), equalTo(1));
assertThat(dateField.getValue(), equalTo("1990/12/29"));
}
public void testIgnoreAbove() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("field")
.field("type", "keyword")
.field("ignore_above", 20)
.endObject()
.endObject()
.endObject();
IndexService indexService = createIndex("index", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping);
MapperService mapperService = indexService.mapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.array("field", "value", "other_value", "really_really_long_value")
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "field");
DocumentField field = fields.get("field");
assertThat(field.getValues().size(), equalTo(2));
source = XContentFactory.jsonBuilder().startObject()
.array("field", "really_really_long_value")
.endObject();
fields = retrieveFields(mapperService, source, "field");
assertFalse(fields.containsKey("field"));
}
public void testFieldAliases() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("field").field("type", "keyword").endObject()
.startObject("alias_field")
.field("type", "alias")
.field("path", "field")
.endObject()
.endObject()
.endObject();
IndexService indexService = createIndex("index", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping);
MapperService mapperService = indexService.mapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.field("field", "value")
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "alias_field");
assertThat(fields.size(), equalTo(1));
DocumentField field = fields.get("alias_field");
assertNotNull(field);
assertThat(field.getValues().size(), equalTo(1));
assertThat(field.getValues(), hasItems("value"));
fields = retrieveFields(mapperService, source, "*field");
assertThat(fields.size(), equalTo(2));
assertTrue(fields.containsKey("alias_field"));
assertTrue(fields.containsKey("field"));
}
public void testMultiFields() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("field")
.field("type", "integer")
.startObject("fields")
.startObject("keyword").field("type", "keyword").endObject()
.endObject()
.endObject()
.endObject()
.endObject();
IndexService indexService = createIndex("index", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping);
MapperService mapperService = indexService.mapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.field("field", 42)
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "field.keyword");
assertThat(fields.size(), equalTo(1));
DocumentField field = fields.get("field.keyword");
assertNotNull(field);
assertThat(field.getValues().size(), equalTo(1));
assertThat(field.getValues(), hasItems(42));
fields = retrieveFields(mapperService, source, "field*");
assertThat(fields.size(), equalTo(2));
assertTrue(fields.containsKey("field"));
assertTrue(fields.containsKey("field.keyword"));
}
public void testCopyTo() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("field")
.field("type", "keyword")
.endObject()
.startObject("other_field")
.field("type", "integer")
.field("copy_to", "field")
.endObject()
.endObject()
.endObject();
IndexService indexService = createIndex("index", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping);
MapperService mapperService = indexService.mapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.array("field", "one", "two", "three")
.array("other_field", 1, 2, 3)
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "field");
assertThat(fields.size(), equalTo(1));
DocumentField field = fields.get("field");
assertNotNull(field);
assertThat(field.getValues().size(), equalTo(6));
assertThat(field.getValues(), hasItems("one", "two", "three", 1, 2, 3));
}
public void testObjectFields() throws IOException {
MapperService mapperService = createMapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject()
.array("field", "first", "second")
.startObject("object")
.field("field", "third")
.endObject()
.endObject();
Map<String, DocumentField> fields = retrieveFields(mapperService, source, "object");
assertFalse(fields.containsKey("object"));
}
private Map<String, DocumentField> retrieveFields(MapperService mapperService, XContentBuilder source, String fieldPattern) {
List<FieldAndFormat> fields = org.elasticsearch.common.collect.List.of(new FieldAndFormat(fieldPattern, null));
return retrieveFields(mapperService, source, fields);
}
private Map<String, DocumentField> retrieveFields(MapperService mapperService, XContentBuilder source, List<FieldAndFormat> fields) {
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(BytesReference.bytes(source));
FieldValueRetriever fetchFieldsLookup = FieldValueRetriever.create(mapperService, fields);
return fetchFieldsLookup.retrieve(sourceLookup, org.elasticsearch.common.collect.Set.of());
}
public MapperService createMapperService() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("field").field("type", "keyword").endObject()
.startObject("integer_field").field("type", "integer").endObject()
.startObject("date_field").field("type", "date").endObject()
.startObject("geo_point").field("type", "geo_point").endObject()
.startObject("float_range").field("type", "float_range").endObject()
.startObject("object")
.startObject("properties")
.startObject("field").field("type", "keyword").endObject()
.endObject()
.endObject()
.startObject("field_that_does_not_match").field("type", "keyword").endObject()
.endObject()
.endObject();
IndexService indexService = createIndex("index", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping);
return indexService.mapperService();
}
}
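All of these cases funnel through one two-step API: resolve the requested field patterns against the mappings once per request, then pull values out of each hit's _source. A minimal sketch of that pattern, assuming a mapperService and an XContentBuilder source like the ones built above; the empty set passed to retrieve mirrors the private helper:

List<FieldAndFormat> fields = org.elasticsearch.common.collect.List.of(
    new FieldAndFormat("*field", null),              // patterns are expanded against the mappings
    new FieldAndFormat("date_field", "yyyy/MM/dd")); // optional per-field format

FieldValueRetriever retriever = FieldValueRetriever.create(mapperService, fields);

SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(BytesReference.bytes(source));
Map<String, DocumentField> fieldValues = retriever.retrieve(sourceLookup, org.elasticsearch.common.collect.Set.of());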

View File

@ -87,9 +87,9 @@ public class CompletionSuggestionOptionTests extends ESTestCase {
// also there can be inner search hits fields inside this option, we need to exclude another couple of paths // also there can be inner search hits fields inside this option, we need to exclude another couple of paths
// where we cannot add random stuff. We also exclude the root level, this is done for SearchHits as all unknown fields // where we cannot add random stuff. We also exclude the root level, this is done for SearchHits as all unknown fields
// for SearchHit on a root level are interpreted as meta-fields and will be kept // for SearchHit on a root level are interpreted as meta-fields and will be kept
Predicate<String> excludeFilter = (path) -> (path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) Predicate<String> excludeFilter = (path) -> path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName())
|| path.endsWith("highlight") || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits") || path.endsWith("highlight") || path.contains("fields") || path.contains("_source") || path.contains("inner_hits")
|| path.isEmpty()); || path.isEmpty();
mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random()); mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
} else { } else {
mutated = originalBytes; mutated = originalBytes;

View File

@ -105,9 +105,10 @@ public class SuggestionEntryTests extends ESTestCase {
// exclude "options" which contain SearchHits, // exclude "options" which contain SearchHits,
// on root level of SearchHit fields are interpreted as meta-fields and will be kept // on root level of SearchHit fields are interpreted as meta-fields and will be kept
Predicate<String> excludeFilter = ( Predicate<String> excludeFilter = (
path) -> (path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) || path.endsWith("highlight") path -> path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) || path.endsWith("highlight")
|| path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits") || path.contains("fields") || path.contains("_source") || path.contains("inner_hits")
|| path.contains("options")); || path.contains("options"));
mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random()); mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
} else { } else {
mutated = originalBytes; mutated = originalBytes;

View File

@ -124,10 +124,10 @@ public class SuggestionTests extends ESTestCase {
// - the root object should be excluded since it contains the named suggestion arrays // - the root object should be excluded since it contains the named suggestion arrays
// We also exclude options that contain SearchHits, as all unknown fields // We also exclude options that contain SearchHits, as all unknown fields
// on a root level of SearchHit are interpreted as meta-fields and will be kept. // on a root level of SearchHit are interpreted as meta-fields and will be kept.
Predicate<String> excludeFilter = path -> (path.isEmpty() Predicate<String> excludeFilter = path -> path.isEmpty()
|| path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) || path.endsWith("highlight") || path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) || path.endsWith("highlight")
|| path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits") || path.contains("fields") || path.contains("_source") || path.contains("inner_hits")
|| path.contains("options")); || path.contains("options");
mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random()); mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
} else { } else {
mutated = originalBytes; mutated = originalBytes;

View File

@ -24,6 +24,9 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException; import java.io.IOException;
@ -32,6 +35,9 @@ import java.util.List;
// this sucks how much must be overridden just to get a dummy field mapper... // this sucks how much must be overridden just to get a dummy field mapper...
public class MockFieldMapper extends FieldMapper { public class MockFieldMapper extends FieldMapper {
static Settings DEFAULT_SETTINGS = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id)
.build();
public MockFieldMapper(String fullName) { public MockFieldMapper(String fullName) {
this(new FakeFieldType(fullName)); this(new FakeFieldType(fullName));
@ -42,6 +48,13 @@ public class MockFieldMapper extends FieldMapper {
MultiFields.empty(), new CopyTo.Builder().build()); MultiFields.empty(), new CopyTo.Builder().build());
} }
public MockFieldMapper(String fullName,
MappedFieldType fieldType,
MultiFields multifields,
CopyTo copyTo) {
super(findSimpleName(fullName), new FieldType(), fieldType, multifields, copyTo);
}
static String findSimpleName(String fullName) { static String findSimpleName(String fullName) {
int ndx = fullName.lastIndexOf('.'); int ndx = fullName.lastIndexOf('.');
return fullName.substring(ndx + 1); return fullName.substring(ndx + 1);
@ -76,8 +89,29 @@ public class MockFieldMapper extends FieldMapper {
protected void parseCreateField(ParseContext context) throws IOException { protected void parseCreateField(ParseContext context) throws IOException {
} }
@Override
protected Object parseSourceValue(Object value, String format) {
throw new UnsupportedOperationException();
}
@Override @Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) { protected void mergeOptions(FieldMapper other, List<String> conflicts) {
} }
public static class Builder extends FieldMapper.Builder<MockFieldMapper.Builder> {
private MappedFieldType fieldType;
protected Builder(String name) {
super(name, new FieldType());
this.fieldType = new FakeFieldType(name);
this.builder = this;
}
@Override
public MockFieldMapper build(BuilderContext context) {
MultiFields multiFields = multiFieldsBuilder.build(this, context);
return new MockFieldMapper(name(), fieldType, multiFields, copyTo);
}
}
} }
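The churn in MockFieldMapper reflects the new hook on FieldMapper itself: every concrete mapper now has to say how a raw _source value translates to its external representation. Judging by the overrides scattered through this diff, the hook presumably looks like this (the abstract modifier is an inference, since even this test stub is forced to implement it):

protected abstract Object parseSourceValue(Object value, String format);

Mappers whose values do not come from _source at all override the higher-level lookupValues instead, as constant_keyword does below.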

View File

@ -46,6 +46,7 @@ import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
@ -272,6 +273,16 @@ public class TestSearchContext extends SearchContext {
return null; return null;
} }
@Override
public FetchFieldsContext fetchFieldsContext() {
return null;
}
@Override
public SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext) {
return null;
}
@Override @Override
public ContextIndexSearcher searcher() { public ContextIndexSearcher searcher() {
return searcher; return searcher;

View File

@ -164,6 +164,14 @@ public class HistogramFieldMapper extends FieldMapper {
throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called"); throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
} }
@Override
protected Object parseSourceValue(Object value, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return value;
}
public static class HistogramFieldType extends MappedFieldType { public static class HistogramFieldType extends MappedFieldType {
public HistogramFieldType(String name, boolean hasDocValues, Map<String, String> meta) { public HistogramFieldType(String name, boolean hasDocValues, Map<String, String> meta) {

View File

@ -38,6 +38,7 @@ import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.TypeParsers; import org.elasticsearch.index.mapper.TypeParsers;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.time.ZoneId; import java.time.ZoneId;
@ -262,6 +263,22 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
} }
} }
@Override
public List<String> lookupValues(SourceLookup lookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return fieldType().value == null
? org.elasticsearch.common.collect.List.of()
: org.elasticsearch.common.collect.List.of(fieldType().value);
}
@Override
protected Object parseSourceValue(Object value, String format) {
throw new UnsupportedOperationException("This should never be called, since lookupValues is implemented directly.");
}
@Override @Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) { protected void mergeOptions(FieldMapper other, List<String> conflicts) {
ConstantKeywordFieldType newConstantKeywordFT = (ConstantKeywordFieldType) other.fieldType(); ConstantKeywordFieldType newConstantKeywordFT = (ConstantKeywordFieldType) other.fieldType();
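constant_keyword is the clearest example of that escape hatch: its value lives in the mappings rather than in _source, so it overrides lookupValues wholesale and leaves parseSourceValue unreachable. A sketch of the effect, assuming a field configured with a hypothetical "value": "debug" in its mapping:

mapper.lookupValues(new SourceLookup(), null); // ["debug"] -- _source is never consulted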

Some files were not shown because too many files have changed in this diff