Add a `format` option to `docvalue_fields`. (#29639)
This commit adds the ability to configure how a docvalue field should be formatted, so that it is possible, for example, to return a date field formatted as the number of milliseconds since the epoch. Closes #27740
This commit is contained in: parent 4aa345e6dc, commit a19df4ab3b
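For a quick sense of the API surface this adds, here is a minimal client-side sketch (the index and field names are hypothetical; `addDocValueField(String, String)` is the overload introduced further down in this diff):

```java
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;

public class DocValueFormatExample {
    // Sketch: request one doc-value field with a custom format and one that
    // opts in to the format defined in the field's mapping.
    public static SearchResponse search(Client client) {
        SearchRequestBuilder request = client.prepareSearch("my_index")
                .addDocValueField("my_date_field", "epoch_millis")      // custom format
                .addDocValueField("my_ip_field", "use_field_mapping");  // format from the mapping
        return request.get();
    }
}
```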
@@ -11,13 +11,38 @@ GET /_search
     "query" : {
         "match_all": {}
     },
-    "docvalue_fields" : ["test1", "test2"]
+    "docvalue_fields" : [
+        {
+            "field": "my_ip_field", <1>
+            "format": "use_field_mapping" <2>
+        },
+        {
+            "field": "my_date_field",
+            "format": "epoch_millis" <3>
+        }
+    ]
 }
 --------------------------------------------------
 // CONSOLE
+<1> the name of the field
+<2> the special `use_field_mapping` format tells Elasticsearch to use the format from the mapping
+<3> date fields may use a custom format

 Doc value fields can work on fields that are not stored.

 Note that if the fields parameter specifies fields without docvalues it will try to load the value from the fielddata cache
 causing the terms for that field to be loaded to memory (cached), which will result in more memory consumption.
+
+[float]
+==== Custom formats
+
+While most fields do not support custom formats, some of them do:
+- <<date,Date>> fields can take any <<mapping-date-format,date format>>.
+- <<number,Numeric>> fields accept a https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html[DecimalFormat pattern].
+
+All fields support the special `use_field_mapping` format, which tells
+Elasticsearch to use the mappings to figure out a default format.
+
+NOTE: The default is currently to return the same output as
+<<search-request-script-fields,script fields>>. However it will change in 7.0
+to behave as if the `use_field_mapping` format was provided.
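The DecimalFormat pattern mentioned for numeric fields is the plain JDK `java.text.DecimalFormat` syntax; the `"#.0"` pattern used by the REST tests further down renders the integer `1` as the string `"1.0"`. A self-contained illustration:

```java
import java.text.DecimalFormat;

public class DecimalFormatDemo {
    public static void main(String[] args) {
        DecimalFormat pattern = new DecimalFormat("#.0"); // same pattern as the YAML test below
        System.out.println(pattern.format(1));     // prints 1.0
        System.out.println(pattern.format(12.34)); // prints 12.3 (one fraction digit)
    }
}
```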
@@ -242,7 +242,12 @@ POST test/_search
             },
             "inner_hits": {
                 "_source" : false,
-                "docvalue_fields" : ["comments.text.keyword"]
+                "docvalue_fields" : [
+                    {
+                        "field": "comments.text.keyword",
+                        "format": "use_field_mapping"
+                    }
+                ]
             }
         }
     }
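The inner-hits variant of the same option is also available programmatically; a minimal sketch of the `InnerHitBuilder` entry point this commit adds (the field name is taken from the docs example above):

```java
import org.elasticsearch.index.query.InnerHitBuilder;

public class InnerHitDocValueExample {
    // Sketch: request a keyword sub-field from doc values inside inner hits,
    // opting in to mapping-based formatting.
    public static InnerHitBuilder commentsInnerHits() {
        return new InnerHitBuilder()
                .addDocValueField("comments.text.keyword", "use_field_mapping");
    }
}
```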
@@ -45,9 +45,8 @@ setup:
 "Nested doc version and seqIDs":

   - skip:
-      # fixed in 6.0.1
-      version: " - 6.0.0"
-      reason: "version and seq IDs where not accurate in previous versions"
+      version: " - 6.99.99" # TODO change to 6.3.99 on backport
+      reason: "object notation for docvalue_fields was introduced in 6.4"

   - do:
       index:
@@ -61,7 +60,7 @@ setup:

   - do:
       search:
-        body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": ["_seq_no"]} }}, "version": true, "docvalue_fields" : ["_seq_no"] }
+        body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": [ { "field": "_seq_no", "format": "use_field_mapping" } ]} }}, "version": true, "docvalue_fields" : [ { "field": "_seq_no", "format": "use_field_mapping" } ] }

   - match: { hits.total: 1 }
   - match: { hits.hits.0._index: "test" }
@@ -84,7 +83,7 @@ setup:

   - do:
       search:
-        body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": ["_seq_no"]} }}, "version": true, "docvalue_fields" : ["_seq_no"] }
+        body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": [ { "field": "_seq_no", "format": "use_field_mapping" } ]} }}, "version": true, "docvalue_fields" : [ { "field": "_seq_no", "format": "use_field_mapping" } ] }

   - match: { hits.total: 1 }
   - match: { hits.hits.0._index: "test" }
@@ -133,7 +133,53 @@ setup:

 ---
+"docvalue_fields":
+  - skip:
+      version: " - 6.99.99" # TODO: change version on backport
+      reason: format option was added in 6.4
+      features: warnings
+  - do:
+      warnings:
+          - 'Doc-value field [count] is not using a format. The output will change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass [format=use_field_mapping] with the doc value field in order to opt in for the future behaviour and ease the migration to 7.0.'
+      search:
+          body:
+              docvalue_fields: [ "count" ]
+  - match: { hits.hits.0.fields.count: [1] }
+
+---
+"docvalue_fields as url param":
+  - skip:
+      version: " - 6.99.99" # TODO: change version on backport
+      reason: format option was added in 6.4
+      features: warnings
+  - do:
+      warnings:
+          - 'Doc-value field [count] is not using a format. The output will change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass [format=use_field_mapping] with the doc value field in order to opt in for the future behaviour and ease the migration to 7.0.'
+      search:
+          docvalue_fields: [ "count" ]
+  - match: { hits.hits.0.fields.count: [1] }
+
+---
+"docvalue_fields with default format":
+  - skip:
+      version: " - 6.99.99" # TODO: change version on backport
+      reason: format option was added in 6.4
+  - do:
+      search:
+          body:
+              docvalue_fields:
+                  - field: "count"
+                    format: "use_field_mapping"
+  - match: { hits.hits.0.fields.count: [1] }
+
+---
+"docvalue_fields with explicit format":
+  - skip:
+      version: " - 6.99.99" # TODO: change version on backport
+      reason: format option was added in 6.4
+  - do:
+      search:
+          body:
+              docvalue_fields:
+                  - field: "count"
+                    format: "#.0"
+  - match: { hits.hits.0.fields.count: ["1.0"] }
@@ -62,6 +62,9 @@ setup:
 ---
 "Docvalues_fields size limit":

+  - skip:
+      version: " - 6.99.99" # TODO: change to 6.3.99 on backport
+      reason: "The object notation for docvalue_fields is only supported on 6.4+"
   - do:
       catch: /Trying to retrieve too many docvalue_fields\. Must be less than or equal to[:] \[2\] but was \[3\]\. This limit can be set by changing the \[index.max_docvalue_fields_search\] index level setting\./
       search:
@@ -69,7 +72,13 @@ setup:
         body:
           query:
             match_all: {}
-          docvalue_fields: ["one", "two", "three"]
+          docvalue_fields:
+          - field: "one"
+            format: "use_field_mapping"
+          - field: "two"
+            format: "use_field_mapping"
+          - field: "three"
+            format: "use_field_mapping"

 ---
 "Script_fields size limit":
@@ -153,7 +153,7 @@ final class ExpandSearchPhase extends SearchPhase {
             }
         }
         if (options.getDocValueFields() != null) {
-            options.getDocValueFields().forEach(groupSource::docValueField);
+            options.getDocValueFields().forEach(ff -> groupSource.docValueField(ff.field, ff.format));
         }
         if (options.getStoredFieldsContext() != null && options.getStoredFieldsContext().fieldNames() != null) {
             options.getStoredFieldsContext().fieldNames().forEach(groupSource::storedField);
@@ -290,11 +290,21 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
      *
      * @param name The field to get from the docvalue
      */
-    public SearchRequestBuilder addDocValueField(String name) {
-        sourceBuilder().docValueField(name);
+    public SearchRequestBuilder addDocValueField(String name, String format) {
+        sourceBuilder().docValueField(name, format);
         return this;
     }

+    /**
+     * Adds a docvalue based field to load and return. The field does not have to be stored,
+     * but its recommended to use non analyzed or numeric fields.
+     *
+     * @param name The field to get from the docvalue
+     */
+    public SearchRequestBuilder addDocValueField(String name) {
+        return addDocValueField(name, null);
+    }
+
     /**
      * Adds a stored field to load and return (note, it must be stored) as part of the search request.
      */
@@ -33,6 +33,7 @@ import org.elasticsearch.script.Script;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.SortBuilder;

@@ -45,6 +46,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
+import java.util.stream.Collectors;

 import static org.elasticsearch.common.xcontent.XContentParser.Token.END_OBJECT;

@@ -65,7 +67,8 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
         PARSER.declareBoolean(InnerHitBuilder::setVersion, SearchSourceBuilder.VERSION_FIELD);
         PARSER.declareBoolean(InnerHitBuilder::setTrackScores, SearchSourceBuilder.TRACK_SCORES_FIELD);
         PARSER.declareStringArray(InnerHitBuilder::setStoredFieldNames, SearchSourceBuilder.STORED_FIELDS_FIELD);
-        PARSER.declareStringArray(InnerHitBuilder::setDocValueFields, SearchSourceBuilder.DOCVALUE_FIELDS_FIELD);
+        PARSER.declareObjectArray(InnerHitBuilder::setDocValueFields,
+                (p,c) -> FieldAndFormat.fromXContent(p), SearchSourceBuilder.DOCVALUE_FIELDS_FIELD);
         PARSER.declareField((p, i, c) -> {
             try {
                 Set<ScriptField> scriptFields = new HashSet<>();

@@ -102,7 +105,7 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
     private StoredFieldsContext storedFieldsContext;
     private QueryBuilder query = DEFAULT_INNER_HIT_QUERY;
     private List<SortBuilder<?>> sorts;
-    private List<String> docValueFields;
+    private List<FieldAndFormat> docValueFields;
     private Set<ScriptField> scriptFields;
     private HighlightBuilder highlightBuilder;
     private FetchSourceContext fetchSourceContext;

@@ -134,7 +137,18 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
         version = in.readBoolean();
         trackScores = in.readBoolean();
         storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
-        docValueFields = (List<String>) in.readGenericValue();
+        if (in.getVersion().before(Version.V_7_0_0_alpha1)) { // TODO: change to 6.4.0 after backport
+            List<String> fieldList = (List<String>) in.readGenericValue();
+            if (fieldList == null) {
+                docValueFields = null;
+            } else {
+                docValueFields = fieldList.stream()
+                        .map(field -> new FieldAndFormat(field, null))
+                        .collect(Collectors.toList());
+            }
+        } else {
+            docValueFields = in.readBoolean() ? in.readList(FieldAndFormat::new) : null;
+        }
         if (in.readBoolean()) {
             int size = in.readVInt();
             scriptFields = new HashSet<>(size);
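The version checks above (and in the write path that follows) keep the wire format compatible: pre-6.4 peers exchange a plain `List<String>` via `readGenericValue`/`writeGenericValue`, newer peers an optional list of `FieldAndFormat`. A minimal round-trip sketch using the classes from this diff, assuming the 6.3 wire version stands in for an old peer:

```java
import java.io.IOException;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;

public class WireBwcSketch {
    public static void main(String[] args) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        out.setVersion(Version.V_6_3_0);                   // pretend the peer is an old node
        new FieldAndFormat("count", "use_field_mapping").writeTo(out); // format is skipped

        StreamInput in = out.bytes().streamInput();
        in.setVersion(Version.V_6_3_0);
        FieldAndFormat read = new FieldAndFormat(in);
        System.out.println(read.format);                   // null: format did not survive
    }
}
```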
@@ -174,7 +188,16 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
         out.writeBoolean(version);
         out.writeBoolean(trackScores);
         out.writeOptionalWriteable(storedFieldsContext);
-        out.writeGenericValue(docValueFields);
+        if (out.getVersion().before(Version.V_7_0_0_alpha1)) { // TODO: change to 6.4.0 after backport
+            out.writeGenericValue(docValueFields == null
+                    ? null
+                    : docValueFields.stream().map(ff -> ff.field).collect(Collectors.toList()));
+        } else {
+            out.writeBoolean(docValueFields != null);
+            if (docValueFields != null) {
+                out.writeList(docValueFields);
+            }
+        }
         boolean hasScriptFields = scriptFields != null;
         out.writeBoolean(hasScriptFields);
         if (hasScriptFields) {

@@ -248,7 +271,9 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
         out.writeBoolean(version);
         out.writeBoolean(trackScores);
         out.writeOptionalWriteable(storedFieldsContext);
-        out.writeGenericValue(docValueFields);
+        out.writeGenericValue(docValueFields == null
+                ? null
+                : docValueFields.stream().map(ff -> ff.field).collect(Collectors.toList()));
         boolean hasScriptFields = scriptFields != null;
         out.writeBoolean(hasScriptFields);
         if (hasScriptFields) {

@@ -390,14 +415,14 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
     /**
      * Gets the docvalue fields.
      */
-    public List<String> getDocValueFields() {
+    public List<FieldAndFormat> getDocValueFields() {
         return docValueFields;
     }

     /**
      * Sets the stored fields to load from the docvalue and return.
      */
-    public InnerHitBuilder setDocValueFields(List<String> docValueFields) {
+    public InnerHitBuilder setDocValueFields(List<FieldAndFormat> docValueFields) {
         this.docValueFields = docValueFields;
         return this;
     }

@@ -405,14 +430,21 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
     /**
      * Adds a field to load from the docvalue and return.
      */
-    public InnerHitBuilder addDocValueField(String field) {
+    public InnerHitBuilder addDocValueField(String field, String format) {
         if (docValueFields == null) {
             docValueFields = new ArrayList<>();
         }
-        docValueFields.add(field);
+        docValueFields.add(new FieldAndFormat(field, null));
         return this;
     }

+    /**
+     * Adds a field to load from doc values and return.
+     */
+    public InnerHitBuilder addDocValueField(String field) {
+        return addDocValueField(field, null);
+    }
+
     public Set<ScriptField> getScriptFields() {
         return scriptFields;
     }

@@ -489,8 +521,15 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
         }
         if (docValueFields != null) {
             builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName());
-            for (String docValueField : docValueFields) {
-                builder.value(docValueField);
+            for (FieldAndFormat docValueField : docValueFields) {
+                if (docValueField.format == null) {
+                    builder.value(docValueField.field);
+                } else {
+                    builder.startObject()
+                        .field("field", docValueField.field)
+                        .field("format", docValueField.format)
+                        .endObject();
+                }
             }
             builder.endArray();
         }
@@ -214,7 +214,7 @@ public class RestSearchAction extends BaseRestHandler {
             if (Strings.hasText(sDocValueFields)) {
                 String[] sFields = Strings.splitStringByCommaToArray(sDocValueFields);
                 for (String field : sFields) {
-                    searchSourceBuilder.docValueField(field);
+                    searchSourceBuilder.docValueField(field, null);
                 }
             }
         }
@@ -49,17 +49,17 @@ public interface DocValueFormat extends NamedWriteable {
     /** Format a long value. This is used by terms and histogram aggregations
      * to format keys for fields that use longs as a doc value representation
      * such as the {@code long} and {@code date} fields. */
-    String format(long value);
+    Object format(long value);

     /** Format a double value. This is used by terms and stats aggregations
      * to format keys for fields that use numbers as a doc value representation
      * such as the {@code long}, {@code double} or {@code date} fields. */
-    String format(double value);
+    Object format(double value);

     /** Format a binary value. This is used by terms aggregations to format
      * keys for fields that use binary doc value representations such as the
      * {@code keyword} and {@code ip} fields. */
-    String format(BytesRef value);
+    Object format(BytesRef value);

     /** Parse a value that was formatted with {@link #format(long)} back to the
      * original long value. */
@@ -85,13 +85,13 @@ public interface DocValueFormat extends NamedWriteable {
         }

         @Override
-        public String format(long value) {
-            return Long.toString(value);
+        public Long format(long value) {
+            return value;
         }

         @Override
-        public String format(double value) {
-            return Double.toString(value);
+        public Double format(double value) {
+            return value;
         }

         @Override

@@ -235,13 +235,13 @@ public interface DocValueFormat extends NamedWriteable {
         }

         @Override
-        public String format(long value) {
-            return java.lang.Boolean.valueOf(value != 0).toString();
+        public Boolean format(long value) {
+            return java.lang.Boolean.valueOf(value != 0);
         }

         @Override
-        public String format(double value) {
-            return java.lang.Boolean.valueOf(value != 0).toString();
+        public Boolean format(double value) {
+            return java.lang.Boolean.valueOf(value != 0);
         }

         @Override
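Widening the return type means the built-in formats can hand back typed values, so a formatted doc-value field keeps its JSON type instead of being stringified. A small sketch against the two constants touched here:

```java
import org.elasticsearch.search.DocValueFormat;

public class FormatReturnTypeDemo {
    public static void main(String[] args) {
        // RAW now returns the value itself, so numbers stay numbers in the response.
        Object raw = DocValueFormat.RAW.format(3L);      // Long 3, previously "3"
        // BOOLEAN returns a Boolean instead of "true"/"false" strings.
        Object bool = DocValueFormat.BOOLEAN.format(1);  // Boolean.TRUE, previously "true"
        System.out.println(raw + " " + bool);            // prints: 3 true
    }
}
```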
@@ -407,8 +407,8 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil
         final long high = nextTransition;

         final DocValueFormat format = ft.docValueFormat(null, null);
-        final String formattedLow = format.format(low);
-        final String formattedHigh = format.format(high);
+        final Object formattedLow = format.format(low);
+        final Object formattedHigh = format.format(high);
         if (ft.isFieldWithinQuery(reader, formattedLow, formattedHigh,
                 true, false, tz, null, context) == Relation.WITHIN) {
             // All values in this reader have the same offset despite daylight saving times.

@@ -107,7 +107,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<

     @Override
     public String getKeyAsString() {
-        return format.format(key);
+        return format.format(key).toString();
     }

     @Override

@@ -138,7 +138,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<

     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        String keyAsString = format.format(key);
+        String keyAsString = format.format(key).toString();
         if (keyed) {
             builder.startObject(keyAsString);
         } else {

@@ -103,7 +103,7 @@ public final class InternalHistogram extends InternalMultiBucketAggregation<Inte

     @Override
     public String getKeyAsString() {
-        return format.format(key);
+        return format.format(key).toString();
     }

     @Override

@@ -134,7 +134,7 @@ public final class InternalHistogram extends InternalMultiBucketAggregation<Inte

     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        String keyAsString = format.format(key);
+        String keyAsString = format.format(key).toString();
         if (keyed) {
             builder.startObject(keyAsString);
         } else {

@@ -155,7 +155,7 @@ public final class InternalBinaryRange

     @Override
     public String getFromAsString() {
-        return from == null ? null : format.format(from);
+        return from == null ? null : format.format(from).toString();
     }

     @Override

@@ -165,7 +165,7 @@ public final class InternalBinaryRange

     @Override
     public String getToAsString() {
-        return to == null ? null : format.format(to);
+        return to == null ? null : format.format(to).toString();
     }

     @Override

@@ -98,7 +98,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
         if (Double.isInfinite(from)) {
             return null;
         } else {
-            return format.format(from);
+            return format.format(from).toString();
         }
     }

@@ -107,7 +107,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
         if (Double.isInfinite(to)) {
             return null;
         } else {
-            return format.format(to);
+            return format.format(to).toString();
         }
     }

@@ -78,7 +78,7 @@ public class SignificantLongTerms extends InternalMappedSignificantTerms<Signifi

     @Override
     public String getKeyAsString() {
-        return format.format(term);
+        return format.format(term).toString();
     }

     @Override

@@ -83,7 +83,7 @@ public class SignificantStringTerms extends InternalMappedSignificantTerms<Signi

     @Override
     public String getKeyAsString() {
-        return format.format(termBytes);
+        return format.format(termBytes).toString();
     }

     @Override

@@ -153,12 +153,12 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
     }

     public long getBackgroundFrequency(BytesRef termBytes) throws IOException {
-        String value = config.format().format(termBytes);
+        String value = config.format().format(termBytes).toString();
         return getBackgroundFrequency(value);
     }

     public long getBackgroundFrequency(long termNum) throws IOException {
-        String value = config.format().format(termNum);
+        String value = config.format().format(termNum).toString();
         return getBackgroundFrequency(value);
     }

@@ -135,7 +135,7 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory<Signific
     }

     public long getBackgroundFrequency(BytesRef termBytes) throws IOException {
-        String value = format.format(termBytes);
+        String value = format.format(termBytes).toString();
         return getBackgroundFrequency(value);
     }

@@ -63,7 +63,7 @@ public class DoubleTerms extends InternalMappedTerms<DoubleTerms, DoubleTerms.Bu

     @Override
     public String getKeyAsString() {
-        return format.format(term);
+        return format.format(term).toString();
     }

     @Override

@@ -90,7 +90,7 @@ public class DoubleTerms extends InternalMappedTerms<DoubleTerms, DoubleTerms.Bu
     protected final XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
         builder.field(CommonFields.KEY.getPreferredName(), term);
         if (format != DocValueFormat.RAW) {
-            builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term));
+            builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term).toString());
         }
         return builder;
     }

@@ -63,7 +63,7 @@ public class LongTerms extends InternalMappedTerms<LongTerms, LongTerms.Bucket>

     @Override
     public String getKeyAsString() {
-        return format.format(term);
+        return format.format(term).toString();
     }

     @Override

@@ -90,7 +90,7 @@ public class LongTerms extends InternalMappedTerms<LongTerms, LongTerms.Bucket>
     protected final XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
         builder.field(CommonFields.KEY.getPreferredName(), term);
         if (format != DocValueFormat.RAW) {
-            builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term));
+            builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term).toString());
         }
         return builder;
     }

@@ -80,7 +80,7 @@ public class StringTerms extends InternalMappedTerms<StringTerms, StringTerms.Bu

     @Override
     public String getKeyAsString() {
-        return format.format(termBytes);
+        return format.format(termBytes).toString();
     }

     @Override

@@ -48,7 +48,7 @@ public abstract class InternalNumericMetricsAggregation extends InternalAggregat

     @Override
     public String getValueAsString() {
-        return format.format(value());
+        return format.format(value()).toString();
     }

     @Override

@@ -79,7 +79,7 @@ public abstract class InternalNumericMetricsAggregation extends InternalAggregat
     public abstract double value(String name);

     public String valueAsString(String name) {
-        return format.format(value(name));
+        return format.format(value(name)).toString();
     }

     @Override

@@ -113,7 +113,7 @@ public class InternalAvg extends InternalNumericMetricsAggregation.SingleValue i
     public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
         builder.field(CommonFields.VALUE.getPreferredName(), count != 0 ? getValue() : null);
         if (count != 0 && format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(getValue()));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(getValue()).toString());
         }
         return builder;
     }

@@ -85,7 +85,7 @@ public class InternalMax extends InternalNumericMetricsAggregation.SingleValue i
         boolean hasValue = !Double.isInfinite(max);
         builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? max : null);
         if (hasValue && format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(max));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(max).toString());
         }
         return builder;
     }

@@ -85,7 +85,7 @@ public class InternalMin extends InternalNumericMetricsAggregation.SingleValue i
         boolean hasValue = !Double.isInfinite(min);
         builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? min : null);
         if (hasValue && format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(min));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(min).toString());
         }
         return builder;
     }

@@ -137,7 +137,7 @@ abstract class AbstractInternalHDRPercentiles extends InternalNumericMetricsAggr
             builder.field(CommonFields.KEY.getPreferredName(), keys[i]);
             builder.field(CommonFields.VALUE.getPreferredName(), value);
             if (format != DocValueFormat.RAW) {
-                builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
+                builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString());
             }
             builder.endObject();
         }

@@ -120,7 +120,7 @@ abstract class AbstractInternalTDigestPercentiles extends InternalNumericMetrics
             builder.field(CommonFields.KEY.getPreferredName(), keys[i]);
             builder.field(CommonFields.VALUE.getPreferredName(), value);
             if (format != DocValueFormat.RAW) {
-                builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
+                builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString());
             }
             builder.endObject();
         }

@@ -95,7 +95,7 @@ public class InternalSum extends InternalNumericMetricsAggregation.SingleValue i
     public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
         builder.field(CommonFields.VALUE.getPreferredName(), sum);
         if (format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(sum));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(sum).toString());
         }
         return builder;
     }

@@ -39,6 +39,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;

@@ -70,7 +71,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
     private List<SortBuilder<?>> sorts = null;
     private HighlightBuilder highlightBuilder;
     private StoredFieldsContext storedFieldsContext;
-    private List<String> fieldDataFields;
+    private List<FieldAndFormat> docValueFields;
     private Set<ScriptField> scriptFields;
     private FetchSourceContext fetchSourceContext;

@@ -91,7 +92,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
             new HighlightBuilder(clone.highlightBuilder, clone.highlightBuilder.highlightQuery(), clone.highlightBuilder.fields());
         this.storedFieldsContext = clone.storedFieldsContext == null ? null :
             new StoredFieldsContext(clone.storedFieldsContext);
-        this.fieldDataFields = clone.fieldDataFields == null ? null : new ArrayList<>(clone.fieldDataFields);
+        this.docValueFields = clone.docValueFields == null ? null : new ArrayList<>(clone.docValueFields);
        this.scriptFields = clone.scriptFields == null ? null : new HashSet<>(clone.scriptFields);
        this.fetchSourceContext = clone.fetchSourceContext == null ? null :
            new FetchSourceContext(clone.fetchSourceContext.fetchSource(), clone.fetchSourceContext.includes(),

@@ -112,9 +113,9 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
         fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
         if (in.readBoolean()) {
             int size = in.readVInt();
-            fieldDataFields = new ArrayList<>(size);
+            docValueFields = new ArrayList<>(size);
             for (int i = 0; i < size; i++) {
-                fieldDataFields.add(in.readString());
+                docValueFields.add(new FieldAndFormat(in));
             }
         }
         storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);

@@ -143,12 +144,12 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
     protected void doWriteTo(StreamOutput out) throws IOException {
         out.writeBoolean(explain);
         out.writeOptionalWriteable(fetchSourceContext);
-        boolean hasFieldDataFields = fieldDataFields != null;
+        boolean hasFieldDataFields = docValueFields != null;
         out.writeBoolean(hasFieldDataFields);
         if (hasFieldDataFields) {
-            out.writeVInt(fieldDataFields.size());
-            for (String fieldName : fieldDataFields) {
-                out.writeString(fieldName);
+            out.writeVInt(docValueFields.size());
+            for (FieldAndFormat ff : docValueFields) {
+                ff.writeTo(out);
             }
         }
         out.writeOptionalWriteable(storedFieldsContext);
@@ -404,40 +405,33 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
     }

     /**
-     * Adds a field to load from the field data cache and return as part of
+     * Adds a field to load from doc values and return as part of
      * the search request.
      */
-    public TopHitsAggregationBuilder fieldDataField(String fieldDataField) {
-        if (fieldDataField == null) {
-            throw new IllegalArgumentException("[fieldDataField] must not be null: [" + name + "]");
+    public TopHitsAggregationBuilder docValueField(String docValueField, String format) {
+        if (docValueField == null) {
+            throw new IllegalArgumentException("[docValueField] must not be null: [" + name + "]");
         }
-        if (fieldDataFields == null) {
-            fieldDataFields = new ArrayList<>();
+        if (docValueFields == null) {
+            docValueFields = new ArrayList<>();
         }
-        fieldDataFields.add(fieldDataField);
+        docValueFields.add(new FieldAndFormat(docValueField, format));
         return this;
     }

     /**
-     * Adds fields to load from the field data cache and return as part of
+     * Adds a field to load from doc values and return as part of
      * the search request.
      */
-    public TopHitsAggregationBuilder fieldDataFields(List<String> fieldDataFields) {
-        if (fieldDataFields == null) {
-            throw new IllegalArgumentException("[fieldDataFields] must not be null: [" + name + "]");
-        }
-        if (this.fieldDataFields == null) {
-            this.fieldDataFields = new ArrayList<>();
-        }
-        this.fieldDataFields.addAll(fieldDataFields);
-        return this;
+    public TopHitsAggregationBuilder docValueField(String docValueField) {
+        return docValueField(docValueField, null);
     }

     /**
      * Gets the field-data fields.
      */
-    public List<String> fieldDataFields() {
-        return fieldDataFields;
+    public List<FieldAndFormat> fieldDataFields() {
+        return docValueFields;
     }

     /**
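For reference, a minimal sketch of the renamed `top_hits` entry point (the aggregation name and field names are hypothetical):

```java
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;

public class TopHitsDocValueExample {
    // Sketch: the old fieldDataField(...) entry point is now docValueField(...),
    // with an optional second argument carrying the format.
    public static TopHitsAggregationBuilder topSales() {
        return new TopHitsAggregationBuilder("top_sales")
                .size(1)
                .docValueField("sale_date", "epoch_millis") // custom format
                .docValueField("sale_price");               // format resolved later (null)
    }
}
```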
@@ -587,7 +581,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
             optionalSort = SortBuilder.buildSort(sorts, context.getQueryShardContext());
         }
         return new TopHitsAggregatorFactory(name, from, size, explain, version, trackScores, optionalSort, highlightBuilder,
-            storedFieldsContext, fieldDataFields, fields, fetchSourceContext, context, parent, subfactoriesBuilder, metaData);
+            storedFieldsContext, docValueFields, fields, fetchSourceContext, context, parent, subfactoriesBuilder, metaData);
     }

     @Override

@@ -603,10 +597,15 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
         if (storedFieldsContext != null) {
             storedFieldsContext.toXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), builder);
         }
-        if (fieldDataFields != null) {
+        if (docValueFields != null) {
             builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName());
-            for (String fieldDataField : fieldDataFields) {
-                builder.value(fieldDataField);
+            for (FieldAndFormat dvField : docValueFields) {
+                builder.startObject()
+                    .field("field", dvField.field);
+                if (dvField.format != null) {
+                    builder.field("format", dvField.format);
+                }
+                builder.endObject();
             }
             builder.endArray();
         }

@@ -725,14 +724,9 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
             } else if (SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
-                List<String> fieldDataFields = new ArrayList<>();
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                    if (token == XContentParser.Token.VALUE_STRING) {
-                        fieldDataFields.add(parser.text());
-                    } else {
-                        throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING
-                                + "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
-                    }
+                    FieldAndFormat ff = FieldAndFormat.fromXContent(parser);
+                    factory.docValueField(ff.field, ff.format);
                 }
-                factory.fieldDataFields(fieldDataFields);
             } else if (SearchSourceBuilder.SORT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                 List<SortBuilder<?>> sorts = SortBuilder.fromXContent(parser);
                 factory.sorts(sorts);

@@ -752,7 +746,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit

     @Override
     protected int doHashCode() {
-        return Objects.hash(explain, fetchSourceContext, fieldDataFields, storedFieldsContext, from, highlightBuilder,
+        return Objects.hash(explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder,
             scriptFields, size, sorts, trackScores, version);
     }

@@ -761,7 +755,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
         TopHitsAggregationBuilder other = (TopHitsAggregationBuilder) obj;
         return Objects.equals(explain, other.explain)
             && Objects.equals(fetchSourceContext, other.fetchSourceContext)
-            && Objects.equals(fieldDataFields, other.fieldDataFields)
+            && Objects.equals(docValueFields, other.docValueFields)
             && Objects.equals(storedFieldsContext, other.storedFieldsContext)
             && Objects.equals(from, other.from)
             && Objects.equals(highlightBuilder, other.highlightBuilder)
@@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
 import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;

@@ -47,13 +48,13 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
     private final Optional<SortAndFormats> sort;
     private final HighlightBuilder highlightBuilder;
     private final StoredFieldsContext storedFieldsContext;
-    private final List<String> docValueFields;
+    private final List<FieldAndFormat> docValueFields;
     private final List<ScriptFieldsContext.ScriptField> scriptFields;
     private final FetchSourceContext fetchSourceContext;

     TopHitsAggregatorFactory(String name, int from, int size, boolean explain, boolean version, boolean trackScores,
             Optional<SortAndFormats> sort, HighlightBuilder highlightBuilder, StoredFieldsContext storedFieldsContext,
-            List<String> docValueFields, List<ScriptFieldsContext.ScriptField> scriptFields, FetchSourceContext fetchSourceContext,
+            List<FieldAndFormat> docValueFields, List<ScriptFieldsContext.ScriptField> scriptFields, FetchSourceContext fetchSourceContext,
             SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactories, Map<String, Object> metaData)
             throws IOException {
         super(name, context, parent, subFactories, metaData);
@@ -85,7 +85,7 @@ public class InternalSimpleValue extends InternalNumericMetricsAggregation.Singl
         boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value));
         builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
         if (hasValue && format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString());
         }
         return builder;
     }

@@ -108,7 +108,7 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation
         boolean hasValue = !Double.isInfinite(value);
         builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
         if (hasValue && format != DocValueFormat.RAW) {
-            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
+            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString());
         }
         builder.startArray(KEYS_FIELD.getPreferredName());
         for (String key : keys) {

@@ -97,7 +97,7 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation

     @Override
     public String percentileAsString(double percent) {
-        return format.format(percentile(percent));
+        return format.format(percentile(percent)).toString();
     }

     DocValueFormat formatter() {
@@ -47,6 +47,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
 import org.elasticsearch.search.collapse.CollapseBuilder;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.search.internal.SearchContext;

@@ -64,6 +65,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Objects;
+import java.util.stream.Collectors;

 import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;

@@ -162,7 +164,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
     private int terminateAfter = SearchContext.DEFAULT_TERMINATE_AFTER;

     private StoredFieldsContext storedFieldsContext;
-    private List<String> docValueFields;
+    private List<FieldAndFormat> docValueFields;
     private List<ScriptField> scriptFields;
     private FetchSourceContext fetchSourceContext;

@@ -197,7 +199,22 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
         aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new);
         explain = in.readOptionalBoolean();
         fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
-        docValueFields = (List<String>) in.readGenericValue();
+        if (in.getVersion().before(Version.V_7_0_0_alpha1)) { // TODO: change to 6.4.0 after backport
+            List<String> dvFields = (List<String>) in.readGenericValue();
+            if (dvFields == null) {
+                docValueFields = null;
+            } else {
+                docValueFields = dvFields.stream()
+                        .map(field -> new FieldAndFormat(field, null))
+                        .collect(Collectors.toList());
+            }
+        } else {
+            if (in.readBoolean()) {
+                docValueFields = in.readList(FieldAndFormat::new);
+            } else {
+                docValueFields = null;
+            }
+        }
         storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
         from = in.readVInt();
         highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);

@@ -246,7 +263,16 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
         out.writeOptionalWriteable(aggregations);
         out.writeOptionalBoolean(explain);
         out.writeOptionalWriteable(fetchSourceContext);
-        out.writeGenericValue(docValueFields);
+        if (out.getVersion().before(Version.V_7_0_0_alpha1)) { // TODO: change to 6.4.0 after backport
+            out.writeGenericValue(docValueFields == null
+                    ? null
+                    : docValueFields.stream().map(ff -> ff.field).collect(Collectors.toList()));
+        } else {
+            out.writeBoolean(docValueFields != null);
+            if (docValueFields != null) {
+                out.writeList(docValueFields);
+            }
+        }
         out.writeOptionalWriteable(storedFieldsContext);
         out.writeVInt(from);
         out.writeOptionalWriteable(highlightBuilder);

@@ -764,22 +790,30 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
     /**
      * Gets the docvalue fields.
      */
-    public List<String> docValueFields() {
+    public List<FieldAndFormat> docValueFields() {
         return docValueFields;
     }

     /**
-     * Adds a field to load from the docvalue and return as part of the
+     * Adds a field to load from the doc values and return as part of the
      * search request.
      */
-    public SearchSourceBuilder docValueField(String name) {
+    public SearchSourceBuilder docValueField(String name, @Nullable String format) {
         if (docValueFields == null) {
             docValueFields = new ArrayList<>();
         }
-        docValueFields.add(name);
+        docValueFields.add(new FieldAndFormat(name, format));
         return this;
     }

+    /**
+     * Adds a field to load from the doc values and return as part of the
+     * search request.
+     */
+    public SearchSourceBuilder docValueField(String name) {
+        return docValueField(name, null);
+    }
+
     /**
      * Adds a script field under the given name with the provided script.
      *

@@ -1076,12 +1110,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
                 } else if (DOCVALUE_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                     docValueFields = new ArrayList<>();
                     while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                        if (token == XContentParser.Token.VALUE_STRING) {
-                            docValueFields.add(parser.text());
-                        } else {
-                            throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING +
-                                    "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
-                        }
+                        docValueFields.add(FieldAndFormat.fromXContent(parser));
                     }
                 } else if (INDICES_BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                     while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {

@@ -1177,8 +1206,13 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R

         if (docValueFields != null) {
             builder.startArray(DOCVALUE_FIELDS_FIELD.getPreferredName());
-            for (String docValueField : docValueFields) {
-                builder.value(docValueField);
+            for (FieldAndFormat docValueField : docValueFields) {
+                builder.startObject()
+                    .field("field", docValueField.field);
+                if (docValueField.format != null) {
+                    builder.field("format", docValueField.format);
+                }
+                builder.endObject();
             }
             builder.endArray();
         }
@@ -18,23 +18,111 @@
  */
 package org.elasticsearch.search.fetch.subphase;

+import org.elasticsearch.Version;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContent;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParser.Token;
+
+import java.io.IOException;
 import java.util.List;
+import java.util.Objects;

 /**
  * All the required context to pull a field from the doc values.
  */
 public class DocValueFieldsContext {

-    private final List<String> fields;
+    public static final String USE_DEFAULT_FORMAT = "use_field_mapping";

-    public DocValueFieldsContext(List<String> fields) {
+    /**
+     * Wrapper around a field name and the format that should be used to
+     * display values of this field.
+     */
+    public static final class FieldAndFormat implements Writeable {
+
+        private static final ConstructingObjectParser<FieldAndFormat, Void> PARSER = new ConstructingObjectParser<>("script",
+                a -> new FieldAndFormat((String) a[0], (String) a[1]));
+        static {
+            PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("field"));
+            PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("format"));
+        }
+
+        /**
+         * Parse a {@link FieldAndFormat} from some {@link XContent}.
+         */
+        public static FieldAndFormat fromXContent(XContentParser parser) throws IOException {
+            Token token = parser.currentToken();
+            if (token.isValue()) {
+                return new FieldAndFormat(parser.text(), null);
+            } else {
+                return PARSER.apply(parser, null);
+            }
+        }
+
+        /** The name of the field. */
+        public final String field;
+
+        /** The format of the field, or {@code null} if defaults should be used. */
+        public final String format;
+
+        /** Sole constructor. */
+        public FieldAndFormat(String field, @Nullable String format) {
+            this.field = Objects.requireNonNull(field);
+            this.format = format;
+        }
+
+        /** Serialization constructor. */
+        public FieldAndFormat(StreamInput in) throws IOException {
+            this.field = in.readString();
+            if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { // TODO: change to 6.4.0 after backport
+                format = in.readOptionalString();
+            } else {
+                format = null;
+            }
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(field);
+            if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { // TODO: change to 6.4.0 after backport
+                out.writeOptionalString(format);
+            }
+        }
+
+        @Override
+        public int hashCode() {
+            int h = field.hashCode();
+            h = 31 * h + Objects.hashCode(format);
+            return h;
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == null || getClass() != obj.getClass()) {
+                return false;
+            }
+            FieldAndFormat other = (FieldAndFormat) obj;
+            return field.equals(other.field) && Objects.equals(format, other.format);
+        }
+
+    }
+
+    private final List<FieldAndFormat> fields;
+
+    public DocValueFieldsContext(List<FieldAndFormat> fields) {
         this.fields = fields;
     }

     /**
      * Returns the required docvalue fields
      */
-    public List<String> fields() {
+    public List<FieldAndFormat> fields() {
         return this.fields;
     }
 }
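A small sketch of how the two accepted notations map onto `FieldAndFormat`, and of its value semantics:

```java
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;

public class FieldAndFormatSketch {
    public static void main(String[] args) {
        // The two notations accepted in docvalue_fields map onto the same class:
        //   "my_field"                                       -> new FieldAndFormat("my_field", null)
        //   {"field": "my_field", "format": "epoch_millis"}  -> new FieldAndFormat("my_field", "epoch_millis")
        FieldAndFormat plain = new FieldAndFormat("my_field", null);
        FieldAndFormat formatted = new FieldAndFormat("my_field", "epoch_millis");
        // equals/hashCode compare both the field name and the format:
        System.out.println(plain.equals(formatted));                       // false
        System.out.println(plain.equals(new FieldAndFormat("my_field", null))); // true
    }
}
```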
@ -20,19 +20,32 @@ package org.elasticsearch.search.fetch.subphase;
|
|||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.ReaderUtil;
|
||||
import org.apache.lucene.index.SortedNumericDocValues;
|
||||
import org.elasticsearch.common.document.DocumentField;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.index.fielddata.AtomicFieldData;
|
||||
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.fetch.FetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Query sub phase which pulls data from doc values
|
||||
|
@ -41,6 +54,8 @@ import java.util.HashMap;
|
|||
*/
|
||||
public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
|
||||
|
||||
private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(DocValueFieldsFetchSubPhase.class));
|
||||
|
||||
@Override
|
||||
public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
|
||||
|
||||
|
@ -48,9 +63,10 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
|
|||
// retrieve the `doc_value` associated with the collapse field
|
||||
String name = context.collapse().getFieldType().name();
 if (context.docValueFieldsContext() == null) {
-    context.docValueFieldsContext(new DocValueFieldsContext(Collections.singletonList(name)));
-} else if (context.docValueFieldsContext().fields().contains(name) == false) {
-    context.docValueFieldsContext().fields().add(name);
+    context.docValueFieldsContext(new DocValueFieldsContext(
+        Collections.singletonList(new FieldAndFormat(name, DocValueFieldsContext.USE_DEFAULT_FORMAT))));
+} else if (context.docValueFieldsContext().fields().stream().map(ff -> ff.field).anyMatch(name::equals) == false) {
+    context.docValueFieldsContext().fields().add(new FieldAndFormat(name, DocValueFieldsContext.USE_DEFAULT_FORMAT));
 }
 }

@@ -59,24 +75,51 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
     }

     hits = hits.clone(); // don't modify the incoming hits
-    Arrays.sort(hits, (a, b) -> Integer.compare(a.docId(), b.docId()));
+    Arrays.sort(hits, Comparator.comparingInt(SearchHit::docId));

-    for (String field : context.docValueFieldsContext().fields()) {
+    for (FieldAndFormat fieldAndFormat : context.docValueFieldsContext().fields()) {
+        String field = fieldAndFormat.field;
         MappedFieldType fieldType = context.mapperService().fullName(field);
         if (fieldType != null) {
+            final IndexFieldData<?> indexFieldData = context.getForField(fieldType);
+            final DocValueFormat format;
+            if (fieldAndFormat.format == null) {
+                DEPRECATION_LOGGER.deprecated("Doc-value field [" + fieldAndFormat.field + "] is not using a format. The output will " +
+                    "change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass " +
+                    "[format={}] with the doc value field in order to opt in for the future behaviour and ease the migration to " +
+                    "7.0.", DocValueFieldsContext.USE_DEFAULT_FORMAT);
+                format = null;
+            } else {
+                String formatDesc = fieldAndFormat.format;
+                if (Objects.equals(formatDesc, DocValueFieldsContext.USE_DEFAULT_FORMAT)) {
+                    formatDesc = null;
+                }
+                format = fieldType.docValueFormat(formatDesc, null);
+            }
             LeafReaderContext subReaderContext = null;
             AtomicFieldData data = null;
-            ScriptDocValues<?> values = null;
+            ScriptDocValues<?> scriptValues = null; // legacy
+            SortedBinaryDocValues binaryValues = null; // binary / string / ip fields
+            SortedNumericDocValues longValues = null; // int / date fields
+            SortedNumericDoubleValues doubleValues = null; // floating-point fields
             for (SearchHit hit : hits) {
                 // if the reader index has changed we need to get a new doc values reader instance
                 if (subReaderContext == null || hit.docId() >= subReaderContext.docBase + subReaderContext.reader().maxDoc()) {
                     int readerIndex = ReaderUtil.subIndex(hit.docId(), context.searcher().getIndexReader().leaves());
                     subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
-                    data = context.getForField(fieldType).load(subReaderContext);
-                    values = data.getScriptValues();
+                    data = indexFieldData.load(subReaderContext);
+                    if (format == null) {
+                        scriptValues = data.getScriptValues();
+                    } else if (indexFieldData instanceof IndexNumericFieldData) {
+                        if (((IndexNumericFieldData) indexFieldData).getNumericType().isFloatingPoint()) {
+                            doubleValues = ((AtomicNumericFieldData) data).getDoubleValues();
+                        } else {
+                            longValues = ((AtomicNumericFieldData) data).getLongValues();
+                        }
+                    } else {
+                        binaryValues = data.getBytesValues();
+                    }
                 }
-                int subDocId = hit.docId() - subReaderContext.docBase;
-                values.setNextDocId(subDocId);
                 if (hit.fieldsOrNull() == null) {
                     hit.fields(new HashMap<>(2));
                 }

@@ -85,7 +128,33 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
                     hitField = new DocumentField(field, new ArrayList<>(2));
                     hit.getFields().put(field, hitField);
                 }
-                hitField.getValues().addAll(values);
+                final List<Object> values = hitField.getValues();
+
+                int subDocId = hit.docId() - subReaderContext.docBase;
+                if (scriptValues != null) {
+                    scriptValues.setNextDocId(subDocId);
+                    values.addAll(scriptValues);
+                } else if (binaryValues != null) {
+                    if (binaryValues.advanceExact(subDocId)) {
+                        for (int i = 0, count = binaryValues.docValueCount(); i < count; ++i) {
+                            values.add(format.format(binaryValues.nextValue()));
+                        }
+                    }
+                } else if (longValues != null) {
+                    if (longValues.advanceExact(subDocId)) {
+                        for (int i = 0, count = longValues.docValueCount(); i < count; ++i) {
+                            values.add(format.format(longValues.nextValue()));
+                        }
+                    }
+                } else if (doubleValues != null) {
+                    if (doubleValues.advanceExact(subDocId)) {
+                        for (int i = 0, count = doubleValues.docValueCount(); i < count; ++i) {
+                            values.add(format.format(doubleValues.nextValue()));
+                        }
+                    }
+                } else {
+                    throw new AssertionError("Unreachable code");
+                }
             }
         }
     }

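The fetch sub-phase above is the server-side half of the feature: each requested field is loaded once per segment and, when a format was supplied, pushed through the matching `DocValueFormat` branch. For orientation, a minimal client-side sketch of opting in; the index and field names are invented for illustration, and `addDocValueField(String, String)` is the two-argument overload this change introduces:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;

public class DocValueFormatSketch {
    // Hypothetical helper: "my_index" and both field names are made up.
    static SearchResponse fetchFormatted(Client client) {
        return client.prepareSearch("my_index")
                .setQuery(QueryBuilders.matchAllQuery())
                // resolve the format from the mappings (the planned 7.0 default)
                .addDocValueField("my_keyword_field", "use_field_mapping")
                // date fields may name a concrete format such as epoch millis
                .addDocValueField("my_date_field", "epoch_millis")
                .get();
    }
}
--------------------------------------------------
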
@@ -36,8 +36,8 @@ public class BooleanFieldTypeTests extends FieldTypeTestCase {

     public void testValueFormat() {
         MappedFieldType ft = createDefaultFieldType();
-        assertEquals("false", ft.docValueFormat(null, null).format(0));
-        assertEquals("true", ft.docValueFormat(null, null).format(1));
+        assertEquals(false, ft.docValueFormat(null, null).format(0));
+        assertEquals(true, ft.docValueFormat(null, null).format(1));
     }

     public void testValueForSearch() {

@@ -32,6 +32,8 @@ import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests;
 import org.elasticsearch.search.internal.ShardSearchLocalRequest;

@@ -147,7 +149,9 @@ public class InnerHitBuilderTests extends ESTestCase {
     if (randomBoolean()) {
         innerHits.setStoredFieldNames(randomListStuff(16, () -> randomAlphaOfLengthBetween(1, 16)));
     }
-    innerHits.setDocValueFields(randomListStuff(16, () -> randomAlphaOfLengthBetween(1, 16)));
+    innerHits.setDocValueFields(randomListStuff(16,
+        () -> new FieldAndFormat(randomAlphaOfLengthBetween(1, 16),
+            randomBoolean() ? null : DocValueFieldsContext.USE_DEFAULT_FORMAT)));
     // Random script fields deduped on their field name.
     Map<String, SearchSourceBuilder.ScriptField> scriptFields = new HashMap<>();
     for (SearchSourceBuilder.ScriptField field: randomListStuff(16, InnerHitBuilderTests::randomScript)) {

@@ -187,9 +191,9 @@ public class InnerHitBuilderTests extends ESTestCase {
     modifiers.add(() -> copy.setName(randomValueOtherThan(copy.getName(), () -> randomAlphaOfLengthBetween(1, 16))));
     modifiers.add(() -> {
         if (randomBoolean()) {
-            copy.setDocValueFields(randomValueOtherThan(copy.getDocValueFields(), () -> {
-                return randomListStuff(16, () -> randomAlphaOfLengthBetween(1, 16));
-            }));
+            copy.setDocValueFields(randomValueOtherThan(copy.getDocValueFields(),
+                () -> randomListStuff(16, () -> new FieldAndFormat(randomAlphaOfLengthBetween(1, 16),
+                    randomBoolean() ? null : DocValueFieldsContext.USE_DEFAULT_FORMAT))));
         } else {
             copy.addDocValueField(randomAlphaOfLengthBetween(1, 16));
         }

@@ -85,20 +85,20 @@ public class DocValueFormatTests extends ESTestCase {
     }

     public void testRawFormat() {
-        assertEquals("0", DocValueFormat.RAW.format(0));
-        assertEquals("-1", DocValueFormat.RAW.format(-1));
-        assertEquals("1", DocValueFormat.RAW.format(1));
+        assertEquals(0L, DocValueFormat.RAW.format(0));
+        assertEquals(-1L, DocValueFormat.RAW.format(-1));
+        assertEquals(1L, DocValueFormat.RAW.format(1));

-        assertEquals("0.0", DocValueFormat.RAW.format(0d));
-        assertEquals("0.5", DocValueFormat.RAW.format(.5d));
-        assertEquals("-1.0", DocValueFormat.RAW.format(-1d));
+        assertEquals(0d, DocValueFormat.RAW.format(0d));
+        assertEquals(.5d, DocValueFormat.RAW.format(.5d));
+        assertEquals(-1d, DocValueFormat.RAW.format(-1d));

         assertEquals("abc", DocValueFormat.RAW.format(new BytesRef("abc")));
     }

     public void testBooleanFormat() {
-        assertEquals("false", DocValueFormat.BOOLEAN.format(0));
-        assertEquals("true", DocValueFormat.BOOLEAN.format(1));
+        assertEquals(false, DocValueFormat.BOOLEAN.format(0));
+        assertEquals(true, DocValueFormat.BOOLEAN.format(1));
     }

     public void testIpFormat() {

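The updated assertions pin down the behavioural side of this change: `RAW` and `BOOLEAN` now hand back native Java values rather than their string renderings, and only byte payloads are still rendered as text. A small sketch of the new contract, with arbitrary sample values:

[source,java]
--------------------------------------------------
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.search.DocValueFormat;

public class RawFormatSketch {
    public static void main(String[] args) {
        assert DocValueFormat.RAW.format(42).equals(42L);             // longs stay longs
        assert DocValueFormat.RAW.format(0.5d).equals(0.5d);          // doubles stay doubles
        assert DocValueFormat.BOOLEAN.format(1).equals(Boolean.TRUE); // 1 becomes true
        assert DocValueFormat.RAW.format(new BytesRef("abc")).equals("abc"); // bytes -> UTF-8 text
    }
}
--------------------------------------------------
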
@@ -37,7 +37,7 @@ public class InternalSumTests extends InternalAggregationTestCase<InternalSum> {
     @Override
     protected InternalSum createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         double value = frequently() ? randomDouble() : randomFrom(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN);
-        DocValueFormat formatter = randomFrom(new DocValueFormat.Decimal("###.##"), DocValueFormat.BOOLEAN, DocValueFormat.RAW);
+        DocValueFormat formatter = randomFrom(new DocValueFormat.Decimal("###.##"), DocValueFormat.RAW);
         return new InternalSum(name, value, formatter, pipelineAggregators, metaData);
     }

@@ -583,7 +583,7 @@ public class TopHitsIT extends ESIntegTestCase {
     .highlighter(new HighlightBuilder().field("text"))
     .explain(true)
     .storedField("text")
-    .fieldDataField("field1")
+    .docValueField("field1")
     .scriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap()))
     .fetchSource("text", null)
     .version(true)

@@ -865,7 +865,7 @@ public class TopHitsIT extends ESIntegTestCase {
     .addAggregation(
         nested("to-comments", "comments").subAggregation(
             topHits("top-comments").size(1).highlighter(new HighlightBuilder().field(hlField)).explain(true)
-                .fieldDataField("comments.user")
+                .docValueField("comments.user")
                 .scriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())).fetchSource("comments.message", null)
                 .version(true).sort("comments.date", SortOrder.ASC))).get();
     assertHitCount(searchResponse, 2);

@@ -81,7 +81,7 @@ public class TopHitsTests extends BaseAggregationTestCase<TopHitsAggregationBuilder> {
     if (randomBoolean()) {
         int fieldDataFieldsSize = randomInt(25);
         for (int i = 0; i < fieldDataFieldsSize; i++) {
-            factory.fieldDataField(randomAlphaOfLengthBetween(5, 50));
+            factory.docValueField(randomAlphaOfLengthBetween(5, 50));
         }
     }
     if (randomBoolean()) {

@@ -803,6 +803,65 @@ public class SearchFieldsIT extends ESIntegTestCase {
     assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
     assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
     assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1"));
+
+    builder = client().prepareSearch().setQuery(matchAllQuery())
+            .addDocValueField("text_field", "use_field_mapping")
+            .addDocValueField("keyword_field", "use_field_mapping")
+            .addDocValueField("byte_field", "use_field_mapping")
+            .addDocValueField("short_field", "use_field_mapping")
+            .addDocValueField("integer_field", "use_field_mapping")
+            .addDocValueField("long_field", "use_field_mapping")
+            .addDocValueField("float_field", "use_field_mapping")
+            .addDocValueField("double_field", "use_field_mapping")
+            .addDocValueField("date_field", "use_field_mapping")
+            .addDocValueField("boolean_field", "use_field_mapping")
+            .addDocValueField("ip_field", "use_field_mapping");
+    searchResponse = builder.execute().actionGet();
+
+    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
+    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
+    fields = new HashSet<>(searchResponse.getHits().getAt(0).getFields().keySet());
+    assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field",
+            "float_field", "double_field", "date_field", "boolean_field", "text_field", "keyword_field",
+            "ip_field")));
+
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("byte_field").getValue().toString(), equalTo("1"));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("short_field").getValue().toString(), equalTo("2"));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("integer_field").getValue(), equalTo((Object) 3L));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
+            equalTo(Joda.forPattern("dateOptionalTime").printer().print(date)));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1"));
+
+    builder = client().prepareSearch().setQuery(matchAllQuery())
+            .addDocValueField("byte_field", "#.0")
+            .addDocValueField("short_field", "#.0")
+            .addDocValueField("integer_field", "#.0")
+            .addDocValueField("long_field", "#.0")
+            .addDocValueField("float_field", "#.0")
+            .addDocValueField("double_field", "#.0")
+            .addDocValueField("date_field", "epoch_millis");
+    searchResponse = builder.execute().actionGet();
+
+    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
+    assertThat(searchResponse.getHits().getHits().length, equalTo(1));
+    fields = new HashSet<>(searchResponse.getHits().getAt(0).getFields().keySet());
+    assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field",
+            "float_field", "double_field", "date_field")));
+
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("byte_field").getValue(), equalTo("1.0"));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("short_field").getValue(), equalTo("2.0"));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("integer_field").getValue(), equalTo("3.0"));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo("4.0"));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo("5.0"));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo("6.0"));
+    assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
+            equalTo(Joda.forPattern("epoch_millis").printer().print(date)));
 }

 public void testScriptFields() throws Exception {

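The string results asserted in the last block follow directly from `java.text.DecimalFormat` semantics: the `#.0` pattern forces exactly one fraction digit, which is why the integer doc value `1` comes back as `"1.0"`. A standalone sketch of just that formatting step:

[source,java]
--------------------------------------------------
import java.text.DecimalFormat;

public class DecimalPatternSketch {
    public static void main(String[] args) {
        DecimalFormat df = new DecimalFormat("#.0");
        System.out.println(df.format(1));   // "1.0", matching the byte_field assertion
        System.out.println(df.format(6.0)); // "6.0", matching the double_field assertion
    }
}
--------------------------------------------------
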
@@ -23,8 +23,14 @@ Which returns:
 {
   "size" : 10,
   "docvalue_fields" : [
-    "page_count",
-    "release_date"
+    {
+      "field": "page_count",
+      "format": "use_field_mapping"
+    },
+    {
+      "field": "release_date",
+      "format": "epoch_millis"
+    }
   ],
   "_source": {
     "includes": [

@@ -197,6 +197,7 @@ public class MockClientBuilder {
     when(builder.setFetchSource(anyBoolean())).thenReturn(builder);
     when(builder.setScroll(anyString())).thenReturn(builder);
     when(builder.addDocValueField(any(String.class))).thenReturn(builder);
+    when(builder.addDocValueField(any(String.class), any(String.class))).thenReturn(builder);
     when(builder.addSort(any(String.class), any(SortOrder.class))).thenReturn(builder);
     when(builder.setQuery(any())).thenReturn(builder);
     when(builder.setSize(anyInt())).thenReturn(builder);

@@ -246,6 +247,7 @@ public class MockClientBuilder {
     when(builder.setSize(eq(size))).thenReturn(builder);
     when(builder.setFetchSource(eq(true))).thenReturn(builder);
     when(builder.addDocValueField(any(String.class))).thenReturn(builder);
+    when(builder.addDocValueField(any(String.class), any(String.class))).thenReturn(builder);
     when(builder.addSort(any(String.class), any(SortOrder.class))).thenReturn(builder);
     when(builder.get()).thenReturn(response);
     when(client.prepareSearch(eq(index))).thenReturn(builder);

@@ -262,6 +264,7 @@ public class MockClientBuilder {
     when(builder.setSize(any(Integer.class))).thenReturn(builder);
     when(builder.setFetchSource(eq(true))).thenReturn(builder);
     when(builder.addDocValueField(any(String.class))).thenReturn(builder);
+    when(builder.addDocValueField(any(String.class), any(String.class))).thenReturn(builder);
     when(builder.addSort(any(String.class), any(SortOrder.class))).thenReturn(builder);
     when(builder.get()).thenReturn(response);
     when(client.prepareSearch(eq(index))).thenReturn(builder);

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.sql.plugin;

 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.test.AbstractStreamableTestCase;

 import java.io.IOException;

@@ -18,7 +19,7 @@ public class SqlTranslateResponseTests extends AbstractStreamableTestCase<SqlTranslateResponse> {
     if (randomBoolean()) {
         long docValues = iterations(5, 10);
         for (int i = 0; i < docValues; i++) {
-            s.docValueField(randomAlphaOfLength(10));
+            s.docValueField(randomAlphaOfLength(10), DocValueFieldsContext.USE_DEFAULT_FORMAT);
         }
     }

@@ -341,12 +341,12 @@ public class Querier {
     private HitExtractor createExtractor(FieldExtraction ref) {
         if (ref instanceof SearchHitFieldRef) {
             SearchHitFieldRef f = (SearchHitFieldRef) ref;
-            return new FieldHitExtractor(f.name(), f.useDocValue(), f.hitName());
+            return new FieldHitExtractor(f.name(), f.getDataType(), f.useDocValue(), f.hitName());
         }

         if (ref instanceof ScriptFieldRef) {
             ScriptFieldRef f = (ScriptFieldRef) ref;
-            return new FieldHitExtractor(f.name(), true);
+            return new FieldHitExtractor(f.name(), null, true);
         }

         if (ref instanceof ComputedRef) {

@@ -8,6 +8,8 @@ package org.elasticsearch.xpack.sql.execution.search;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;

 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;

@@ -22,7 +24,7 @@ import java.util.Set;
 public class SqlSourceBuilder {
     // The LinkedHashMaps preserve the order of the fields in the response
     final Set<String> sourceFields = new LinkedHashSet<>();
-    final Set<String> docFields = new LinkedHashSet<>();
+    final Set<FieldAndFormat> docFields = new LinkedHashSet<>();
     final Map<String, Script> scriptFields = new LinkedHashMap<>();

     boolean trackScores = false;

@@ -47,8 +49,8 @@ public class SqlSourceBuilder {
     /**
      * Retrieve the requested field from doc values (or fielddata) of the document
      */
-    public void addDocField(String field) {
-        docFields.add(field);
+    public void addDocField(String field, String format) {
+        docFields.add(new FieldAndFormat(field, format));
     }

     /**

@@ -67,7 +69,8 @@ public class SqlSourceBuilder {
     if (!sourceFields.isEmpty()) {
         sourceBuilder.fetchSource(sourceFields.toArray(Strings.EMPTY_ARRAY), null);
     }
-    docFields.forEach(sourceBuilder::docValueField);
+    docFields.forEach(field -> sourceBuilder.docValueField(field.field,
+        field.format == null ? DocValueFieldsContext.USE_DEFAULT_FORMAT : field.format));
     scriptFields.forEach(sourceBuilder::scriptField);
     }
 }

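From the caller's perspective, `SqlSourceBuilder` now takes the format alongside the field, with `null` standing for "resolve from the mapping". A hedged usage sketch; the field names are invented, and it assumes the builder's `build(SearchSourceBuilder)` entry point that the last hunk above modifies:

[source,java]
--------------------------------------------------
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder;

public class SqlSourceBuilderSketch {
    static SearchSourceBuilder sketch() {
        SqlSourceBuilder ssb = new SqlSourceBuilder();
        ssb.addSourceField("name");                      // fetched from _source
        ssb.addDocField("count", null);                  // doc values, format from mapping
        ssb.addDocField("release_date", "epoch_millis"); // doc values, explicit format
        SearchSourceBuilder source = new SearchSourceBuilder();
        ssb.build(source); // null formats fall back to USE_DEFAULT_FORMAT here
        return source;
    }
}
--------------------------------------------------
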
@@ -5,12 +5,16 @@
  */
 package org.elasticsearch.xpack.sql.execution.search.extractor;

+import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
+import org.elasticsearch.xpack.sql.type.DataType;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
 import org.joda.time.ReadableDateTime;

 import java.io.IOException;

@@ -41,15 +45,17 @@ public class FieldHitExtractor implements HitExtractor {
     }

     private final String fieldName, hitName;
+    private final DataType dataType;
     private final boolean useDocValue;
     private final String[] path;

-    public FieldHitExtractor(String name, boolean useDocValue) {
-        this(name, useDocValue, null);
+    public FieldHitExtractor(String name, DataType dataType, boolean useDocValue) {
+        this(name, dataType, useDocValue, null);
     }

-    public FieldHitExtractor(String name, boolean useDocValue, String hitName) {
+    public FieldHitExtractor(String name, DataType dataType, boolean useDocValue, String hitName) {
         this.fieldName = name;
+        this.dataType = dataType;
         this.useDocValue = useDocValue;
         this.hitName = hitName;

@@ -64,6 +70,16 @@ public class FieldHitExtractor implements HitExtractor {

     FieldHitExtractor(StreamInput in) throws IOException {
         fieldName = in.readString();
+        if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
+            String esType = in.readOptionalString();
+            if (esType != null) {
+                dataType = DataType.fromEsType(esType);
+            } else {
+                dataType = null;
+            }
+        } else {
+            dataType = null;
+        }
         useDocValue = in.readBoolean();
         hitName = in.readOptionalString();
         path = sourcePath(fieldName, useDocValue, hitName);

@@ -77,6 +93,9 @@ public class FieldHitExtractor implements HitExtractor {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(fieldName);
+        if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
+            out.writeOptionalString(dataType == null ? null : dataType.esType);
+        }
         out.writeBoolean(useDocValue);
         out.writeOptionalString(hitName);
     }

@@ -117,6 +136,9 @@ public class FieldHitExtractor implements HitExtractor {
     if (values instanceof Map) {
         throw new SqlIllegalArgumentException("Objects (returned by [{}]) are not supported", fieldName);
     }
+    if (values instanceof String && dataType == DataType.DATE) {
+        return new DateTime(Long.parseLong(values.toString()), DateTimeZone.UTC);
+    }
     if (values instanceof Long || values instanceof Double || values instanceof String || values instanceof Boolean
             || values instanceof ReadableDateTime) {
         return values;

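The effect of the new `dataType` parameter is easiest to see on a date column: SQL now asks for `epoch_millis`, so the hit carries the date as a string of millis that the extractor parses back into a Joda `DateTime`. A minimal sketch mirroring the `testGetDate` case added further down (field name and value are illustrative):

[source,java]
--------------------------------------------------
import java.util.Collections;

import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.sql.execution.search.extractor.FieldHitExtractor;
import org.elasticsearch.xpack.sql.type.DataType;

public class DateExtractionSketch {
    public static void main(String[] args) {
        SearchHit hit = new SearchHit(1);
        DocumentField field = new DocumentField("release_date",
                Collections.singletonList("1526467911780")); // epoch millis as a string
        hit.fields(Collections.singletonMap("release_date", field));

        FieldHitExtractor extractor = new FieldHitExtractor("release_date", DataType.DATE, true);
        System.out.println(extractor.extract(hit)); // DateTime at 1526467911780 millis, UTC
    }
}
--------------------------------------------------
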
@@ -9,9 +9,11 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
+import org.joda.time.DateTime;
 import org.joda.time.DateTimeFieldType;
+import org.joda.time.DateTimeZone;
 import org.joda.time.ReadableDateTime;
+import org.joda.time.ReadableInstant;

 import java.io.IOException;
 import java.util.Objects;

@@ -78,15 +80,21 @@ public class DateTimeProcessor implements Processor {
         return null;
     }

-    if (!(l instanceof ReadableDateTime)) {
-        throw new SqlIllegalArgumentException("A date/time is required; received {}", l);
-    }
-
-    ReadableDateTime dt = (ReadableDateTime) l;
-
+    ReadableDateTime dt;
+    if (l instanceof String) {
+        // 6.4+
+        final long millis = Long.parseLong(l.toString());
+        dt = new DateTime(millis, DateTimeZone.forTimeZone(timeZone));
+    } else if (l instanceof ReadableInstant) {
+        // 6.3-
+        dt = (ReadableDateTime) l;
+        if (!TimeZone.getTimeZone("UTC").equals(timeZone)) {
+            dt = dt.toDateTime().withZone(DateTimeZone.forTimeZone(timeZone));
+        }
+    } else {
+        throw new SqlIllegalArgumentException("A string or a date is required; received {}", l);
+    }

     return extractor.extract(dt);
 }

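To make the version split concrete: the processor now accepts either wire shape and normalises both to the same instant. A hedged sketch of the two inputs it distinguishes, with the timezone fixed to UTC for brevity:

[source,java]
--------------------------------------------------
import java.util.TimeZone;

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class DateTimeInputSketch {
    public static void main(String[] args) {
        // 6.4+ node: the formatted doc value arrives as an epoch-millis string
        Object fromNewNode = "1526467911780";
        // 6.3- node: the doc value arrives as a ready-made Joda date
        Object fromOldNode = new DateTime(1526467911780L, DateTimeZone.UTC);

        DateTime parsed = new DateTime(Long.parseLong((String) fromNewNode),
                DateTimeZone.forTimeZone(TimeZone.getTimeZone("UTC")));
        System.out.println(parsed.equals(fromOldNode)); // true: same instant either way
    }
}
--------------------------------------------------
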
@@ -173,7 +173,7 @@ public class QueryContainer {
     // reference methods
     //
     private FieldExtraction topHitFieldRef(FieldAttribute fieldAttr) {
-        return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.field().hasDocValues());
+        return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.field().getDataType(), fieldAttr.field().hasDocValues());
     }

     private Tuple<QueryContainer, FieldExtraction> nestedHitFieldRef(FieldAttribute attr) {

@@ -184,7 +184,8 @@ public class QueryContainer {
     Query q = rewriteToContainNestedField(query, attr.location(),
         attr.nestedParent().name(), name, attr.field().hasDocValues());

-    SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(name, attr.field().hasDocValues(), attr.parent().name());
+    SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(name, attr.field().getDataType(),
+        attr.field().hasDocValues(), attr.parent().name());
     nestedRefs.add(nestedFieldRef);

     return new Tuple<>(new QueryContainer(q, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, limit), nestedFieldRef);

@@ -6,18 +6,21 @@
 package org.elasticsearch.xpack.sql.querydsl.container;

 import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder;
+import org.elasticsearch.xpack.sql.type.DataType;

 public class SearchHitFieldRef extends FieldReference {
     private final String name;
+    private final DataType dataType;
     private final boolean docValue;
     private final String hitName;

-    public SearchHitFieldRef(String name, boolean useDocValueInsteadOfSource) {
-        this(name, useDocValueInsteadOfSource, null);
+    public SearchHitFieldRef(String name, DataType dataType, boolean useDocValueInsteadOfSource) {
+        this(name, dataType, useDocValueInsteadOfSource, null);
     }

-    public SearchHitFieldRef(String name, boolean useDocValueInsteadOfSource, String hitName) {
+    public SearchHitFieldRef(String name, DataType dataType, boolean useDocValueInsteadOfSource, String hitName) {
         this.name = name;
+        this.dataType = dataType;
         this.docValue = useDocValueInsteadOfSource;
         this.hitName = hitName;
     }

@@ -31,6 +34,10 @@ public class SearchHitFieldRef extends FieldReference {
         return name;
     }

+    public DataType getDataType() {
+        return dataType;
+    }
+
     public boolean useDocValue() {
         return docValue;
     }

@@ -42,7 +49,8 @@ public class SearchHitFieldRef extends FieldReference {
         return;
     }
     if (docValue) {
-        sourceBuilder.addDocField(name);
+        String format = dataType == DataType.DATE ? "epoch_millis" : null;
+        sourceBuilder.addDocField(name, format);
     } else {
         sourceBuilder.addSourceField(name);
     }

@@ -16,6 +16,7 @@ import org.elasticsearch.license.License;
 import org.elasticsearch.license.License.OperationMode;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
 import org.elasticsearch.transport.Netty4Plugin;

@@ -150,7 +151,8 @@ public class SqlLicenseIT extends AbstractLicensesIntegrationTestCase {

     SqlTranslateResponse response = client().prepareExecute(SqlTranslateAction.INSTANCE).query("SELECT * FROM test").get();
     SearchSourceBuilder source = response.source();
-    assertThat(source.docValueFields(), Matchers.contains("count"));
+    assertThat(source.docValueFields(), Matchers.contains(
+        new DocValueFieldsContext.FieldAndFormat("count", DocValueFieldsContext.USE_DEFAULT_FORMAT)));
     FetchSourceContext fetchSource = source.fetchSource();
     assertThat(fetchSource.includes(), Matchers.arrayContaining("data"));
 }

@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.action;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.sort.SortBuilders;
 import org.elasticsearch.xpack.sql.plugin.SqlTranslateAction;

@@ -35,7 +36,9 @@ public class SqlTranslateActionIT extends AbstractSqlIntegTestCase {
     FetchSourceContext fetch = source.fetchSource();
     assertEquals(true, fetch.fetchSource());
     assertArrayEquals(new String[] { "data" }, fetch.includes());
-    assertEquals(singletonList("count"), source.docValueFields());
+    assertEquals(
+        singletonList(new DocValueFieldsContext.FieldAndFormat("count", DocValueFieldsContext.USE_DEFAULT_FORMAT)),
+        source.docValueFields());
     assertEquals(singletonList(SortBuilders.fieldSort("count")), source.sorts());
 }
 }

@@ -24,8 +24,8 @@ public class SqlSourceBuilderTests extends ESTestCase {
     ssb.trackScores();
     ssb.addSourceField("foo");
     ssb.addSourceField("foo2");
-    ssb.addDocField("bar");
-    ssb.addDocField("bar2");
+    ssb.addDocField("bar", null);
+    ssb.addDocField("bar2", null);
     final Script s = new Script("eggplant");
     ssb.addScriptField("baz", s);
     final Script s2 = new Script("potato");

@@ -35,7 +35,7 @@ public class SqlSourceBuilderTests extends ESTestCase {
     assertTrue(source.trackScores());
     FetchSourceContext fsc = source.fetchSource();
     assertThat(Arrays.asList(fsc.includes()), contains("foo", "foo2"));
-    assertThat(source.docValueFields(), contains("bar", "bar2"));
+    assertThat(source.docValueFields().stream().map(ff -> ff.field).collect(Collectors.toList()), contains("bar", "bar2"));
     Map<String, Script> scriptFields = source.scriptFields()
         .stream()
         .collect(Collectors.toMap(SearchSourceBuilder.ScriptField::fieldName, SearchSourceBuilder.ScriptField::script));

@@ -70,7 +70,7 @@ public class ComputingExtractorTests extends AbstractWireSerializingTestCase<ComputingExtractor> {
     public void testGet() {
         String fieldName = randomAlphaOfLength(5);
         ChainingProcessor extractor = new ChainingProcessor(
-            new HitExtractorProcessor(new FieldHitExtractor(fieldName, true)),
+            new HitExtractorProcessor(new FieldHitExtractor(fieldName, null, true)),
             new MathProcessor(MathOperation.LOG));

         int times = between(1, 1000);

@@ -14,6 +14,9 @@ import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.SqlException;
+import org.elasticsearch.xpack.sql.type.DataType;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;

 import java.io.IOException;
 import java.util.ArrayList;

@@ -31,7 +34,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
     public static FieldHitExtractor randomFieldHitExtractor() {
         String hitName = randomAlphaOfLength(5);
         String name = randomAlphaOfLength(5) + "." + hitName;
-        return new FieldHitExtractor(name, randomBoolean(), hitName);
+        return new FieldHitExtractor(name, null, randomBoolean(), hitName);
     }

     @Override

@@ -46,7 +49,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {

     @Override
     protected FieldHitExtractor mutateInstance(FieldHitExtractor instance) throws IOException {
-        return new FieldHitExtractor(instance.fieldName() + "mutated", true, instance.hitName());
+        return new FieldHitExtractor(instance.fieldName() + "mutated", null, true, instance.hitName());
     }

     @AwaitsFix(bugUrl = "implement after we're sure of the InnerHitExtractor's implementation")

@@ -60,7 +63,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
     String child = randomAlphaOfLength(5);
     String fieldName = grandparent + "." + parent + "." + child;

-    FieldHitExtractor extractor = new FieldHitExtractor(fieldName, true);
+    FieldHitExtractor extractor = new FieldHitExtractor(fieldName, null, true);

     int times = between(1, 1000);
     for (int i = 0; i < times; i++) {

@@ -84,7 +87,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
     String child = randomAlphaOfLength(5);
     String fieldName = grandparent + "." + parent + "." + child;

-    FieldHitExtractor extractor = new FieldHitExtractor(fieldName, false);
+    FieldHitExtractor extractor = new FieldHitExtractor(fieldName, null, false);

     int times = between(1, 1000);
     for (int i = 0; i < times; i++) {

@@ -123,7 +126,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {

     public void testGetDocValue() {
         String fieldName = randomAlphaOfLength(5);
-        FieldHitExtractor extractor = new FieldHitExtractor(fieldName, true);
+        FieldHitExtractor extractor = new FieldHitExtractor(fieldName, null, true);

         int times = between(1, 1000);
         for (int i = 0; i < times; i++) {

@@ -139,9 +142,19 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
         }
     }

+    public void testGetDate() {
+        long millis = 1526467911780L;
+        List<Object> documentFieldValues = Collections.singletonList(Long.toString(millis));
+        SearchHit hit = new SearchHit(1);
+        DocumentField field = new DocumentField("my_date_field", documentFieldValues);
+        hit.fields(singletonMap("my_date_field", field));
+        FieldHitExtractor extractor = new FieldHitExtractor("my_date_field", DataType.DATE, true);
+        assertEquals(new DateTime(millis, DateTimeZone.UTC), extractor.extract(hit));
+    }
+
     public void testGetSource() throws IOException {
         String fieldName = randomAlphaOfLength(5);
-        FieldHitExtractor extractor = new FieldHitExtractor(fieldName, false);
+        FieldHitExtractor extractor = new FieldHitExtractor(fieldName, null, false);

         int times = between(1, 1000);
         for (int i = 0; i < times; i++) {

@@ -164,12 +177,12 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
     }

     public void testToString() {
-        assertEquals("hit.field@hit", new FieldHitExtractor("hit.field", true, "hit").toString());
+        assertEquals("hit.field@hit", new FieldHitExtractor("hit.field", null, true, "hit").toString());
     }

     public void testMultiValuedDocValue() {
         String fieldName = randomAlphaOfLength(5);
-        FieldHitExtractor fe = new FieldHitExtractor(fieldName, true);
+        FieldHitExtractor fe = new FieldHitExtractor(fieldName, null, true);
         SearchHit hit = new SearchHit(1);
         DocumentField field = new DocumentField(fieldName, asList("a", "b"));
         hit.fields(singletonMap(fieldName, field));

@@ -179,7 +192,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {

     public void testMultiValuedSourceValue() throws IOException {
         String fieldName = randomAlphaOfLength(5);
-        FieldHitExtractor fe = new FieldHitExtractor(fieldName, false);
+        FieldHitExtractor fe = new FieldHitExtractor(fieldName, null, false);
         SearchHit hit = new SearchHit(1);
         XContentBuilder source = JsonXContent.contentBuilder();
         source.startObject(); {

@@ -194,7 +207,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {

     public void testSingleValueArrayInSource() throws IOException {
         String fieldName = randomAlphaOfLength(5);
-        FieldHitExtractor fe = new FieldHitExtractor(fieldName, false);
+        FieldHitExtractor fe = new FieldHitExtractor(fieldName, null, false);
         SearchHit hit = new SearchHit(1);
         XContentBuilder source = JsonXContent.contentBuilder();
         Object value = randomValue();

@@ -208,14 +221,14 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
     }

     public void testExtractSourcePath() {
-        FieldHitExtractor fe = new FieldHitExtractor("a.b.c", false);
+        FieldHitExtractor fe = new FieldHitExtractor("a.b.c", null, false);
         Object value = randomValue();
         Map<String, Object> map = singletonMap("a", singletonMap("b", singletonMap("c", value)));
         assertThat(fe.extractFromSource(map), is(value));
     }

     public void testExtractSourceIncorrectPath() {
-        FieldHitExtractor fe = new FieldHitExtractor("a.b.c.d", false);
+        FieldHitExtractor fe = new FieldHitExtractor("a.b.c.d", null, false);
         Object value = randomNonNullValue();
         Map<String, Object> map = singletonMap("a", singletonMap("b", singletonMap("c", value)));
         SqlException ex = expectThrows(SqlException.class, () -> fe.extractFromSource(map));

@@ -223,7 +236,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
     }

     public void testMultiValuedSource() {
-        FieldHitExtractor fe = new FieldHitExtractor("a", false);
+        FieldHitExtractor fe = new FieldHitExtractor("a", null, false);
         Object value = randomValue();
         Map<String, Object> map = singletonMap("a", asList(value, value));
         SqlException ex = expectThrows(SqlException.class, () -> fe.extractFromSource(map));

@@ -1,5 +1,10 @@
 ---
 "Translate SQL":
+  - skip:
+      version: " - 6.99.99" # TODO: change version on backport
+      reason: format option was added in 6.4
+      features: warnings
+
   - do:
       bulk:
         refresh: true

@@ -23,7 +28,8 @@
           - str
         excludes: []
       docvalue_fields:
-        - int
+        - field: int
+          format: use_field_mapping
       sort:
         - int:
             order: asc

@@ -103,7 +103,10 @@ public class CliExplainIT extends CliIntegrationTestCase {
     assertThat(readLine(), startsWith(" \"excludes\" : [ ]"));
     assertThat(readLine(), startsWith(" },"));
     assertThat(readLine(), startsWith(" \"docvalue_fields\" : ["));
-    assertThat(readLine(), startsWith(" \"i\""));
+    assertThat(readLine(), startsWith(" {"));
+    assertThat(readLine(), startsWith(" \"field\" : \"i\""));
+    assertThat(readLine(), startsWith(" \"format\" : \"use_field_mapping\""));
+    assertThat(readLine(), startsWith(" }"));
     assertThat(readLine(), startsWith(" ],"));
     assertThat(readLine(), startsWith(" \"sort\" : ["));
     assertThat(readLine(), startsWith(" {"));