Use mappings to format doc-value fields by default. (#30831)
Doc-value fields now return a value that is based on the mappings rather than the script implementation by default. This deprecates the special `use_field_mapping` docvalue format, which was added in #29639 only to ease the transition to 7.x and is no longer necessary in 7.0.
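As a rough illustration of the new default (a sketch only; the index and field names below are made up), a client that asks for doc-value fields without a format now gets values rendered with the format configured in the mappings, and can still pass an explicit format to override it:

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class DocValueFieldsDefaultFormatExample {
    public static SearchRequest buildRequest() {
        SearchSourceBuilder source = new SearchSourceBuilder()
            .query(QueryBuilders.matchAllQuery())
            // No format: the value is now formatted according to the field's mapping
            // (e.g. a date field uses its mapped date format) instead of the old
            // script-style output.
            .docValueField("my_keyword_field")
            // An explicit format still overrides the mapping-based default.
            .docValueField("my_date_field", "epoch_millis");
        return new SearchRequest("my_index").source(source);
    }
}

The special `use_field_mapping` format is still accepted during the transition, but it is deprecated because it now just restates the default.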
parent b63b50b945
commit c8af0f4bfa
@@ -122,6 +122,16 @@ using the "all fields" mode ("default_field": "*") or other fieldname expansions
 Search requests with extra content after the main object will no longer be accepted
 by the `_search` endpoint. A parsing exception will be thrown instead.

+[float]
+==== Doc-value fields default format
+
+The format of doc-value fields is changing to be the same as what could be
+obtained in 6.x with the special `use_field_mapping` format. This is mostly a
+change for date fields, which are now formatted based on the format that is
+configured in the mappings by default. This behavior can be changed by
+specifying a <<search-request-docvalue-fields,`format`>> within the doc-value
+field.
+
 [float]
 ==== Context Completion Suggester
@@ -12,9 +12,9 @@ GET /_search
     "match_all": {}
   },
   "docvalue_fields" : [
+    "my_ip_field", <1>
     {
-      "field": "my_ip_field", <1>
-      "format": "use_field_mapping" <2>
+      "field": "my_keyword_field" <2>
     },
     {
       "field": "my_date_field",

@@ -25,10 +25,10 @@ GET /_search
 --------------------------------------------------
 // CONSOLE
 <1> the name of the field
-<2> the special `use_field_mapping` format tells Elasticsearch to use the format from the mapping
-<3> date fields may use a custom format
+<2> an object notation is supported as well
+<3> the object notation allows to specify a custom format

-Doc value fields can work on fields that are not stored.
+Doc value fields can work on fields that have doc-values enabled, regardless of whether they are stored

 `*` can be used as a wild card, for example:

@@ -41,8 +41,8 @@ GET /_search
   },
   "docvalue_fields" : [
     {
-      "field": "*field", <1>
-      "format": "use_field_mapping" <2>
+      "field": "*_date_field", <1>
+      "format": "epoch_millis" <2>
     }
   ]
 }
@@ -62,9 +62,8 @@ While most fields do not support custom formats, some of them do:
 - <<date,Date>> fields can take any <<mapping-date-format,date format>>.
 - <<number,Numeric>> fields accept a https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html[DecimalFormat pattern].

-All fields support the special `use_field_mapping` format, which tells
-Elasticsearch to use the mappings to figure out a default format.
+By default fields are formatted based on a sensible configuration that depends
+on their mappings: `long`, `double` and other numeric fields are formatted as
+numbers, `keyword` fields are formatted as strings, `date` fields are formatted
+with the configured `date` format, etc.

-NOTE: The default is currently to return the same output as
-<<search-request-script-fields,script fields>>. However it will change in 7.0
-to behave as if the `use_field_mapping` format was provided.
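To make the documented behaviour above concrete, here is a hedged sketch using the low-level REST client (the index name and field names are assumptions): a plain field name picks up the mapping-based format, while the object notation carries an explicit format such as a date pattern.

import org.elasticsearch.client.Request;

public class DocValueFieldsRequestSketch {
    public static Request buildSearch() {
        Request request = new Request("GET", "/my_index/_search");
        // "my_keyword_field" uses the format from its mapping; the object notation
        // lets "my_date_field" ask for epoch_millis explicitly.
        request.setJsonEntity("{\"docvalue_fields\": [" +
            "\"my_keyword_field\", " +
            "{\"field\": \"my_date_field\", \"format\": \"epoch_millis\"}]}");
        return request;
    }
}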
@@ -246,10 +246,7 @@ POST test/_search
       "inner_hits": {
         "_source" : false,
         "docvalue_fields" : [
-          {
-            "field": "comments.text.keyword",
-            "format": "use_field_mapping"
-          }
+          "comments.text.keyword"
         ]
       }
     }
@@ -27,8 +27,7 @@ Which returns:
     "size" : 10,
     "docvalue_fields" : [
       {
-        "field": "page_count",
-        "format": "use_field_mapping"
+        "field": "page_count"
       },
       {
         "field": "release_date",
@@ -201,7 +201,7 @@ public class QueryBuilderBWCIT extends AbstractFullClusterRestartTestCase {
             QueryBuilder expectedQueryBuilder = (QueryBuilder) CANDIDATES.get(i)[1];
             Request request = new Request("GET", "/" + index + "/_search");
             request.setJsonEntity("{\"query\": {\"ids\": {\"values\": [\"" + Integer.toString(i) + "\"]}}, " +
-                "\"docvalue_fields\": [{\"field\":\"query.query_builder_field\", \"format\":\"use_field_mapping\"}]}");
+                "\"docvalue_fields\": [{\"field\":\"query.query_builder_field\"}]}");
             Response rsp = client().performRequest(request);
             assertEquals(200, rsp.getStatusLine().getStatusCode());
             Map<?, ?> hitRsp = (Map<?, ?>) ((List<?>) ((Map<?, ?>)toMap(rsp).get("hits")).get("hits")).get(0);
@@ -46,8 +46,8 @@ setup:
 "Nested doc version and seqIDs":

   - skip:
-      version: " - 6.3.99"
-      reason: "object notation for docvalue_fields was introduced in 6.4"
+      version: " - 6.99.99"
+      reason: "Triggers warnings before 7.0"

   - do:
       index:

@@ -62,7 +62,7 @@ setup:
   - do:
       search:
        rest_total_hits_as_int: true
-       body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": [ { "field": "_seq_no", "format": "use_field_mapping" } ]} }}, "version": true, "docvalue_fields" : [ { "field": "_seq_no", "format": "use_field_mapping" } ] }
+       body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": [ "_seq_no" ]} }}, "version": true, "docvalue_fields" : [ "_seq_no" ] }

   - match: { hits.total: 1 }
   - match: { hits.hits.0._index: "test" }

@@ -86,7 +86,7 @@ setup:
   - do:
       search:
        rest_total_hits_as_int: true
-       body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": [ { "field": "_seq_no", "format": "use_field_mapping" } ]} }}, "version": true, "docvalue_fields" : [ { "field": "_seq_no", "format": "use_field_mapping" } ] }
+       body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": [ "_seq_no" ]} }}, "version": true, "docvalue_fields" : [ "_seq_no" ] }

   - match: { hits.total: 1 }
   - match: { hits.hits.0._index: "test" }
@@ -144,12 +144,9 @@ setup:
 ---
 "docvalue_fields":
   - skip:
-      version: " - 6.4.0"
-      reason: format option was added in 6.4 and the deprecation message changed in 6.4.1
-      features: warnings
+      version: " - 6.9.99"
+      reason: Triggers a deprecation warning before 7.0
   - do:
-      warnings:
-        - 'There are doc-value fields which are not using a format. The output will change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass [format=use_field_mapping] with a doc value field in order to opt in for the future behaviour and ease the migration to 7.0: [count]'
       search:
         body:
           docvalue_fields: [ "count" ]

@@ -158,12 +155,9 @@ setup:
 ---
 "multiple docvalue_fields":
   - skip:
-      version: " - 6.4.0"
-      reason: format option was added in 6.4 and the deprecation message changed in 6.4.1
-      features: warnings
+      version: " - 6.9.99"
+      reason: Triggered a deprecation warning before 7.0
   - do:
-      warnings:
-        - 'There are doc-value fields which are not using a format. The output will change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass [format=use_field_mapping] with a doc value field in order to opt in for the future behaviour and ease the migration to 7.0: [count, include.field1.keyword]'
       search:
         body:
           docvalue_fields: [ "count", "include.field1.keyword" ]

@@ -172,12 +166,9 @@ setup:
 ---
 "docvalue_fields as url param":
   - skip:
-      version: " - 6.4.0"
-      reason: format option was added in 6.4 and the deprecation message changed in 6.4.1
-      features: warnings
+      version: " - 6.99.99"
+      reason: Triggered a deprecation warning before 7.0
   - do:
-      warnings:
-        - 'There are doc-value fields which are not using a format. The output will change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass [format=use_field_mapping] with a doc value field in order to opt in for the future behaviour and ease the migration to 7.0: [count]'
       search:
         docvalue_fields: [ "count" ]
   - match: { hits.hits.0.fields.count: [1] }

@@ -185,9 +176,12 @@ setup:
 ---
 "docvalue_fields with default format":
   - skip:
-      version: " - 6.3.99"
-      reason: format option was added in 6.4
+      version: " - 6.99.99"
+      reason: Only triggers warnings on 7.0+
+      features: warnings
   - do:
+      warnings:
+        - "[use_field_mapping] is a special format that was only used to ease the transition to 7.x. It has become the default and shouldn't be set explicitly anymore."
       search:
         body:
           docvalue_fields:
@@ -67,8 +67,8 @@ setup:
 "Docvalues_fields size limit":

   - skip:
-      version: " - 6.3.99"
-      reason: "The object notation for docvalue_fields is only supported on 6.4+"
+      version: " - 6.99.99"
+      reason: "Triggers warnings before 7.0"
   - do:
       catch: /Trying to retrieve too many docvalue_fields\. Must be less than or equal to[:] \[2\] but was \[3\]\. This limit can be set by changing the \[index.max_docvalue_fields_search\] index level setting\./
       search:

@@ -78,12 +78,9 @@ setup:
           query:
             match_all: {}
           docvalue_fields:
-            - field: "one"
-              format: "use_field_mapping"
-            - field: "two"
-              format: "use_field_mapping"
-            - field: "three"
-              format: "use_field_mapping"
+            - "one"
+            - "two"
+            - "three"

 ---
 "Script_fields size limit":
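The simplified list notation in the tests above has a direct equivalent when a request is built programmatically. A minimal sketch (field names taken from the test above, everything else assumed):

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class PlainDocValueFieldsSketch {
    public static SearchSourceBuilder build() {
        // Each field is requested without a format, which now means
        // "format the value based on the field's mapping".
        return new SearchSourceBuilder()
            .query(QueryBuilders.matchAllQuery())
            .docValueField("one")
            .docValueField("two")
            .docValueField("three");
    }
}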
@@ -38,8 +38,6 @@ import java.util.Objects;
  */
 public class DocValueFieldsContext {

-    public static final String USE_DEFAULT_FORMAT = "use_field_mapping";
-
     /**
      * Wrapper around a field name and the format that should be used to
      * display values of this field.
@@ -28,7 +28,6 @@ import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
-import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
 import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
 import org.elasticsearch.index.mapper.MappedFieldType;

@@ -46,7 +45,6 @@ import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Objects;
-import java.util.stream.Collectors;

 /**
  * Query sub phase which pulls data from doc values

@@ -55,7 +53,8 @@ import java.util.stream.Collectors;
  */
 public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {

-    private static final DeprecationLogger deprecationLogger = new DeprecationLogger(
+    private static final String USE_DEFAULT_FORMAT = "use_field_mapping";
+    private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(
         LogManager.getLogger(DocValueFieldsFetchSubPhase.class));

     @Override

@@ -66,9 +65,9 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
             String name = context.collapse().getFieldName();
             if (context.docValueFieldsContext() == null) {
                 context.docValueFieldsContext(new DocValueFieldsContext(
-                    Collections.singletonList(new FieldAndFormat(name, DocValueFieldsContext.USE_DEFAULT_FORMAT))));
+                    Collections.singletonList(new FieldAndFormat(name, null))));
             } else if (context.docValueFieldsContext().fields().stream().map(ff -> ff.field).anyMatch(name::equals) == false) {
-                context.docValueFieldsContext().fields().add(new FieldAndFormat(name, DocValueFieldsContext.USE_DEFAULT_FORMAT));
+                context.docValueFieldsContext().fields().add(new FieldAndFormat(name, null));
             }
         }

@@ -79,13 +78,13 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
         hits = hits.clone(); // don't modify the incoming hits
         Arrays.sort(hits, Comparator.comparingInt(SearchHit::docId));

-        List<String> noFormatFields = context.docValueFieldsContext().fields().stream().filter(f -> f.format == null).map(f -> f.field)
-                .collect(Collectors.toList());
-        if (noFormatFields.isEmpty() == false) {
-            deprecationLogger.deprecated("There are doc-value fields which are not using a format. The output will "
-                    + "change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass "
-                    + "[format={}] with a doc value field in order to opt in for the future behaviour and ease the migration to "
-                    + "7.0: {}", DocValueFieldsContext.USE_DEFAULT_FORMAT, noFormatFields);
+        if (context.docValueFieldsContext().fields().stream()
+                .map(f -> f.format)
+                .filter(USE_DEFAULT_FORMAT::equals)
+                .findAny()
+                .isPresent()) {
+            DEPRECATION_LOGGER.deprecated("[" + USE_DEFAULT_FORMAT + "] is a special format that was only used to " +
+                    "ease the transition to 7.x. It has become the default and shouldn't be set explicitly anymore.");
         }

         for (FieldAndFormat fieldAndFormat : context.docValueFieldsContext().fields()) {
@@ -93,19 +92,14 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
             MappedFieldType fieldType = context.mapperService().fullName(field);
             if (fieldType != null) {
                 final IndexFieldData<?> indexFieldData = context.getForField(fieldType);
-                final DocValueFormat format;
-                if (fieldAndFormat.format == null) {
-                    format = null;
-                } else {
-                    String formatDesc = fieldAndFormat.format;
-                    if (Objects.equals(formatDesc, DocValueFieldsContext.USE_DEFAULT_FORMAT)) {
-                        formatDesc = null;
-                    }
-                    format = fieldType.docValueFormat(formatDesc, null);
-                }
+                String formatDesc = fieldAndFormat.format;
+                if (Objects.equals(formatDesc, USE_DEFAULT_FORMAT)) {
+                    // TODO: Remove in 8.x
+                    formatDesc = null;
+                }
+                final DocValueFormat format = fieldType.docValueFormat(formatDesc, null);
                 LeafReaderContext subReaderContext = null;
                 AtomicFieldData data = null;
-                ScriptDocValues<?> scriptValues = null; // legacy
                 SortedBinaryDocValues binaryValues = null; // binary / string / ip fields
                 SortedNumericDocValues longValues = null; // int / date fields
                 SortedNumericDoubleValues doubleValues = null; // floating-point fields

@@ -115,9 +109,7 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
                     int readerIndex = ReaderUtil.subIndex(hit.docId(), context.searcher().getIndexReader().leaves());
                     subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
                     data = indexFieldData.load(subReaderContext);
-                    if (format == null) {
-                        scriptValues = data.getLegacyFieldValues();
-                    } else if (indexFieldData instanceof IndexNumericFieldData) {
+                    if (indexFieldData instanceof IndexNumericFieldData) {
                         if (((IndexNumericFieldData) indexFieldData).getNumericType().isFloatingPoint()) {
                             doubleValues = ((AtomicNumericFieldData) data).getDoubleValues();
                         } else {

@@ -138,10 +130,7 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
                 final List<Object> values = hitField.getValues();

                 int subDocId = hit.docId() - subReaderContext.docBase;
-                if (scriptValues != null) {
-                    scriptValues.setNextDocId(subDocId);
-                    values.addAll(scriptValues);
-                } else if (binaryValues != null) {
+                if (binaryValues != null) {
                     if (binaryValues.advanceExact(subDocId)) {
                         for (int i = 0, count = binaryValues.docValueCount(); i < count; ++i) {
                             values.add(format.format(binaryValues.nextValue()));
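Taken together, the fetch-phase changes above drop the legacy script-values path: a missing format is resolved through the field type's own doc-value format, and `use_field_mapping` is only kept as a deprecated alias for that default. A hypothetical caller-side sketch of the two spellings (field names are made up):

import org.elasticsearch.search.builder.SearchSourceBuilder;

public class UseFieldMappingDeprecationSketch {
    public static SearchSourceBuilder build() {
        SearchSourceBuilder source = new SearchSourceBuilder();
        // Preferred: no format, the mapping decides how the value is rendered.
        source.docValueField("my_date_field");
        // Still accepted for the 7.x transition, but now triggers the deprecation
        // warning added above, because it merely restates the default.
        source.docValueField("my_other_date_field", "use_field_mapping");
        return source;
    }
}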
@@ -32,7 +32,6 @@ import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests;

@@ -158,8 +157,7 @@ public class InnerHitBuilderTests extends ESTestCase {
             innerHits.setStoredFieldNames(randomListStuff(16, () -> randomAlphaOfLengthBetween(1, 16)));
         }
         innerHits.setDocValueFields(randomListStuff(16,
-                () -> new FieldAndFormat(randomAlphaOfLengthBetween(1, 16),
-                    randomBoolean() ? null : DocValueFieldsContext.USE_DEFAULT_FORMAT)));
+                () -> new FieldAndFormat(randomAlphaOfLengthBetween(1, 16), null)));
         // Random script fields deduped on their field name.
         Map<String, SearchSourceBuilder.ScriptField> scriptFields = new HashMap<>();
         for (SearchSourceBuilder.ScriptField field: randomListStuff(16, InnerHitBuilderTests::randomScript)) {

@@ -201,8 +199,7 @@ public class InnerHitBuilderTests extends ESTestCase {
         modifiers.add(() -> {
             if (randomBoolean()) {
                 copy.setDocValueFields(randomValueOtherThan(copy.getDocValueFields(),
-                        () -> randomListStuff(16, () -> new FieldAndFormat(randomAlphaOfLengthBetween(1, 16),
-                            randomBoolean() ? null : DocValueFieldsContext.USE_DEFAULT_FORMAT))));
+                        () -> randomListStuff(16, () -> new FieldAndFormat(randomAlphaOfLengthBetween(1, 16), null))));
             } else {
                 copy.addDocValueField(randomAlphaOfLengthBetween(1, 16));
             }
@@ -19,7 +19,6 @@

 package org.elasticsearch.search.fields;

-import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;

@@ -49,7 +48,6 @@ import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
-import org.joda.time.ReadableDateTime;
 import org.joda.time.format.DateTimeFormat;

 import java.time.ZoneOffset;

@@ -804,13 +802,12 @@ public class SearchFieldsIT extends ESIntegTestCase {
         assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
-        DateTime dateField = searchResponse.getHits().getAt(0).getFields().get("date_field").getValue();
-        assertThat(dateField.getMillis(), equalTo(date.toInstant().toEpochMilli()));
+        assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
+            equalTo(DateFormatter.forPattern("dateOptionalTime").format(date)));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
-        assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(),
-            equalTo(new BytesRef(new byte[] {42, 100})));
+        assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(), equalTo("KmQ"));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1"));

         builder = client().prepareSearch().setQuery(matchAllQuery())

@@ -830,13 +827,12 @@ public class SearchFieldsIT extends ESIntegTestCase {
         assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
-        dateField = searchResponse.getHits().getAt(0).getFields().get("date_field").getValue();
-        assertThat(dateField.getMillis(), equalTo(date.toInstant().toEpochMilli()));
+        assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
+            equalTo(DateFormatter.forPattern("dateOptionalTime").format(date)));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
-        assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(),
-            equalTo(new BytesRef(new byte[] {42, 100})));
+        assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(), equalTo("KmQ"));
         assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1"));

         builder = client().prepareSearch().setQuery(matchAllQuery())

@@ -1001,9 +997,7 @@ public class SearchFieldsIT extends ESIntegTestCase {

         DocumentField dateField = fields.get("date_field");
         assertThat(dateField.getName(), equalTo("date_field"));
-
-        ReadableDateTime fetchedDate = dateField.getValue();
-        assertThat(fetchedDate.getMillis(), equalTo(date.toInstant().getMillis()));
+        assertThat(dateField.getValue(), equalTo("1990-12-29"));
     }

     public void testWildcardDocValueFieldsWithFieldAlias() throws Exception {

@@ -1065,9 +1059,7 @@ public class SearchFieldsIT extends ESIntegTestCase {

         DocumentField dateField = fields.get("date_field");
         assertThat(dateField.getName(), equalTo("date_field"));
-
-        ReadableDateTime fetchedDate = dateField.getValue();
-        assertThat(fetchedDate.getMillis(), equalTo(date.toInstant().getMillis()));
+        assertThat(dateField.getValue(), equalTo("1990-12-29"));
     }

@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.fields;

 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;

 import java.util.List;
 import java.util.Map;

@@ -52,7 +51,7 @@ public abstract class ExtractedField {
     public abstract Object[] value(SearchHit hit);

     public String getDocValueFormat() {
-        return DocValueFieldsContext.USE_DEFAULT_FORMAT;
+        return null;
     }

     public static ExtractedField newTimeField(String name, ExtractionMethod extractionMethod) {
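For the ML extractors above, a null doc-value format now simply defers to the mapping, while time fields keep their explicit `epoch_millis` format. A hypothetical helper showing how an ExtractedField would be wired into a search source:

import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.ml.datafeed.extractor.fields.ExtractedField;

public class ExtractedFieldWiringSketch {
    // Sketch only: a null format from getDocValueFormat() is passed straight
    // through and means "use the format configured in the mappings".
    public static SearchSourceBuilder addDocValueField(SearchSourceBuilder builder, ExtractedField field) {
        return builder.docValueField(field.getName(), field.getDocValueFormat());
    }
}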
@@ -44,7 +44,6 @@ import org.elasticsearch.index.query.TermsQueryBuilder;
 import org.elasticsearch.index.query.WildcardQueryBuilder;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.xpack.core.ClientHelper;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate;

@@ -198,7 +197,7 @@ public class DatafeedConfigProvider {
     public void findDatafeedsForJobIds(Collection<String> jobIds, ActionListener<Set<String>> listener) {
         SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildDatafeedJobIdsQuery(jobIds));
         sourceBuilder.fetchSource(false);
-        sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), DocValueFieldsContext.USE_DEFAULT_FORMAT);
+        sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), null);

         SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
                 .setIndicesOptions(IndicesOptions.lenientExpandOpen())

@@ -366,7 +365,7 @@ public class DatafeedConfigProvider {
         SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildDatafeedIdQuery(tokens));
         sourceBuilder.sort(DatafeedConfig.ID.getPreferredName());
         sourceBuilder.fetchSource(false);
-        sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), DocValueFieldsContext.USE_DEFAULT_FORMAT);
+        sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), null);

         SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
                 .setIndicesOptions(IndicesOptions.lenientExpandOpen())
@@ -52,7 +52,6 @@ import org.elasticsearch.index.query.TermsQueryBuilder;
 import org.elasticsearch.index.query.WildcardQueryBuilder;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;

@@ -424,7 +423,7 @@ public class JobConfigProvider {

         SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(boolQueryBuilder);
         sourceBuilder.fetchSource(false);
-        sourceBuilder.docValueField(Job.ID.getPreferredName(), DocValueFieldsContext.USE_DEFAULT_FORMAT);
+        sourceBuilder.docValueField(Job.ID.getPreferredName(), null);

         SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
                 .setIndicesOptions(IndicesOptions.lenientExpandOpen())

@@ -509,8 +508,8 @@ public class JobConfigProvider {
         SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildQuery(tokens, excludeDeleting));
         sourceBuilder.sort(Job.ID.getPreferredName());
         sourceBuilder.fetchSource(false);
-        sourceBuilder.docValueField(Job.ID.getPreferredName(), DocValueFieldsContext.USE_DEFAULT_FORMAT);
-        sourceBuilder.docValueField(Job.GROUPS.getPreferredName(), DocValueFieldsContext.USE_DEFAULT_FORMAT);
+        sourceBuilder.docValueField(Job.ID.getPreferredName(), null);
+        sourceBuilder.docValueField(Job.GROUPS.getPreferredName(), null);

         SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
                 .setIndicesOptions(IndicesOptions.lenientExpandOpen())

@@ -554,8 +553,8 @@ public class JobConfigProvider {
         SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildQuery(tokens, excludeDeleting));
         sourceBuilder.sort(Job.ID.getPreferredName());
         sourceBuilder.fetchSource(false);
-        sourceBuilder.docValueField(Job.ID.getPreferredName(), DocValueFieldsContext.USE_DEFAULT_FORMAT);
-        sourceBuilder.docValueField(Job.GROUPS.getPreferredName(), DocValueFieldsContext.USE_DEFAULT_FORMAT);
+        sourceBuilder.docValueField(Job.ID.getPreferredName(), null);
+        sourceBuilder.docValueField(Job.GROUPS.getPreferredName(), null);

         return client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
                 .setIndicesOptions(IndicesOptions.lenientExpandOpen())

@@ -638,7 +637,7 @@ public class JobConfigProvider {
                 .query(new TermsQueryBuilder(Job.GROUPS.getPreferredName(), groupIds));
         sourceBuilder.sort(Job.ID.getPreferredName(), SortOrder.DESC);
         sourceBuilder.fetchSource(false);
-        sourceBuilder.docValueField(Job.ID.getPreferredName(), DocValueFieldsContext.USE_DEFAULT_FORMAT);
+        sourceBuilder.docValueField(Job.ID.getPreferredName(), null);

         SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
                 .setIndicesOptions(IndicesOptions.lenientExpandOpen())
@@ -6,7 +6,6 @@
 package org.elasticsearch.xpack.ml.datafeed.extractor.fields;

 import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.ml.test.SearchHitBuilder;

@@ -143,7 +142,7 @@ public class ExtractedFieldTests extends ESTestCase {

     public void testGetDocValueFormat() {
         for (ExtractedField.ExtractionMethod method : ExtractedField.ExtractionMethod.values()) {
-            assertThat(ExtractedField.newField("f", method).getDocValueFormat(), equalTo(DocValueFieldsContext.USE_DEFAULT_FORMAT));
+            assertThat(ExtractedField.newField("f", method).getDocValueFormat(), equalTo(null));
         }
         assertThat(ExtractedField.newTimeField("doc_value_time", ExtractedField.ExtractionMethod.DOC_VALUE).getDocValueFormat(),
             equalTo("epoch_millis"));

@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.fields;

 import org.elasticsearch.action.fieldcaps.FieldCapabilities;
 import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;

@@ -63,7 +62,7 @@ public class ExtractedFieldsTests extends ESTestCase {
         assertThat(extractedFields.getDocValueFields().get(0).getName(), equalTo("time"));
         assertThat(extractedFields.getDocValueFields().get(0).getDocValueFormat(), equalTo("epoch_millis"));
         assertThat(extractedFields.getDocValueFields().get(1).getName(), equalTo("value"));
-        assertThat(extractedFields.getDocValueFields().get(1).getDocValueFormat(), equalTo(DocValueFieldsContext.USE_DEFAULT_FORMAT));
+        assertThat(extractedFields.getDocValueFields().get(1).getDocValueFormat(), equalTo(null));
         assertThat(extractedFields.getSourceFields(), equalTo(new String[] {"airline"}));
         assertThat(extractedFields.getAllFields().size(), equalTo(4));
     }
@@ -9,7 +9,6 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilities;
 import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;

@@ -134,7 +133,7 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase {
         assertThat(extractedFields.getDocValueFields().get(0).getName(), equalTo("time"));
         assertThat(extractedFields.getDocValueFields().get(0).getDocValueFormat(), equalTo("epoch_millis"));
         assertThat(extractedFields.getDocValueFields().get(1).getName(), equalTo("value"));
-        assertThat(extractedFields.getDocValueFields().get(1).getDocValueFormat(), equalTo(DocValueFieldsContext.USE_DEFAULT_FORMAT));
+        assertThat(extractedFields.getDocValueFields().get(1).getDocValueFormat(), equalTo(null));
         assertThat(extractedFields.getSourceFields().length, equalTo(1));
         assertThat(extractedFields.getSourceFields()[0], equalTo("airline"));
         assertThat(extractedFields.getAllFields().size(), equalTo(4));
@@ -103,7 +103,6 @@ public class CliExplainIT extends CliIntegrationTestCase {
         assertThat(readLine(), startsWith(" \"docvalue_fields\" : ["));
         assertThat(readLine(), startsWith(" {"));
         assertThat(readLine(), startsWith(" \"field\" : \"i\""));
-        assertThat(readLine(), startsWith(" \"format\" : \"use_field_mapping\""));
         assertThat(readLine(), startsWith(" }"));
         assertThat(readLine(), startsWith(" ],"));
         assertThat(readLine(), startsWith(" \"sort\" : ["));
@@ -6,7 +6,6 @@
 package org.elasticsearch.xpack.sql.action;

 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.test.AbstractStreamableTestCase;
 import org.elasticsearch.xpack.sql.action.SqlTranslateResponse;

@@ -20,7 +19,7 @@ public class SqlTranslateResponseTests extends AbstractStreamableTestCase<SqlTra
         if (randomBoolean()) {
             long docValues = iterations(5, 10);
             for (int i = 0; i < docValues; i++) {
-                s.docValueField(randomAlphaOfLength(10), DocValueFieldsContext.USE_DEFAULT_FORMAT);
+                s.docValueField(randomAlphaOfLength(10));
             }
         }

@@ -8,7 +8,6 @@ package org.elasticsearch.xpack.sql.execution.search;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;

 import java.util.LinkedHashMap;

@@ -69,8 +68,7 @@ public class SqlSourceBuilder {
         if (!sourceFields.isEmpty()) {
             sourceBuilder.fetchSource(sourceFields.toArray(Strings.EMPTY_ARRAY), null);
         }
-        docFields.forEach(field -> sourceBuilder.docValueField(field.field,
-            field.format == null ? DocValueFieldsContext.USE_DEFAULT_FORMAT : field.format));
+        docFields.forEach(field -> sourceBuilder.docValueField(field.field, field.format));
         scriptFields.forEach(sourceBuilder::scriptField);
     }
 }
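With SQL now forwarding each field's format untouched, a null format reaching SearchSourceBuilder means the mapping-based default. A small assumed helper makes that pass-through explicit:

import java.util.Map;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class DocValueFormatForwardingSketch {
    // Sketch only: formats are forwarded as-is; null is no longer rewritten to
    // "use_field_mapping", it simply selects the mapping-based rendering.
    public static void addDocValueFields(SearchSourceBuilder builder, Map<String, String> fieldToFormat) {
        fieldToFormat.forEach(builder::docValueField);
    }
}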
@@ -11,7 +11,6 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
 import org.elasticsearch.xpack.sql.execution.search.SourceGenerator;

@@ -183,7 +182,7 @@ public class QueryContainer {
         List<FieldExtraction> nestedRefs = new ArrayList<>();

         String name = aliasName(attr);
-        String format = attr.field().getDataType() == DataType.DATETIME ? "epoch_millis" : DocValueFieldsContext.USE_DEFAULT_FORMAT;
+        String format = attr.field().getDataType() == DataType.DATETIME ? "epoch_millis" : null;
         Query q = rewriteToContainNestedField(query, attr.source(),
             attr.nestedParent().name(), name, format, attr.field().isAggregatable());

@@ -154,7 +154,7 @@ public class SqlLicenseIT extends AbstractLicensesIntegrationTestCase {
                 .query("SELECT * FROM test").get();
         SearchSourceBuilder source = response.source();
         assertThat(source.docValueFields(), Matchers.contains(
-                new DocValueFieldsContext.FieldAndFormat("count", DocValueFieldsContext.USE_DEFAULT_FORMAT)));
+                new DocValueFieldsContext.FieldAndFormat("count", null)));
         FetchSourceContext fetchSource = source.fetchSource();
         assertThat(fetchSource.includes(), Matchers.arrayContaining("data"));
     }

@@ -35,7 +35,7 @@ public class SqlTranslateActionIT extends AbstractSqlIntegTestCase {
         assertTrue(fetch.fetchSource());
         assertArrayEquals(new String[] { "data" }, fetch.includes());
         assertEquals(
-                singletonList(new DocValueFieldsContext.FieldAndFormat("count", DocValueFieldsContext.USE_DEFAULT_FORMAT)),
+                singletonList(new DocValueFieldsContext.FieldAndFormat("count", null)),
                 source.docValueFields());
         assertEquals(singletonList(SortBuilders.fieldSort("count").missing("_last").unmappedType("long")), source.sorts());
     }
@@ -5,7 +5,6 @@
  */
 package org.elasticsearch.xpack.sql.querydsl.container;

-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.querydsl.query.BoolQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.MatchAll;

@@ -24,7 +23,7 @@ public class QueryContainerTests extends ESTestCase {
     private Source source = SourceTests.randomSource();
     private String path = randomAlphaOfLength(5);
     private String name = randomAlphaOfLength(5);
-    private String format = DocValueFieldsContext.USE_DEFAULT_FORMAT;
+    private String format = null;
     private boolean hasDocValues = randomBoolean();

     public void testRewriteToContainNestedFieldNoQuery() {
@@ -5,7 +5,6 @@
  */
 package org.elasticsearch.xpack.sql.querydsl.query;

-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.sort.NestedSortBuilder;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.tree.Source;

@@ -53,15 +52,14 @@ public class BoolQueryTests extends ESTestCase {

     public void testAddNestedField() {
         Query q = boolQueryWithoutNestedChildren();
-        assertSame(q, q.addNestedField(randomAlphaOfLength(5), randomAlphaOfLength(5), DocValueFieldsContext.USE_DEFAULT_FORMAT,
-            randomBoolean()));
+        assertSame(q, q.addNestedField(randomAlphaOfLength(5), randomAlphaOfLength(5), null, randomBoolean()));

         String path = randomAlphaOfLength(5);
         String field = randomAlphaOfLength(5);
         q = boolQueryWithNestedChildren(path, field);
         String newField = randomAlphaOfLength(5);
         boolean hasDocValues = randomBoolean();
-        Query rewritten = q.addNestedField(path, newField, DocValueFieldsContext.USE_DEFAULT_FORMAT, hasDocValues);
+        Query rewritten = q.addNestedField(path, newField, null, hasDocValues);
         assertNotSame(q, rewritten);
         assertTrue(rewritten.containsNestedField(path, newField));
     }

@@ -87,7 +85,7 @@ public class BoolQueryTests extends ESTestCase {

     private Query boolQueryWithNestedChildren(String path, String field) {
         NestedQuery match = new NestedQuery(SourceTests.randomSource(), path,
-            singletonMap(field, new SimpleImmutableEntry<>(randomBoolean(), DocValueFieldsContext.USE_DEFAULT_FORMAT)),
+            singletonMap(field, new SimpleImmutableEntry<>(randomBoolean(), null)),
             new MatchAll(SourceTests.randomSource()));
         Query matchAll = new MatchAll(SourceTests.randomSource());
         Query left;

@@ -108,4 +106,4 @@ public class BoolQueryTests extends ESTestCase {
                 new ExistsQuery(new Source(1, 1, StringUtils.EMPTY), "f1"),
                 new ExistsQuery(new Source(1, 7, StringUtils.EMPTY), "f2")).toString());
     }
 }
@@ -6,7 +6,6 @@
 package org.elasticsearch.xpack.sql.querydsl.query;

 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.sort.NestedSortBuilder;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.tree.Source;

@@ -54,8 +53,7 @@ public class LeafQueryTests extends ESTestCase {
     public void testAddNestedField() {
         Query query = new DummyLeafQuery(SourceTests.randomSource());
         // Leaf queries don't contain nested fields.
-        assertSame(query, query.addNestedField(randomAlphaOfLength(5), randomAlphaOfLength(5), DocValueFieldsContext.USE_DEFAULT_FORMAT,
-            randomBoolean()));
+        assertSame(query, query.addNestedField(randomAlphaOfLength(5), randomAlphaOfLength(5), null, randomBoolean()));
     }

     public void testEnrichNestedSort() {
@@ -5,7 +5,6 @@
  */
 package org.elasticsearch.xpack.sql.querydsl.query;

-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.sort.NestedSortBuilder;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;

@@ -45,7 +44,7 @@ public class NestedQueryTests extends ESTestCase {
         int size = between(0, 5);
         Map<String, Map.Entry<Boolean, String>> fields = new HashMap<>(size);
         while (fields.size() < size) {
-            fields.put(randomAlphaOfLength(5), new SimpleImmutableEntry<>(randomBoolean(), DocValueFieldsContext.USE_DEFAULT_FORMAT));
+            fields.put(randomAlphaOfLength(5), new SimpleImmutableEntry<>(randomBoolean(), null));
         }
         return fields;
     }

@@ -80,18 +79,18 @@ public class NestedQueryTests extends ESTestCase {
         NestedQuery q = randomNestedQuery(0);
         for (String field : q.fields().keySet()) {
             // add does nothing if the field is already there
-            assertSame(q, q.addNestedField(q.path(), field, DocValueFieldsContext.USE_DEFAULT_FORMAT, randomBoolean()));
+            assertSame(q, q.addNestedField(q.path(), field, null, randomBoolean()));
             String otherPath = randomValueOtherThan(q.path(), () -> randomAlphaOfLength(5));
             // add does nothing if the path doesn't match
-            assertSame(q, q.addNestedField(otherPath, randomAlphaOfLength(5), DocValueFieldsContext.USE_DEFAULT_FORMAT, randomBoolean()));
+            assertSame(q, q.addNestedField(otherPath, randomAlphaOfLength(5), null, randomBoolean()));
         }

         // if the field isn't in the list then add rewrites to a query with all the old fields and the new one
         String newField = randomValueOtherThanMany(q.fields()::containsKey, () -> randomAlphaOfLength(5));
         boolean hasDocValues = randomBoolean();
-        NestedQuery added = (NestedQuery) q.addNestedField(q.path(), newField, DocValueFieldsContext.USE_DEFAULT_FORMAT, hasDocValues);
+        NestedQuery added = (NestedQuery) q.addNestedField(q.path(), newField, null, hasDocValues);
         assertNotSame(q, added);
-        assertThat(added.fields(), hasEntry(newField, new SimpleImmutableEntry<>(hasDocValues, DocValueFieldsContext.USE_DEFAULT_FORMAT)));
+        assertThat(added.fields(), hasEntry(newField, new SimpleImmutableEntry<>(hasDocValues, null)));
         assertTrue(added.containsNestedField(q.path(), newField));
         for (Map.Entry<String, Map.Entry<Boolean, String>> field : q.fields().entrySet()) {
             assertThat(added.fields(), hasEntry(field.getKey(), field.getValue()));

@@ -133,8 +132,8 @@ public class NestedQueryTests extends ESTestCase {

     public void testToString() {
         NestedQuery q = new NestedQuery(new Source(1, 1, StringUtils.EMPTY), "a.b",
-                singletonMap("f", new SimpleImmutableEntry<>(true, DocValueFieldsContext.USE_DEFAULT_FORMAT)),
+                singletonMap("f", new SimpleImmutableEntry<>(true, null)),
                 new MatchAll(new Source(1, 1, StringUtils.EMPTY)));
-        assertEquals("NestedQuery@1:2[a.b.{f=true=use_field_mapping}[MatchAll@1:2[]]]", q.toString());
+        assertEquals("NestedQuery@1:2[a.b.{f=true=null}[MatchAll@1:2[]]]", q.toString());
     }
 }
@@ -1,9 +1,8 @@
 ---
 "Translate SQL":
   - skip:
-      version: " - 6.3.99"
-      reason: format option was added in 6.4
-      features: warnings
+      version: " - 6.99.99"
+      reason: Triggers warnings before 7.0

   - do:
       bulk:

@@ -29,7 +28,6 @@
           excludes: []
         docvalue_fields:
           - field: int
-            format: use_field_mapping
         sort:
           - int:
             order: asc