Add nanosecond field mapper (#37755)

This adds a dedicated field mapper that supports nanosecond resolution -
at the price of a reduced date range.

When using the date field mapper, the time is stored as milliseconds since the epoch
in a long in Lucene. This field mapper stores the time in nanoseconds
since the epoch, which means its range is much smaller, spanning roughly from
1970 to 2262.
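
The upper bound falls out of storing nanoseconds in a signed 64-bit long; a
minimal sketch (not part of this change) deriving it:

    import java.time.Instant;

    public class NanosecondRange {
        public static void main(String[] args) {
            // Long.MAX_VALUE nanoseconds after the epoch is the last representable date
            long maxSeconds = Long.MAX_VALUE / 1_000_000_000L; // whole seconds
            long maxNanos = Long.MAX_VALUE % 1_000_000_000L;   // remaining nanoseconds
            System.out.println(Instant.ofEpochSecond(maxSeconds, maxNanos));
            // prints 2262-04-11T23:47:16.854775807Z
        }
    }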

Note that aggregations will still be in milliseconds.
However, doc value fields will have full nanosecond resolution.

Relates #27330
Alexander Reelsen 2019-02-04 11:31:16 +01:00 committed by GitHub
parent 15510da2af
commit 87f3579125
23 changed files with 725 additions and 48 deletions

View File

@@ -10,6 +10,7 @@ document:
string:: <<text,`text`>> and <<keyword,`keyword`>>
<<number>>:: `long`, `integer`, `short`, `byte`, `double`, `float`, `half_float`, `scaled_float`
<<date>>:: `date`
<<date_nanos>>:: `date_nanos`
<<boolean>>:: `boolean`
<<binary>>:: `binary`
<<range>>:: `integer_range`, `float_range`, `long_range`, `double_range`, `date_range`
@@ -78,6 +79,8 @@ include::types/boolean.asciidoc[]
include::types/date.asciidoc[]
include::types/date_nanos.asciidoc[]
include::types/geo-point.asciidoc[]
include::types/geo-shape.asciidoc[]

View File

@@ -0,0 +1,99 @@
[[date_nanos]]
=== date_nanos datatype
This datatype is an addition to the `date` datatype. However, there is an
important distinction between the two. The existing `date` datatype stores
dates in millisecond resolution. The `date_nanos` datatype stores dates
in nanosecond resolution, which limits its range of dates to roughly
1970 to 2262, as dates are still stored as a long representing nanoseconds
since the epoch.
Queries on nanoseconds are internally converted to range queries on this long
representation, and the result of aggregations and stored fields is converted
back to a string depending on the date format that is associated with the field.
Date formats can be customised, but if no `format` is specified then it uses
the default:
"strict_date_optional_time||epoch_millis"
This means that it will accept dates with optional timestamps, which conform
to the formats supported by
<<strict-date-time,`strict_date_optional_time`>>, including up to nine fractional
digits of a second, or milliseconds-since-the-epoch (thus losing precision on the
nanosecond part).
For instance:
[source,js]
--------------------------------------------------
PUT my_index?include_type_name=true
{
"mappings": {
"_doc": {
"properties": {
"date": {
"type": "date_nanos" <1>
}
}
}
}
}
PUT my_index/_doc/1
{ "date": "2015-01-01" } <2>
PUT my_index/_doc/2
{ "date": "2015-01-01T12:10:30.123456789Z" } <3>
PUT my_index/_doc/3
{ "date": 1420070400 } <4>
GET my_index/_search
{
"sort": { "date": "asc"} <5>
}
GET my_index/_search
{
"script_fields" : {
"my_field" : {
"script" : {
"lang" : "painless",
"source" : "doc['date'].date.nanos" <6>
}
}
}
}
GET my_index/_search
{
"docvalue_fields" : [
{
"field" : "my_ip_field",
"format": "strict_date_time" <7>
}
]
}
--------------------------------------------------
// CONSOLE
<1> The `date` field uses the default `format`.
<2> This document uses a plain date.
<3> This document includes a time.
<4> This document uses milliseconds-since-the-epoch.
<5> Note that the `sort` values that are returned are all in
nanoseconds-since-the-epoch.
<6> Access the nanosecond part of the date in a script.
<7> Use doc value fields, which can be formatted in nanosecond
resolution.
You can also specify multiple date formats separated by `||`. The
same mapping parameters as with the `date` field can be used.
[[date-nanos-limitations]]
==== Limitations
Aggregations are still on millisecond resolution, even when using a
`date_nanos` field.
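
For example, two values that differ only below the millisecond fall into the same
bucket of a millisecond-based `date_histogram`. A short illustration of the
truncation, using plain `java.time` rather than any Elasticsearch API:

[source,java]
--------------------------------------------------
import java.time.Instant;

Instant a = Instant.parse("2018-10-29T12:12:12.123456789Z");
Instant b = Instant.parse("2018-10-29T12:12:12.123999999Z");
// both truncate to the same millisecond, 1540815132123
assert a.toEpochMilli() == b.toEpochMilli();
--------------------------------------------------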

View File

@@ -14,6 +14,8 @@ setup:
type: double
geo:
type: geo_point
date:
type: date
object:
type: object
properties:
@@ -45,6 +47,8 @@ setup:
type: keyword
number:
type: double
date:
type: date
geo:
type: geo_point
object:
@@ -77,6 +81,8 @@ setup:
type: keyword
number:
type: long
date:
type: date
geo:
type: keyword
object:
@@ -104,7 +110,7 @@ setup:
- do:
field_caps:
index: 'test1,test2,test3'
fields: [text, keyword, number, geo]
fields: [text, keyword, number, date, geo]
- match: {fields.text.text.searchable: true}
- match: {fields.text.text.aggregatable: false}
@@ -126,6 +132,11 @@ setup:
- match: {fields.number.long.indices: ["test3"]}
- is_false: fields.number.long.non_searchable_indices
- is_false: fields.number.long.non_aggregatable_indices
- match: {fields.date.date.searchable: true}
- match: {fields.date.date.aggregatable: true}
- is_false: fields.date.date.indices
- is_false: fields.date.date.non_searchable_indices
- is_false: fields.date.date.non_aggregatable_indices
- match: {fields.geo.geo_point.searchable: true}
- match: {fields.geo.geo_point.aggregatable: true}
- match: {fields.geo.geo_point.indices: ["test1", "test2"]}
@@ -137,6 +148,33 @@ setup:
- is_false: fields.geo.keyword.non_searchable_indices
- is_false: fields.geo.keyword.non_aggregatable_indices
---
"Get date_nanos field caps":
- skip:
version: " - 6.99.99"
reason: date_nanos field mapping type has been introduced in 7.0
- do:
indices.create:
include_type_name: false
index: test_nanos
body:
mappings:
properties:
date_nanos:
type: date_nanos
- do:
field_caps:
index: 'test_nanos'
fields: [date_nanos]
- match: {fields.date_nanos.date_nanos.searchable: true}
- match: {fields.date_nanos.date_nanos.aggregatable: true}
- is_false: fields.date_nanos.date_nanos.indices
- is_false: fields.date_nanos.date_nanos.non_searchable_indices
- is_false: fields.date_nanos.date_nanos.non_aggregatable_indices
---
"Get leaves field caps":
- do:

View File

@@ -0,0 +1,161 @@
setup:
- skip:
version: " - 6.99.99"
reason: "Implemented in 7.0"
- do:
indices.create:
index: date_ns
body:
settings:
number_of_shards: 3
number_of_replicas: 0
mappings:
properties:
date:
type: date_nanos
field:
type: long
- do:
indices.create:
index: date_ms
body:
settings:
number_of_shards: 3
number_of_replicas: 0
mappings:
properties:
date:
type: date
field:
type: long
---
"test sorting against date_nanos only fields":
- do:
bulk:
refresh: true
body:
- '{ "index" : { "_index" : "date_ns", "_id" : "first" } }'
# millis [1540815132123] to nanos [1540815132123456789]
- '{"date" : "2018-10-29T12:12:12.123456789Z", "field" : 1 }'
- '{ "index" : { "_index" : "date_ns", "_id" : "second" } }'
# millis [1540815132987] to nanos [1540815132987654321]
- '{"date" : "2018-10-29T12:12:12.987654321Z", "field" : 2 }'
- do:
search:
rest_total_hits_as_int: true
index: date_ns*
body:
sort: [ { "date": "desc" } ]
- match: { hits.total: 2 }
- length: { hits.hits: 2 }
- match: { hits.hits.0._id: "second" }
- match: { hits.hits.0.sort: [1540815132987654321] }
- match: { hits.hits.1._id: "first" }
- match: { hits.hits.1.sort: [1540815132123456789] }
- do:
search:
rest_total_hits_as_int: true
index: date_ns*
body:
sort: [ { "date": "asc" } ]
- match: { hits.total: 2 }
- length: { hits.hits: 2 }
- match: { hits.hits.0._id: "first" }
- match: { hits.hits.0.sort: [1540815132123456789] }
- match: { hits.hits.1._id: "second" }
- match: { hits.hits.1.sort: [1540815132987654321] }
---
"date_nanos requires dates after 1970 and before 2262":
- do:
bulk:
refresh: true
body:
- '{ "index" : { "_index" : "date_ns", "_id" : "date_ns_1" } }'
- '{"date" : "1969-10-28T12:12:12.123456789Z" }'
- '{ "index" : { "_index" : "date_ns", "_id" : "date_ns_2" } }'
- '{"date" : "2263-10-29T12:12:12.123456789Z" }'
- match: { errors: true }
- match: { items.0.index.status: 400 }
- match: { items.0.index.error.type: mapper_parsing_exception }
- match: { items.0.index.error.caused_by.reason: "date[1969-10-28T12:12:12.123456789Z] is before the epoch in 1970 and cannot be stored in nanosecond resolution" }
- match: { items.1.index.status: 400 }
- match: { items.1.index.error.type: mapper_parsing_exception }
- match: { items.1.index.error.caused_by.reason: "date[2263-10-29T12:12:12.123456789Z] is after 2262-04-11T23:47:16.854775807 and cannot be stored in nanosecond resolution" }
---
"doc value fields are working as expected across date and date_nanos fields":
- do:
bulk:
refresh: true
body:
- '{ "index" : { "_index" : "date_ns", "_id" : "date_ns_1" } }'
- '{"date" : "2018-10-29T12:12:12.123456789Z", "field" : 1 }'
- '{ "index" : { "_index" : "date_ms", "_id" : "date_ms_1" } }'
- '{"date" : "2018-10-29T12:12:12.987Z" }'
- do:
search:
rest_total_hits_as_int: true
index: date*
body:
docvalue_fields: [ { "field": "date", "format" : "strict_date_optional_time" }, { "field": "date", "format": "epoch_millis" }, { "field" : "date", "format": "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSX" } ]
sort: [ { "date": "desc" } ]
- match: { hits.total: 2 }
- length: { hits.hits: 2 }
- match: { hits.hits.0._id: "date_ns_1" }
- match: { hits.hits.1._id: "date_ms_1" }
- match: { hits.hits.0.fields.date: [ "2018-10-29T12:12:12.123Z", "1540815132123.456789", "2018-10-29T12:12:12.123456789Z" ] }
- match: { hits.hits.1.fields.date: [ "2018-10-29T12:12:12.987Z", "1540815132987", "2018-10-29T12:12:12.987000000Z" ] }
---
"date histogram aggregation with date and date_nanos mapping":
- do:
bulk:
refresh: true
body:
- '{ "index" : { "_index" : "date_ns", "_id" : "date_ns_1" } }'
- '{"date" : "2018-10-29T12:12:12.123456789Z" }'
- '{ "index" : { "_index" : "date_ms", "_id" : "date_ms_1" } }'
- '{"date" : "2018-10-29T12:12:12.987Z" }'
- '{ "index" : { "_index" : "date_ns", "_id" : "date_ns_2" } }'
- '{"date" : "2018-10-30T12:12:12.123456789Z" }'
- '{ "index" : { "_index" : "date_ms", "_id" : "date_ms_2" } }'
- '{"date" : "2018-10-30T12:12:12.987Z" }'
- do:
search:
rest_total_hits_as_int: true
index: date*
body:
size: 0
aggs:
date:
date_histogram:
field: date
interval: 1d
- match: { hits.total: 4 }
- length: { aggregations.date.buckets: 2 }
- match: { aggregations.date.buckets.0.key: 1540771200000 }
- match: { aggregations.date.buckets.0.key_as_string: "2018-10-29T00:00:00.000Z" }
- match: { aggregations.date.buckets.0.doc_count: 2 }
- match: { aggregations.date.buckets.1.key: 1540857600000 }
- match: { aggregations.date.buckets.1.key_as_string: "2018-10-30T00:00:00.000Z" }
- match: { aggregations.date.buckets.1.doc_count: 2 }

View File

@@ -23,6 +23,7 @@ import org.apache.logging.log4j.LogManager;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.joda.time.DateTimeZone;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Collections;
@@ -77,4 +78,65 @@ public class DateUtils {
}
return ZoneId.of(zoneId).normalized();
}
private static final Instant MAX_NANOSECOND_INSTANT = Instant.parse("2262-04-11T23:47:16.854775807Z");
/**
* Convert a java time instant to a long value which is stored in lucene,
* the long value representing the nanoseconds since the epoch
*
* @param instant the instant to convert
* @return the nanoseconds since the epoch as a single long
*/
public static long toLong(Instant instant) {
if (instant.isBefore(Instant.EPOCH)) {
throw new IllegalArgumentException("date[" + instant + "] is before the epoch in 1970 and cannot be " +
"stored in nanosecond resolution");
}
if (instant.isAfter(MAX_NANOSECOND_INSTANT)) {
throw new IllegalArgumentException("date[" + instant + "] is after 2262-04-11T23:47:16.854775807 and cannot be " +
"stored in nanosecond resolution");
}
return instant.getEpochSecond() * 1_000_000_000 + instant.getNano();
}
/**
* Convert a long value to a java time instant,
* the long value representing the nanoseconds since the epoch
*
* @param nanoSecondsSinceEpoch the nanoseconds since the epoch
* @return the instant representing the specified date
*/
public static Instant toInstant(long nanoSecondsSinceEpoch) {
if (nanoSecondsSinceEpoch < 0) {
throw new IllegalArgumentException("nanoseconds are [" + nanoSecondsSinceEpoch + "] are before the epoch in 1970 and cannot " +
"be processed in nanosecond resolution");
}
if (nanoSecondsSinceEpoch == 0) {
return Instant.EPOCH;
}
long seconds = nanoSecondsSinceEpoch / 1_000_000_000;
long nanos = nanoSecondsSinceEpoch % 1_000_000_000;
return Instant.ofEpochSecond(seconds, nanos);
}
/**
* Convert a nanosecond timestamp to milliseconds
*
* @param nanoSecondsSinceEpoch the nanoseconds since the epoch
* @return the milliseconds since the epoch
*/
public static long toMilliSeconds(long nanoSecondsSinceEpoch) {
if (nanoSecondsSinceEpoch < 0) {
throw new IllegalArgumentException("nanoseconds are [" + nanoSecondsSinceEpoch + "] are before the epoch in 1970 and will " +
"be converted to milliseconds");
}
if (nanoSecondsSinceEpoch == 0) {
return 0;
}
return nanoSecondsSinceEpoch / 1_000_000;
}
}
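
A brief usage sketch of these helpers (illustrative only; the unit tests further
down exercise the same round trip):

    import java.time.Instant;
    import org.elasticsearch.common.time.DateUtils;

    public class DateUtilsUsage {
        public static void main(String[] args) {
            Instant instant = Instant.parse("2019-02-04T10:31:16.123456789Z");
            long nanos = DateUtils.toLong(instant);            // 1549276276123456789
            assert DateUtils.toInstant(nanos).equals(instant); // exact round trip
            // converting to milliseconds drops the sub-millisecond digits
            assert DateUtils.toMilliSeconds(nanos) == instant.toEpochMilli();
        }
    }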

View File

@@ -28,6 +28,7 @@ public interface IndexNumericFieldData extends IndexFieldData<AtomicNumericField
INT(false),
LONG(false),
DATE(false),
DATE_NANOSECONDS(false),
HALF_FLOAT(true),
FLOAT(true),
DOUBLE(true);

View File

@@ -26,6 +26,7 @@ import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.script.JodaCompatibleZonedDateTime;
import java.io.IOException;
@@ -132,6 +133,7 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
public static final class Dates extends ScriptDocValues<JodaCompatibleZonedDateTime> {
private final SortedNumericDocValues in;
private final boolean isNanos;
/**
* Values wrapped in {@link java.time.ZonedDateTime} objects.
@@ -139,11 +141,9 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
private JodaCompatibleZonedDateTime[] dates;
private int count;
/**
* Standard constructor.
*/
public Dates(SortedNumericDocValues in) {
public Dates(SortedNumericDocValues in, boolean isNanos) {
this.in = in;
this.isNanos = isNanos;
}
/**
@@ -195,10 +195,14 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
dates = new JodaCompatibleZonedDateTime[count];
}
for (int i = 0; i < count; ++i) {
if (isNanos) {
dates[i] = new JodaCompatibleZonedDateTime(DateUtils.toInstant(in.nextValue()), ZoneOffset.UTC);
} else {
dates[i] = new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(in.nextValue()), ZoneOffset.UTC);
}
}
}
}
public static final class Doubles extends ScriptDocValues<Double> {

View File

@@ -50,8 +50,12 @@ abstract class AtomicLongFieldData implements AtomicNumericFieldData {
@Override
public final ScriptDocValues<?> getScriptValues() {
switch (numericType) {
// dates are exposed in millisecond resolution; date_nanos fields retain their full
// nanosecond resolution by loading the raw values through NanoSecondFieldData below
case DATE:
return new ScriptDocValues.Dates(getLongValues());
return new ScriptDocValues.Dates(getLongValues(), false);
case DATE_NANOSECONDS:
assert this instanceof SortedNumericDVIndexFieldData.NanoSecondFieldData;
return new ScriptDocValues.Dates(((SortedNumericDVIndexFieldData.NanoSecondFieldData) this).getLongValuesAsNanos(), true);
case BOOLEAN:
return new ScriptDocValues.Booleans(getLongValues());
default:

View File

@@ -31,7 +31,9 @@ import org.apache.lucene.search.SortedNumericSelector;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AbstractSortedNumericDocValues;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
@@ -134,11 +136,59 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple
return new SortedNumericFloatFieldData(reader, field);
case DOUBLE:
return new SortedNumericDoubleFieldData(reader, field);
case DATE_NANOSECONDS:
return new NanoSecondFieldData(reader, field, numericType);
default:
return new SortedNumericLongFieldData(reader, field, numericType);
}
}
/**
* A small helper class that can be configured to load nanosecond field data either in nanosecond resolution, retaining the original
* values, or in millisecond resolution, converting the nanosecond values to milliseconds
*/
public final class NanoSecondFieldData extends AtomicLongFieldData {
private final LeafReader reader;
private final String fieldName;
NanoSecondFieldData(LeafReader reader, String fieldName, NumericType numericType) {
super(0L, numericType);
this.reader = reader;
this.fieldName = fieldName;
}
@Override
public SortedNumericDocValues getLongValues() {
final SortedNumericDocValues dv = getLongValuesAsNanos();
return new AbstractSortedNumericDocValues() {
@Override
public boolean advanceExact(int target) throws IOException {
return dv.advanceExact(target);
}
@Override
public long nextValue() throws IOException {
return DateUtils.toMilliSeconds(dv.nextValue());
}
@Override
public int docValueCount() {
return dv.docValueCount();
}
};
}
public SortedNumericDocValues getLongValuesAsNanos() {
try {
return DocValues.getSortedNumeric(reader, fieldName);
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
}
}
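// Usage sketch (comment only): for a stored doc value of 1540815132123456789 nanos,
// getLongValuesAsNanos() returns it unchanged, while getLongValues() routes it through
// DateUtils.toMilliSeconds and returns 1540815132123 - this is how aggregations keep
// operating in millisecond resolution.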
/**
* FieldData implementation for integral types.
* <p>

View File

@@ -42,6 +42,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -53,6 +54,7 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Iterator;
@@ -61,8 +63,10 @@ import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.time.DateUtils.toLong;
/** A {@link FieldMapper} for dates. */
public class DateFieldMapper extends FieldMapper {
public final class DateFieldMapper extends FieldMapper {
public static final String CONTENT_TYPE = "date";
public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis");
@@ -71,11 +75,62 @@ public class DateFieldMapper extends FieldMapper {
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
}
public enum Resolution {
MILLISECONDS(CONTENT_TYPE, NumericType.DATE) {
public long convert(Instant instant) {
return instant.toEpochMilli();
}
public Instant toInstant(long value) {
return Instant.ofEpochMilli(value);
}
},
NANOSECONDS("date_nanos", NumericType.DATE_NANOSECONDS) {
public long convert(Instant instant) {
return toLong(instant);
}
public Instant toInstant(long value) {
return DateUtils.toInstant(value);
}
};
private final String type;
private final NumericType numericType;
Resolution(String type, NumericType numericType) {
this.type = type;
this.numericType = numericType;
}
public String type() {
return type;
}
NumericType numericType() {
return numericType;
}
public abstract long convert(Instant instant);
public abstract Instant toInstant(long value);
public static Resolution ofOrdinal(int ord) {
for (Resolution resolution : values()) {
if (ord == resolution.ordinal()) {
return resolution;
}
}
throw new IllegalArgumentException("unknown resolution ordinal [" + ord + "]");
}
}
public static class Builder extends FieldMapper.Builder<Builder, DateFieldMapper> {
private Boolean ignoreMalformed;
private Explicit<String> format = new Explicit<>(DEFAULT_DATE_TIME_FORMATTER.pattern(), false);
private Locale locale;
private Resolution resolution = Resolution.MILLISECONDS;
public Builder(String name) {
super(name, new DateFieldType(), new DateFieldType());
@@ -121,6 +176,11 @@ public class DateFieldMapper extends FieldMapper {
return this;
}
Builder withResolution(Resolution resolution) {
this.resolution = resolution;
return this;
}
public boolean isFormatterSet() {
return format.explicit();
}
@@ -135,6 +195,8 @@ public class DateFieldMapper extends FieldMapper {
if (hasPatternChanged || Objects.equals(builder.locale, dateTimeFormatter.locale()) == false) {
fieldType().setDateTimeFormatter(DateFormatter.forPattern(pattern).withLocale(locale));
}
fieldType().setResolution(resolution);
}
@Override
@@ -147,12 +209,16 @@ public class DateFieldMapper extends FieldMapper {
public static class TypeParser implements Mapper.TypeParser {
public TypeParser() {
private final Resolution resolution;
public TypeParser(Resolution resolution) {
this.resolution = resolution;
}
@Override
public Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(name);
builder.withResolution(resolution);
TypeParsers.parseField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
@@ -184,18 +250,21 @@ public class DateFieldMapper extends FieldMapper {
public static final class DateFieldType extends MappedFieldType {
protected DateFormatter dateTimeFormatter;
protected DateMathParser dateMathParser;
protected Resolution resolution;
DateFieldType() {
super();
setTokenized(false);
setHasDocValues(true);
setOmitNorms(true);
setDateTimeFormatter(DEFAULT_DATE_TIME_FORMATTER);
setDateTimeFormatter(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER);
setResolution(Resolution.MILLISECONDS);
}
DateFieldType(DateFieldType other) {
super(other);
setDateTimeFormatter(other.dateTimeFormatter);
setResolution(other.resolution);
}
@Override
@@ -207,17 +276,17 @@ public class DateFieldMapper extends FieldMapper {
public boolean equals(Object o) {
if (!super.equals(o)) return false;
DateFieldType that = (DateFieldType) o;
return Objects.equals(dateTimeFormatter, that.dateTimeFormatter);
return Objects.equals(dateTimeFormatter, that.dateTimeFormatter) && Objects.equals(resolution, that.resolution);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), dateTimeFormatter);
return Objects.hash(super.hashCode(), dateTimeFormatter, resolution);
}
@Override
public String typeName() {
return CONTENT_TYPE;
return resolution.type();
}
@Override
@@ -230,6 +299,9 @@ public class DateFieldMapper extends FieldMapper {
if (Objects.equals(dateTimeFormatter.locale(), other.dateTimeFormatter.locale()) == false) {
conflicts.add("mapper [" + name() + "] has different [locale] values");
}
if (Objects.equals(resolution.type(), other.resolution.type()) == false) {
conflicts.add("mapper [" + name() + "] cannot change between milliseconds and nanoseconds");
}
}
public DateFormatter dateTimeFormatter() {
@@ -242,12 +314,17 @@ public class DateFieldMapper extends FieldMapper {
this.dateMathParser = dateTimeFormatter.toDateMathParser();
}
void setResolution(Resolution resolution) {
checkIfFrozen();
this.resolution = resolution;
}
protected DateMathParser dateMathParser() {
return dateMathParser;
}
long parse(String value) {
return DateFormatters.from(dateTimeFormatter().parse(value)).toInstant().toEpochMilli();
return resolution.convert(DateFormatters.from(dateTimeFormatter().parse(value)).toInstant());
}
@Override
@@ -283,7 +360,7 @@ public class DateFieldMapper extends FieldMapper {
if (lowerTerm == null) {
l = Long.MIN_VALUE;
} else {
l = parseToMilliseconds(lowerTerm, !includeLower, timeZone, parser, context);
l = parseToLong(lowerTerm, !includeLower, timeZone, parser, context);
if (includeLower == false) {
++l;
}
@@ -291,7 +368,7 @@ public class DateFieldMapper extends FieldMapper {
if (upperTerm == null) {
u = Long.MAX_VALUE;
} else {
u = parseToMilliseconds(upperTerm, includeUpper, timeZone, parser, context);
u = parseToLong(upperTerm, includeUpper, timeZone, parser, context);
if (includeUpper == false) {
--u;
}
@@ -304,7 +381,7 @@ public class DateFieldMapper extends FieldMapper {
return query;
}
public long parseToMilliseconds(Object value, boolean roundUp,
public long parseToLong(Object value, boolean roundUp,
@Nullable ZoneId zone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
DateMathParser dateParser = dateMathParser();
if (forcedDateParser != null) {
@@ -317,7 +394,8 @@ public class DateFieldMapper extends FieldMapper {
} else {
strValue = value.toString();
}
return dateParser.parse(strValue, context::nowInMillis, roundUp, zone).toEpochMilli();
Instant instant = dateParser.parse(strValue, context::nowInMillis, roundUp, zone);
return resolution.convert(instant);
}
@Override
@@ -330,7 +408,7 @@ public class DateFieldMapper extends FieldMapper {
long fromInclusive = Long.MIN_VALUE;
if (from != null) {
fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser, context);
fromInclusive = parseToLong(from, !includeLower, timeZone, dateParser, context);
if (includeLower == false) {
if (fromInclusive == Long.MAX_VALUE) {
return Relation.DISJOINT;
@@ -341,7 +419,7 @@ public class DateFieldMapper extends FieldMapper {
long toInclusive = Long.MAX_VALUE;
if (to != null) {
toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser, context);
toInclusive = parseToLong(to, includeUpper, timeZone, dateParser, context);
if (includeUpper == false) {
if (toInclusive == Long.MIN_VALUE) {
return Relation.DISJOINT;
@@ -373,7 +451,7 @@ public class DateFieldMapper extends FieldMapper {
@Override
public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder().numericType(NumericType.DATE);
return new DocValuesIndexFieldData.Builder().numericType(resolution.numericType());
}
@Override
@@ -382,7 +460,7 @@ public class DateFieldMapper extends FieldMapper {
if (val == null) {
return null;
}
return dateTimeFormatter().formatMillis(val);
return dateTimeFormatter().format(resolution.toInstant(val).atZone(ZoneOffset.UTC));
}
@Override
@@ -394,7 +472,9 @@ public class DateFieldMapper extends FieldMapper {
if (timeZone == null) {
timeZone = ZoneOffset.UTC;
}
return new DocValueFormat.DateTime(dateTimeFormatter, timeZone);
// the resolution here is always set to milliseconds, as aggregations use this formatter mainly and those are always in
// milliseconds. The only special case here is doc value fields, which are handled in DocValueFieldsFetchSubPhase
return new DocValueFormat.DateTime(dateTimeFormatter, timeZone, Resolution.MILLISECONDS);
}
}
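
The new Resolution enum is serialized by ordinal (see the writeVInt/ofOrdinal pair
in the DocValueFormat changes below), so the order of its constants must not change.
A minimal round-trip sketch, using only names from this diff:

    import org.elasticsearch.index.mapper.DateFieldMapper.Resolution;

    public class ResolutionRoundTrip {
        public static void main(String[] args) {
            for (Resolution resolution : Resolution.values()) {
                int wire = resolution.ordinal();         // written with out.writeVInt(...)
                assert Resolution.ofOrdinal(wire) == resolution;
            }
        }
    }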

View File

@@ -318,7 +318,7 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
if (originString == null) {
origin = context.nowInMillis();
} else {
origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null, context);
origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToLong(originString, false, null, null, context);
}
if (scaleString == null) {

View File

@@ -122,7 +122,10 @@ public class IndicesModule extends AbstractModule {
}
mappers.put(BooleanFieldMapper.CONTENT_TYPE, new BooleanFieldMapper.TypeParser());
mappers.put(BinaryFieldMapper.CONTENT_TYPE, new BinaryFieldMapper.TypeParser());
mappers.put(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser());
DateFieldMapper.Resolution milliseconds = DateFieldMapper.Resolution.MILLISECONDS;
mappers.put(milliseconds.type(), new DateFieldMapper.TypeParser(milliseconds));
DateFieldMapper.Resolution nanoseconds = DateFieldMapper.Resolution.NANOSECONDS;
mappers.put(nanoseconds.type(), new DateFieldMapper.TypeParser(nanoseconds));
mappers.put(IpFieldMapper.CONTENT_TYPE, new IpFieldMapper.TypeParser());
mappers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser());
mappers.put(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser());

View File

@@ -31,6 +31,7 @@ import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.index.mapper.DateFieldMapper;
import java.io.IOException;
import java.net.InetAddress;
@@ -38,7 +39,6 @@ import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.text.ParseException;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Base64;
@@ -160,6 +160,15 @@ public interface DocValueFormat extends NamedWriteable {
}
};
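// used by the doc value fetch sub phase to promote a date_nanos field's default
// millisecond-resolution DateTime format to nanosecond resolution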
static DocValueFormat withNanosecondResolution(final DocValueFormat format) {
if (format instanceof DateTime) {
DateTime dateTime = (DateTime) format;
return new DateTime(dateTime.formatter, dateTime.timeZone, DateFieldMapper.Resolution.NANOSECONDS);
} else {
throw new IllegalArgumentException("trying to convert a known date time formatter to a nanosecond one, wrong field used?");
}
}
final class DateTime implements DocValueFormat {
public static final String NAME = "date_time";
@@ -167,11 +176,13 @@
final DateFormatter formatter;
final ZoneId timeZone;
private final DateMathParser parser;
final DateFieldMapper.Resolution resolution;
public DateTime(DateFormatter formatter, ZoneId timeZone) {
public DateTime(DateFormatter formatter, ZoneId timeZone, DateFieldMapper.Resolution resolution) {
this.formatter = formatter;
this.timeZone = Objects.requireNonNull(timeZone);
this.parser = formatter.toDateMathParser();
this.resolution = resolution;
}
public DateTime(StreamInput in) throws IOException {
@@ -180,8 +191,10 @@
String zoneId = in.readString();
if (in.getVersion().before(Version.V_7_0_0)) {
this.timeZone = DateUtils.of(zoneId);
this.resolution = DateFieldMapper.Resolution.MILLISECONDS;
} else {
this.timeZone = ZoneId.of(zoneId);
this.resolution = DateFieldMapper.Resolution.ofOrdinal(in.readVInt());
}
}
@@ -197,12 +210,13 @@
out.writeString(DateUtils.zoneIdToDateTimeZone(timeZone).getID());
} else {
out.writeString(timeZone.getId());
out.writeVInt(resolution.ordinal());
}
}
@Override
public String format(long value) {
return formatter.format(Instant.ofEpochMilli(value).atZone(timeZone));
return formatter.format(resolution.toInstant(value).atZone(timeZone));
}
@Override
@@ -212,7 +226,7 @@
@Override
public long parseLong(String value, boolean roundUp, LongSupplier now) {
return parser.parse(value, now, roundUp, timeZone).toEpochMilli();
return resolution.convert(parser.parse(value, now, roundUp, timeZone));
}
@Override

View File

@@ -42,7 +42,8 @@ public enum ValueType implements Writeable {
DOUBLE((byte) 3, "float|double", "double", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
NUMBER((byte) 4, "number", "number", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
DATE((byte) 5, "date", "date", ValuesSourceType.NUMERIC, IndexNumericFieldData.class,
new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ZoneOffset.UTC)),
new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ZoneOffset.UTC,
DateFieldMapper.Resolution.MILLISECONDS)),
IP((byte) 6, "ip", "ip", ValuesSourceType.BYTES, IndexFieldData.class, DocValueFormat.IP),
// TODO: what is the difference between "number" and "numeric"?
NUMERIC((byte) 7, "numeric", "numeric", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),

View File

@@ -26,6 +26,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.AggregationScript;
@@ -132,7 +133,8 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
valueFormat = new DocValueFormat.Decimal(format);
}
if (valueFormat instanceof DocValueFormat.DateTime && format != null) {
valueFormat = new DocValueFormat.DateTime(DateFormatter.forPattern(format), tz != null ? tz : ZoneOffset.UTC);
valueFormat = new DocValueFormat.DateTime(DateFormatter.forPattern(format), tz != null ? tz : ZoneOffset.UTC,
DateFieldMapper.Resolution.MILLISECONDS);
}
return valueFormat;
}

View File

@@ -30,6 +30,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.plain.SortedNumericDVIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchHit;
@@ -46,6 +47,9 @@ import java.util.HashMap;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import static org.elasticsearch.search.DocValueFormat.withNanosecondResolution;
/**
* Query sub phase which pulls data from doc values
*
@@ -92,12 +96,23 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
MappedFieldType fieldType = context.mapperService().fullName(field);
if (fieldType != null) {
final IndexFieldData<?> indexFieldData = context.getForField(fieldType);
final boolean isNanosecond;
if (indexFieldData instanceof IndexNumericFieldData) {
isNanosecond = ((IndexNumericFieldData) indexFieldData).getNumericType() == NumericType.DATE_NANOSECONDS;
} else {
isNanosecond = false;
}
final DocValueFormat format;
String formatDesc = fieldAndFormat.format;
if (Objects.equals(formatDesc, USE_DEFAULT_FORMAT)) {
// TODO: Remove in 8.x
formatDesc = null;
}
final DocValueFormat format = fieldType.docValueFormat(formatDesc, null);
if (isNanosecond) {
format = withNanosecondResolution(fieldType.docValueFormat(formatDesc, null));
} else {
format = fieldType.docValueFormat(formatDesc, null);
}
LeafReaderContext subReaderContext = null;
AtomicFieldData data = null;
SortedBinaryDocValues binaryValues = null; // binary / string / ip fields
@@ -110,12 +125,20 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
data = indexFieldData.load(subReaderContext);
if (indexFieldData instanceof IndexNumericFieldData) {
if (((IndexNumericFieldData) indexFieldData).getNumericType().isFloatingPoint()) {
NumericType numericType = ((IndexNumericFieldData) indexFieldData).getNumericType();
if (numericType.isFloatingPoint()) {
doubleValues = ((AtomicNumericFieldData) data).getDoubleValues();
} else {
// by default nanoseconds are cut to milliseconds within aggregations
// however for doc value fields we need the original nanosecond longs
if (isNanosecond) {
longValues = ((SortedNumericDVIndexFieldData.NanoSecondFieldData) data).getLongValuesAsNanos();
} else {
longValues = ((AtomicNumericFieldData) data).getLongValues();
}
}
} else {
data = indexFieldData.load(subReaderContext);
binaryValues = data.getBytesValues();
}
}

View File

@@ -24,16 +24,23 @@ import org.joda.time.DateTimeZone;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import static org.elasticsearch.common.time.DateUtils.toInstant;
import static org.elasticsearch.common.time.DateUtils.toLong;
import static org.elasticsearch.common.time.DateUtils.toMilliSeconds;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class DateUtilsTests extends ESTestCase {
private static final Set<String> IGNORE = new HashSet<>(Arrays.asList(
"Eire", "Europe/Dublin" // dublin timezone in joda does not account for DST
));
public void testTimezoneIds() {
assertNull(DateUtils.dateTimeZoneToZoneId(null));
assertNull(DateUtils.zoneIdToDateTimeZone(null));
@@ -51,4 +58,60 @@
assertNotNull(DateUtils.zoneIdToDateTimeZone(zoneId));
}
}
public void testInstantToLong() {
assertThat(toLong(Instant.EPOCH), is(0L));
Instant instant = createRandomInstant();
long timeSinceEpochInNanos = instant.getEpochSecond() * 1_000_000_000 + instant.getNano();
assertThat(toLong(instant), is(timeSinceEpochInNanos));
}
public void testInstantToLongMin() {
Instant tooEarlyInstant = ZonedDateTime.parse("1677-09-21T00:12:43.145224191Z").toInstant();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> toLong(tooEarlyInstant));
assertThat(e.getMessage(), containsString("is before"));
e = expectThrows(IllegalArgumentException.class, () -> toLong(Instant.EPOCH.minusMillis(1)));
assertThat(e.getMessage(), containsString("is before"));
}
public void testInstantToLongMax() {
Instant tooEarlyInstant = ZonedDateTime.parse("2262-04-11T23:47:16.854775808Z").toInstant();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> toLong(tooEarlyInstant));
assertThat(e.getMessage(), containsString("is after"));
}
public void testLongToInstant() {
assertThat(toInstant(0), is(Instant.EPOCH));
assertThat(toInstant(1), is(Instant.EPOCH.plusNanos(1)));
Instant instant = createRandomInstant();
long nowInNs = toLong(instant);
assertThat(toInstant(nowInNs), is(instant));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> toInstant(-1));
assertThat(e.getMessage(),
is("nanoseconds are [-1] are before the epoch in 1970 and cannot be processed in nanosecond resolution"));
e = expectThrows(IllegalArgumentException.class, () -> toInstant(Long.MIN_VALUE));
assertThat(e.getMessage(),
is("nanoseconds are [" + Long.MIN_VALUE + "] are before the epoch in 1970 and cannot be processed in nanosecond resolution"));
assertThat(toInstant(Long.MAX_VALUE),
is(ZonedDateTime.parse("2262-04-11T23:47:16.854775807Z").toInstant()));
}
public void testNanosToMillis() {
assertThat(toMilliSeconds(0), is(Instant.EPOCH.toEpochMilli()));
Instant instant = createRandomInstant();
long nowInNs = toLong(instant);
assertThat(toMilliSeconds(nowInNs), is(instant.toEpochMilli()));
}
private Instant createRandomInstant() {
long seconds = randomLongBetween(0, Long.MAX_VALUE) / 1_000_000_000L;
long nanos = randomLongBetween(0, 999_999_999L);
return Instant.ofEpochSecond(seconds, nanos);
}
}

View File

@@ -171,12 +171,12 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
// we have to normalize the incoming value into milliseconds since it could be literally anything
if (mappedFieldType instanceof DateFieldMapper.DateFieldType) {
fromInMillis = queryBuilder.from() == null ? null :
((DateFieldMapper.DateFieldType) mappedFieldType).parseToMilliseconds(queryBuilder.from(),
((DateFieldMapper.DateFieldType) mappedFieldType).parseToLong(queryBuilder.from(),
queryBuilder.includeLower(),
queryBuilder.getDateTimeZone(),
queryBuilder.getForceDateParser(), context.getQueryShardContext());
toInMillis = queryBuilder.to() == null ? null :
((DateFieldMapper.DateFieldType) mappedFieldType).parseToMilliseconds(queryBuilder.to(),
((DateFieldMapper.DateFieldType) mappedFieldType).parseToLong(queryBuilder.to(),
queryBuilder.includeUpper(),
queryBuilder.getDateTimeZone(),
queryBuilder.getForceDateParser(), context.getQueryShardContext());

View File

@@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.mapper.DateFieldMapper.Resolution;
import org.elasticsearch.test.ESTestCase;
import java.time.ZoneOffset;
@@ -61,7 +62,7 @@ public class DocValueFormatTests extends ESTestCase {
assertEquals("###.##", ((DocValueFormat.Decimal) vf).pattern);
DateFormatter formatter = DateFormatter.forPattern("epoch_second");
DocValueFormat.DateTime dateFormat = new DocValueFormat.DateTime(formatter, ZoneOffset.ofHours(1));
DocValueFormat.DateTime dateFormat = new DocValueFormat.DateTime(formatter, ZoneOffset.ofHours(1), Resolution.MILLISECONDS);
out = new BytesStreamOutput();
out.writeNamedWriteable(dateFormat);
in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
@@ -69,6 +70,17 @@
assertEquals(DocValueFormat.DateTime.class, vf.getClass());
assertEquals("epoch_second", ((DocValueFormat.DateTime) vf).formatter.pattern());
assertEquals(ZoneOffset.ofHours(1), ((DocValueFormat.DateTime) vf).timeZone);
assertEquals(Resolution.MILLISECONDS, ((DocValueFormat.DateTime) vf).resolution);
DocValueFormat.DateTime nanosDateFormat = new DocValueFormat.DateTime(formatter, ZoneOffset.ofHours(1), Resolution.NANOSECONDS);
out = new BytesStreamOutput();
out.writeNamedWriteable(nanosDateFormat);
in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
vf = in.readNamedWriteable(DocValueFormat.class);
assertEquals(DocValueFormat.DateTime.class, vf.getClass());
assertEquals("epoch_second", ((DocValueFormat.DateTime) vf).formatter.pattern());
assertEquals(ZoneOffset.ofHours(1), ((DocValueFormat.DateTime) vf).timeZone);
assertEquals(Resolution.NANOSECONDS, ((DocValueFormat.DateTime) vf).resolution);
out = new BytesStreamOutput();
out.writeNamedWriteable(DocValueFormat.GEOHASH);

View File

@@ -24,6 +24,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
@@ -64,7 +65,8 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCa
if (isLong) {
// we use specific format only for date histogram on a long/date field
if (randomBoolean()) {
return new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), ZoneOffset.ofHours(1));
return new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), ZoneOffset.ofHours(1),
DateFieldMapper.Resolution.MILLISECONDS);
} else {
return DocValueFormat.RAW;
}

View File

@@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchParseException;
@@ -102,7 +103,7 @@ public class ExtendedBoundsTests extends ESTestCase {
null, xContentRegistry(), writableRegistry(), null, null, () -> now, null);
when(context.getQueryShardContext()).thenReturn(qsc);
DateFormatter formatter = DateFormatter.forPattern("dateOptionalTime");
DocValueFormat format = new DocValueFormat.DateTime(formatter, ZoneOffset.UTC);
DocValueFormat format = new DocValueFormat.DateTime(formatter, ZoneOffset.UTC, DateFieldMapper.Resolution.MILLISECONDS);
ExtendedBounds expected = randomParsedExtendedBounds();
ExtendedBounds parsed = unparsed(expected).parseAndValidate("test", context, format);

View File

@@ -29,6 +29,7 @@ import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@@ -50,6 +51,7 @@ import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
@@ -79,6 +81,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@@ -115,6 +118,12 @@ public class SearchFieldsIT extends ESIntegTestCase {
return dates.getValue().toInstant().toEpochMilli();
});
scripts.put("doc['date'].date.nanos", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
ScriptDocValues.Dates dates = (ScriptDocValues.Dates) doc.get("date");
return DateUtils.toLong(dates.getValue().toInstant());
});
scripts.put("_fields['num1'].value", vars -> fieldsScript(vars, "num1"));
scripts.put("_fields._uid.value", vars -> fieldsScript(vars, "_uid"));
scripts.put("_fields._id.value", vars -> fieldsScript(vars, "_id"));
@@ -343,6 +352,51 @@
assertThat(response.getHits().getAt(2).getFields().get("sNum1").getValues().get(0), equalTo(6.0));
}
public void testScriptFieldWithNanos() throws Exception {
createIndex("test");
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc").startObject("properties")
.startObject("date").field("type", "date_nanos").endObject()
.endObject().endObject().endObject());
client().admin().indices().preparePutMapping().setType("doc").setSource(mapping, XContentType.JSON).get();
String date = "2019-01-31T10:00:00.123456789Z";
indexRandom(true, false,
client().prepareIndex("test", "doc", "1")
.setSource(jsonBuilder().startObject()
.field("date", "1970-01-01T00:00:00.000Z")
.endObject()),
client().prepareIndex("test", "doc", "2")
.setSource(jsonBuilder().startObject()
.field("date", date)
.endObject())
);
SearchResponse response = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort("date", SortOrder.ASC)
.addScriptField("date1",
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['date'].date.millis", Collections.emptyMap()))
.addScriptField("date2",
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['date'].date.nanos", Collections.emptyMap()))
.get();
assertNoFailures(response);
assertThat(response.getHits().getAt(0).getId(), is("1"));
assertThat(response.getHits().getAt(0).getFields().get("date1").getValues().get(0), equalTo(0L));
assertThat(response.getHits().getAt(0).getFields().get("date2").getValues().get(0), equalTo(0L));
assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(0L));
assertThat(response.getHits().getAt(1).getId(), is("2"));
Instant instant = ZonedDateTime.parse(date).toInstant();
long dateAsNanos = DateUtils.toLong(instant);
long dateAsMillis = instant.toEpochMilli();
assertThat(response.getHits().getAt(1).getFields().get("date1").getValues().get(0), equalTo(dateAsMillis));
assertThat(response.getHits().getAt(1).getFields().get("date2").getValues().get(0), equalTo(dateAsNanos));
assertThat(response.getHits().getAt(1).getSortValues()[0], equalTo(dateAsNanos));
}
public void testIdBasedScriptFields() throws Exception {
prepareCreate("test").addMapping("type1", "num1", "type=long").get();