SQL: Implement TIME_PARSE function for parsing strings into TIME values (#55223) (#57342)

Implement the TIME_PARSE(<time_str>, <pattern_str>) function, which parses
a time string into a time object according to the specified pattern. The
allowed patterns are those of java.time.format.DateTimeFormatter.
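
For orientation, a minimal standalone sketch of the kind of pattern-based parsing this delegates to, using only the plain `java.time` API (the class name is just for the sketch and is not part of this commit):

[source, java]
--------------------------------------------------
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.Locale;

public class TimeParseSketch {
    public static void main(String[] args) {
        // The pattern letters are those of java.time.format.DateTimeFormatter
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("HH:mm:ss.SSS", Locale.ROOT);
        LocalTime parsed = LocalTime.parse("10:20:30.123", formatter);
        System.out.println(parsed); // 10:20:30.123
    }
}
--------------------------------------------------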

Closes #54963

Co-authored-by: Andrei Stefan <astefan@users.noreply.github.com>
Co-authored-by: Patrick Jiang(白泽) <patrickjiang0530@gmail.com>

(cherry picked from commit 1fe1188d449cad7d0782a202372edc52a4014135)
Marios Trivyzas 2020-05-29 15:48:37 +02:00 committed by GitHub
parent 6b0d707671
commit b2651323fd
20 changed files with 601 additions and 111 deletions

View File

@ -496,6 +496,56 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[dateTimeParse3]
--------------------------------------------------
====
[[sql-functions-datetime-timeparse]]
==== `TIME_PARSE`
.Synopsis:
[source, sql]
--------------------------------------------------
TIME_PARSE(
string_exp, <1>
string_exp) <2>
--------------------------------------------------
*Input*:
<1> time expression as a string
<2> parsing pattern
*Output*: time
*Description*: Returns a time by parsing the 1st argument using the format specified in the 2nd argument. The parsing
format pattern is that of
https://docs.oracle.com/en/java/javase/14/docs/api/java.base/java/time/format/DateTimeFormatter.html[`java.time.format.DateTimeFormatter`].
If either of the two arguments is `null` or an empty string, `null` is returned.
[NOTE]
If the parsing pattern contains date units (e.g. 'dd/MM/uuuu', 'dd-MM HH:mm:ss', etc.), an error is returned,
as the function must return a value of type `time`, which holds only time-of-day information.
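A rough illustration of why such patterns fail, using only the standard `java.time` API (the class name is made up for the sketch):

[source, java]
--------------------------------------------------
import java.time.DateTimeException;
import java.time.LocalTime;
import java.time.OffsetTime;
import java.time.format.DateTimeFormatter;
import java.util.Locale;

public class DateUnitsSketch {
    public static void main(String[] args) {
        DateTimeFormatter dateOnly = DateTimeFormatter.ofPattern("dd/MM/uuuu", Locale.ROOT);
        try {
            // A date-only parse result can be queried neither as an OffsetTime nor as a LocalTime
            dateOnly.parseBest("07/05/2020", OffsetTime::from, LocalTime::from);
        } catch (DateTimeException e) {
            // message contains: "Unable to convert parsed text using any of the specified queries"
            System.out.println(e.getMessage());
        }
    }
}
--------------------------------------------------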
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[timeParse1]
--------------------------------------------------
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[timeParse2]
--------------------------------------------------
[NOTE]
====
If a timezone is specified neither in the time string expression nor in the parsing pattern,
the resulting `time` has the offset of the time zone specified by the user through the
<<sql-rest-fields-timezone,`time_zone`>>/<<jdbc-cfg-timezone,`timezone`>> REST/driver
parameters, resolved at the Unix epoch date (`1970-01-01`) with no conversion applied; a standalone
sketch of this offset resolution follows the note.
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[timeParse3]
--------------------------------------------------
====
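
A rough standalone sketch of that epoch-date offset resolution, using the `java.time` zone rules directly (the class name is illustrative only):

[source, java]
--------------------------------------------------
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.OffsetTime;
import java.time.ZoneId;
import java.time.ZoneOffset;

public class EpochOffsetSketch {
    public static void main(String[] args) {
        ZoneId athens = ZoneId.of("Europe/Athens");
        LocalTime parsed = LocalTime.parse("10:20:30"); // input carries no offset
        // Resolve the zone's offset at 1970-01-01, since no date component is available
        ZoneOffset offsetAtEpoch = athens.getRules()
            .getValidOffsets(parsed.atDate(LocalDate.EPOCH)).get(0);
        OffsetTime result = OffsetTime.of(parsed, offsetAtEpoch);
        System.out.println(result); // 10:20:30+02:00
    }
}
--------------------------------------------------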
[[sql-functions-datetime-part]]
==== `DATE_PART/DATEPART`

View File

@ -55,6 +55,7 @@
** <<sql-functions-datetime-diff>>
** <<sql-functions-datetime-datetimeformat>>
** <<sql-functions-datetime-datetimeparse>>
** <<sql-functions-datetime-timeparse>>
** <<sql-functions-datetime-part>>
** <<sql-functions-datetime-trunc>>
** <<sql-functions-datetime-day>>
@ -91,7 +92,7 @@
** <<sql-functions-math-log10>>
** <<sql-functions-math-pi>>
** <<sql-functions-math-power>>
** <<sql-functions-math-random>>
** <<sql-functions-math-random>>
** <<sql-functions-math-round>>
** <<sql-functions-math-sign>>
** <<sql-functions-math-sqrt>>

View File

@ -62,6 +62,8 @@ public final class CsvTestUtils {
csvProperties.setProperty("charset", "UTF-8");
csvProperties.setProperty("separator", "|");
csvProperties.setProperty("trimValues", "true");
// Format to read and compare java.sql.Time values
csvProperties.setProperty("timeFormat", "HH:mm:ss.SSSX");
Tuple<String, String> resultsAndTypes = extractColumnTypesAndStripCli(csvTest.earlySchema, csvTest.expectedResults);
csvProperties.setProperty("columnTypes", resultsAndTypes.v2());
Reader reader = new StringReader(resultsAndTypes.v1());
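
For reference, values in that format look like `10:20:30.123Z`; a sketch with plain `SimpleDateFormat` (the CSV reader's own handling may differ in details):

[source, java]
--------------------------------------------------
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class TimeFormatSketch {
    public static void main(String[] args) throws ParseException {
        // "X" is the ISO-8601 zone designator, so a UTC value round-trips as "...Z"
        SimpleDateFormat format = new SimpleDateFormat("HH:mm:ss.SSSX");
        format.setTimeZone(TimeZone.getTimeZone("UTC"));
        Date parsed = format.parse("10:20:30.123Z");
        System.out.println(format.format(parsed)); // 10:20:30.123Z
    }
}
--------------------------------------------------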

View File

@ -89,6 +89,7 @@ TIMESTAMPADD |SCALAR
TIMESTAMPDIFF |SCALAR
TIMESTAMP_ADD |SCALAR
TIMESTAMP_DIFF |SCALAR
TIME_PARSE |SCALAR
TODAY |SCALAR
WEEK |SCALAR
WEEK_OF_YEAR |SCALAR

View File

@ -285,6 +285,7 @@ TIMESTAMPADD |SCALAR
TIMESTAMPDIFF |SCALAR
TIMESTAMP_ADD |SCALAR
TIMESTAMP_DIFF |SCALAR
TIME_PARSE |SCALAR
TODAY |SCALAR
WEEK |SCALAR
WEEK_OF_YEAR |SCALAR
@ -2803,6 +2804,42 @@ schema::datetime:ts
// end::dateTimeParse3
;
timeParse1
schema::time:time
// tag::timeParse1
SELECT TIME_PARSE('10:20:30.123', 'HH:mm:ss.SSS') AS "time";
time
---------------
10:20:30.123Z
// end::timeParse1
;
timeParse2
schema::time:time
// tag::timeParse2
SELECT TIME_PARSE('10:20:30-01:00', 'HH:mm:ssXXX') AS "time";
time
---------------
11:20:30.000Z
// end::timeParse2
;
timeParse3-Ignore
schema::time:time
// tag::timeParse3
{
"query" : "SELECT DATETIME_PARSE('10:20:30', 'HH:mm:ss') AS \"time\"",
"time_zone" : "Europe/Athens"
}
time
------------------------------------
10:20:30.000+02:00
// end::timeParse3
;
datePartDateTimeYears
// tag::datePartDateTimeYears
SELECT DATE_PART('year', '2019-09-22T11:22:33.123Z'::datetime) AS "years";

View File

@ -104,3 +104,74 @@ SELECT MAX(salary) FROM test_emp GROUP BY CURRENT_TIME;
---------------
74999
;
selectTimeParse
schema::tp_time1:time|tp_time2:time
SELECT TIME_PARSE('11:22:33', 'HH:mm:ss') AS tp_time1,
TIME_PARSE('11:22:33 -0533', 'HH:mm:ss xx') AS tp_time2;
tp_time1 | tp_time2
----------------------------+----------------------------
11:22:33.000Z | 16:55:33.000Z
;
selectTimeParseWithField
schema::@timestamp:ts|tp_time:time
SELECT "@timestamp", TIME_PARSE(DATETIME_FORMAT("@timestamp", 'HH mm SSS ss'), 'HH mm SSS ss') AS tp_time
FROM logs WHERE client_ip = '10.0.1.13' ORDER BY "@timestamp" desc;
@timestamp | tp_time
-------------------------+-------------------------
2017-11-10 20:36:15.000Z | 20:36:15.000Z
2017-11-10 20:36:07.000Z | 20:36:07.000Z
2017-11-10 20:35:55.000Z | 20:35:55.000Z
2017-11-10 20:35:54.000Z | 20:35:54.000Z
2017-11-10 17:54:43.000Z | 17:54:43.000Z
;
timeParseWhere
schema::@timestamp:ts|tp_time:time
SELECT "@timestamp", TIME_PARSE(DATETIME_FORMAT("@timestamp", 'HH.mm.ss'), 'HH.mm.ss') AS tp_time
FROM logs WHERE "@timestamp" > '2017-11-10'::date and tp_time = '21:15:39'::time ORDER BY id;
@timestamp | tp_time
-------------------------+------------------------
2017-11-10 21:15:39.000Z | 21:15:39.000Z
2017-11-10 21:15:39.000Z | 21:15:39.000Z
2017-11-10 21:15:39.000Z | 21:15:39.000Z
;
timeParseOrderBy
schema::@timestamp:ts|tp_time:time
SELECT "@timestamp", TIME_PARSE(DATETIME_FORMAT("@timestamp", 'HH:mm:ss.SSS'), 'HH:mm:ss.SSS') AS tp_time
FROM logs ORDER BY 2 DESC, 1 DESC LIMIT 5;
@timestamp | tp_time
-------------------------+-------------------------
2017-11-10 23:56:36.000Z | 23:56:36.000Z
2017-11-10 23:43:10.000Z | 23:43:10.000Z
2017-11-10 23:36:41.000Z | 23:36:41.000Z
2017-11-10 23:36:33.000Z | 23:36:33.000Z
2017-11-10 23:36:32.000Z | 23:36:32.000Z
;
timeParseGroupBy
schema::count:l|df_tp_time:s
SELECT count(*) AS count, CAST(TIME_PARSE(DATETIME_FORMAT("@timestamp", 'HH:mm:ss'), 'HH:mm:ss') AS VARCHAR) AS df_tp_time
FROM logs GROUP BY df_tp_time ORDER BY 1 DESC, 2 DESC NULLS LAST LIMIT 1;
count | df_tp_time
-------+---------------
7 | 20:35:57.000Z
;
timeParseHaving
schema::max:ts|tt_month:s
SELECT MAX("@timestamp") AS max, DATETIME_FORMAT("@timestamp", 'MM') AS tt_month FROM logs GROUP BY tt_month
HAVING TIME_PARSE(DATETIME_FORMAT(MAX("@timestamp"), 'HH:mm:ss'), 'HH:mm:ss') > '21:15:39'::time ORDER BY 1 DESC NULLS LAST;
max | tt_month
-------------------------+---------------
2017-11-10 23:56:36.000Z | 11
;

View File

@ -49,6 +49,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthName
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthOfYear;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Quarter;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.SecondOfMinute;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeParse;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.WeekOfYear;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Year;
import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StAswkt;
@ -182,6 +183,7 @@ public class SqlFunctionRegistry extends FunctionRegistry {
def(MonthName.class, MonthName::new, "MONTH_NAME", "MONTHNAME"),
def(MonthOfYear.class, MonthOfYear::new, "MONTH_OF_YEAR", "MONTH"),
def(SecondOfMinute.class, SecondOfMinute::new, "SECOND_OF_MINUTE", "SECOND"),
def(TimeParse.class, TimeParse::new, "TIME_PARSE"),
def(Quarter.class, Quarter::new, "QUARTER"),
def(Year.class, Year::new, "YEAR"),
def(WeekOfYear.class, WeekOfYear::new, "WEEK_OF_YEAR", "WEEK")

View File

@ -78,17 +78,17 @@ public final class Processors {
entries.add(new Entry(Processor.class, NullIfProcessor.NAME, NullIfProcessor::new));
// datetime
entries.add(new Entry(Processor.class, DateTimeProcessor.NAME, DateTimeProcessor::new));
entries.add(new Entry(Processor.class, TimeProcessor.NAME, TimeProcessor::new));
entries.add(new Entry(Processor.class, NamedDateTimeProcessor.NAME, NamedDateTimeProcessor::new));
entries.add(new Entry(Processor.class, NonIsoDateTimeProcessor.NAME, NonIsoDateTimeProcessor::new));
entries.add(new Entry(Processor.class, QuarterProcessor.NAME, QuarterProcessor::new));
entries.add(new Entry(Processor.class, DateAddProcessor.NAME, DateAddProcessor::new));
entries.add(new Entry(Processor.class, DateDiffProcessor.NAME, DateDiffProcessor::new));
entries.add(new Entry(Processor.class, DatePartProcessor.NAME, DatePartProcessor::new));
entries.add(new Entry(Processor.class, DateTimeFormatProcessor.NAME, DateTimeFormatProcessor::new));
entries.add(new Entry(Processor.class, DateTimeParseProcessor.NAME, DateTimeParseProcessor::new));
entries.add(new Entry(Processor.class, DateTimeProcessor.NAME, DateTimeProcessor::new));
entries.add(new Entry(Processor.class, DateTruncProcessor.NAME, DateTruncProcessor::new));
entries.add(new Entry(Processor.class, NamedDateTimeProcessor.NAME, NamedDateTimeProcessor::new));
entries.add(new Entry(Processor.class, NonIsoDateTimeProcessor.NAME, NonIsoDateTimeProcessor::new));
entries.add(new Entry(Processor.class, QuarterProcessor.NAME, QuarterProcessor::new));
entries.add(new Entry(Processor.class, TimeProcessor.NAME, TimeProcessor::new));
// math
entries.add(new Entry(Processor.class, BinaryMathProcessor.NAME, BinaryMathProcessor::new));
entries.add(new Entry(Processor.class, BinaryOptionalMathProcessor.NAME, BinaryOptionalMathProcessor::new));

View File

@ -0,0 +1,56 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.Expressions;
import org.elasticsearch.xpack.ql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.ql.tree.NodeInfo;
import org.elasticsearch.xpack.ql.tree.Source;
import java.time.ZoneId;
import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeParseProcessor.Parser;
public abstract class BaseDateTimeParseFunction extends BinaryDateTimeFunction {
public BaseDateTimeParseFunction(Source source, Expression datePart, Expression timestamp, ZoneId zoneId) {
super(source, datePart, timestamp, zoneId);
}
@Override
protected TypeResolution resolveType() {
TypeResolution resolution = isString(left(), sourceText(), Expressions.ParamOrdinal.FIRST);
if (resolution.unresolved()) {
return resolution;
}
resolution = isString(right(), sourceText(), Expressions.ParamOrdinal.SECOND);
if (resolution.unresolved()) {
return resolution;
}
return TypeResolution.TYPE_RESOLVED;
}
@Override
public Object fold() {
return parser().parse(left().fold(), right().fold(), zoneId());
}
@Override
protected Pipe createPipe(Pipe timestamp, Pipe pattern, ZoneId zoneId) {
return new DateTimeParsePipe(source(), this, timestamp, pattern, zoneId, parser());
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, ctorForInfo(), left(), right(), zoneId());
}
protected abstract Parser parser();
protected abstract NodeInfo.NodeCtor3<Expression, Expression, ZoneId, BaseDateTimeParseFunction> ctorForInfo();
}

View File

@ -30,7 +30,7 @@ public abstract class BinaryDateTimeProcessor extends BinaryProcessor {
}
@Override
protected void doWrite(StreamOutput out) {
protected void doWrite(StreamOutput out) throws IOException {
}
ZoneId zoneId() {

View File

@ -6,9 +6,7 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.Expressions;
import org.elasticsearch.xpack.ql.expression.function.scalar.BinaryScalarFunction;
import org.elasticsearch.xpack.ql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.ql.tree.NodeInfo;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.ql.type.DataType;
@ -16,54 +14,38 @@ import org.elasticsearch.xpack.ql.type.DataTypes;
import java.time.ZoneId;
import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeParseProcessor.Parser.DATE_TIME;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeParseProcessor.Parser;
public class DateTimeParse extends BinaryDateTimeFunction {
public class DateTimeParse extends BaseDateTimeParseFunction {
public DateTimeParse(Source source, Expression timestamp, Expression pattern, ZoneId zoneId) {
super(source, timestamp, pattern, zoneId);
}
@Override
protected Parser parser() {
return DATE_TIME;
}
@Override
protected NodeInfo.NodeCtor3<Expression, Expression, ZoneId, BaseDateTimeParseFunction> ctorForInfo() {
return DateTimeParse::new;
}
@Override
public DataType dataType() {
return DataTypes.DATETIME;
}
@Override
protected TypeResolution resolveType() {
TypeResolution resolution = isString(left(), sourceText(), Expressions.ParamOrdinal.FIRST);
if (resolution.unresolved()) {
return resolution;
}
resolution = isString(right(), sourceText(), Expressions.ParamOrdinal.SECOND);
if (resolution.unresolved()) {
return resolution;
}
return TypeResolution.TYPE_RESOLVED;
}
@Override
protected BinaryScalarFunction replaceChildren(Expression timestamp, Expression pattern) {
return new DateTimeParse(source(), timestamp, pattern, zoneId());
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, DateTimeParse::new, left(), right(), zoneId());
}
@Override
protected String scriptMethodName() {
return "dateTimeParse";
}
@Override
public Object fold() {
return DateTimeParseProcessor.process(left().fold(), right().fold(), zoneId());
}
@Override
protected Pipe createPipe(Pipe timestamp, Pipe pattern, ZoneId zoneId) {
return new DateTimeParsePipe(source(), this, timestamp, pattern, zoneId);
}
}

View File

@ -8,29 +8,58 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.ql.expression.gen.processor.Processor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeParseProcessor.Parser;
import org.elasticsearch.xpack.ql.tree.NodeInfo;
import org.elasticsearch.xpack.ql.tree.Source;
import java.time.ZoneId;
import java.util.Objects;
public class DateTimeParsePipe extends BinaryDateTimePipe {
private final Parser parser;
public DateTimeParsePipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId) {
public DateTimeParsePipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId, Parser parser) {
super(source, expression, left, right, zoneId);
this.parser = parser;
}
@Override
protected NodeInfo<DateTimeParsePipe> info() {
return NodeInfo.create(this, DateTimeParsePipe::new, expression(), left(), right(), zoneId());
return NodeInfo.create(this, DateTimeParsePipe::new, expression(), left(), right(), zoneId(), parser);
}
@Override
protected DateTimeParsePipe replaceChildren(Pipe left, Pipe right) {
return new DateTimeParsePipe(source(), expression(), left, right, zoneId());
return new DateTimeParsePipe(source(), expression(), left, right, zoneId(), parser);
}
@Override
protected Processor makeProcessor(Processor left, Processor right, ZoneId zoneId) {
return new DateTimeParseProcessor(left, right, zoneId);
return new DateTimeParseProcessor(left, right, zoneId, parser);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), this.parser);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
DateTimeParsePipe that = (DateTimeParsePipe) o;
return super.equals(o) && this.parser == that.parser;
}
public Parser parser() {
return parser;
}
}

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.ql.expression.gen.processor.Processor;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.util.DateUtils;
@ -13,63 +14,85 @@ import org.elasticsearch.xpack.sql.util.DateUtils;
import java.io.IOException;
import java.time.DateTimeException;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalQuery;
import java.util.Locale;
import java.util.Objects;
import java.util.function.BiFunction;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
public class DateTimeParseProcessor extends BinaryDateTimeProcessor {
public enum Parser {
DATE_TIME("datetime", ZonedDateTime::from, LocalDateTime::from),
TIME("time", OffsetTime::from, LocalTime::from);
private final BiFunction<String, String, TemporalAccessor> parser;
private final String parseType;
Parser(String parseType, TemporalQuery<?>... queries) {
this.parseType = parseType;
this.parser = (timestampStr, pattern) -> DateTimeFormatter.ofPattern(pattern, Locale.ROOT)
.parseBest(timestampStr, queries);
}
public Object parse(Object timestamp, Object pattern, ZoneId zoneId) {
if (timestamp == null || pattern == null) {
return null;
}
if (timestamp instanceof String == false) {
throw new SqlIllegalArgumentException("A string is required; received [{}]", timestamp);
}
if (pattern instanceof String == false) {
throw new SqlIllegalArgumentException("A string is required; received [{}]", pattern);
}
if (((String) timestamp).isEmpty() || ((String) pattern).isEmpty()) {
return null;
}
try {
TemporalAccessor ta = parser.apply((String) timestamp, (String) pattern);
return DateUtils.atTimeZone(ta, zoneId);
} catch (IllegalArgumentException | DateTimeException e) {
String msg = e.getMessage();
if (msg.contains("Unable to convert parsed text using any of the specified queries")) {
msg = format(null, "Unable to convert parsed text into [{}]", this.parseType);
}
throw new SqlIllegalArgumentException(
"Invalid {} string [{}] or pattern [{}] is received; {}",
this.parseType,
timestamp,
pattern,
msg
);
}
}
}
private final Parser parser;
public static final String NAME = "dtparse";
public DateTimeParseProcessor(Processor source1, Processor source2, ZoneId zoneId) {
public DateTimeParseProcessor(Processor source1, Processor source2, ZoneId zoneId, Parser parser) {
super(source1, source2, zoneId);
this.parser = parser;
}
public DateTimeParseProcessor(StreamInput in) throws IOException {
super(in);
this.parser = in.readEnum(Parser.class);
}
/**
* Used in Painless scripting
*/
public static Object process(Object timestampStr, Object pattern, ZoneId zoneId) {
if (timestampStr == null || pattern == null) {
return null;
}
if (timestampStr instanceof String == false) {
throw new SqlIllegalArgumentException("A string is required; received [{}]", timestampStr);
}
if (pattern instanceof String == false) {
throw new SqlIllegalArgumentException("A string is required; received [{}]", pattern);
}
if (((String) timestampStr).isEmpty() || ((String) pattern).isEmpty()) {
return null;
}
try {
TemporalAccessor ta = DateTimeFormatter.ofPattern((String) pattern, Locale.ROOT)
.parseBest((String) timestampStr, ZonedDateTime::from, LocalDateTime::from);
if (ta instanceof LocalDateTime) {
return DateUtils.atTimeZone((LocalDateTime) ta, zoneId);
} else {
return ((ZonedDateTime) ta).withZoneSameInstant(zoneId);
}
} catch (IllegalArgumentException | DateTimeException e) {
String msg = e.getMessage();
if (msg.contains("Unable to convert parsed text using any of the specified queries")) {
msg = "Unable to convert parsed text into [datetime]";
}
throw new SqlIllegalArgumentException(
"Invalid date/time string [{}] or pattern [{}] is received; {}",
timestampStr,
pattern,
msg
);
}
@Override
public void doWrite(StreamOutput out) throws IOException {
out.writeEnum(parser);
}
@Override
@ -79,12 +102,12 @@ public class DateTimeParseProcessor extends BinaryDateTimeProcessor {
@Override
protected Object doProcess(Object timestamp, Object pattern) {
return process(timestamp, pattern, zoneId());
return this.parser.parse(timestamp, pattern, zoneId());
}
@Override
public int hashCode() {
return Objects.hash(left(), right());
return Objects.hash(super.hashCode(), parser);
}
@Override
@ -98,6 +121,10 @@ public class DateTimeParseProcessor extends BinaryDateTimeProcessor {
}
DateTimeParseProcessor other = (DateTimeParseProcessor) obj;
return Objects.equals(left(), other.left()) && Objects.equals(right(), other.right());
return super.equals(other) && Objects.equals(parser, other.parser);
}
public Parser parser() {
return parser;
}
}
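
The `parseBest` call used by the `Parser` enum returns the result of the first query that matches, which is how inputs with and without an offset end up as different temporal types; a standalone sketch of that behavior (class name is illustrative only):

[source, java]
--------------------------------------------------
import java.time.LocalTime;
import java.time.OffsetTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;

public class ParseBestSketch {
    public static void main(String[] args) {
        // With an offset in the input, the OffsetTime::from query wins
        DateTimeFormatter withOffset = DateTimeFormatter.ofPattern("HH:mm:ssXXX", Locale.ROOT);
        TemporalAccessor a = withOffset.parseBest("10:20:30-01:00", OffsetTime::from, LocalTime::from);
        System.out.println(a.getClass().getSimpleName()); // OffsetTime

        // Without an offset, parseBest falls back to LocalTime::from
        DateTimeFormatter noOffset = DateTimeFormatter.ofPattern("HH:mm:ss", Locale.ROOT);
        TemporalAccessor b = noOffset.parseBest("10:20:30", OffsetTime::from, LocalTime::from);
        System.out.println(b.getClass().getSimpleName()); // LocalTime
    }
}
--------------------------------------------------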

View File

@ -0,0 +1,51 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.function.scalar.BinaryScalarFunction;
import org.elasticsearch.xpack.ql.tree.NodeInfo;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.ql.type.DataType;
import org.elasticsearch.xpack.sql.type.SqlDataTypes;
import java.time.ZoneId;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeParseProcessor.Parser.TIME;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeParseProcessor.Parser;
public class TimeParse extends BaseDateTimeParseFunction {
public TimeParse(Source source, Expression timestamp, Expression pattern, ZoneId zoneId) {
super(source, timestamp, pattern, zoneId);
}
@Override
protected Parser parser() {
return TIME;
}
@Override
protected NodeInfo.NodeCtor3<Expression, Expression, ZoneId, BaseDateTimeParseFunction> ctorForInfo() {
return TimeParse::new;
}
@Override
public DataType dataType() {
return SqlDataTypes.TIME;
}
@Override
protected BinaryScalarFunction replaceChildren(Expression timestamp, Expression pattern) {
return new TimeParse(source(), timestamp, pattern, zoneId());
}
@Override
protected String scriptMethodName() {
return "timeParse";
}
}

View File

@ -15,10 +15,10 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateDiffP
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePartProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFormatProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeParseProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeParseProcessor.Parser;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor;
@ -289,9 +289,13 @@ public class InternalSqlScriptUtils extends InternalQlScriptUtils {
}
public static Object dateTimeParse(String dateField, String pattern, String tzId) {
return DateTimeParseProcessor.process(dateField, pattern, ZoneId.of(tzId));
return Parser.DATE_TIME.parse(dateField, pattern, ZoneId.of(tzId));
}
public static Object timeParse(String dateField, String pattern, String tzId) {
return Parser.TIME.parse(dateField, pattern, ZoneId.of(tzId));
}
public static ZonedDateTime asDateTime(Object dateTime) {
return (ZonedDateTime) asDateTime(dateTime, false);
}

View File

@ -18,11 +18,13 @@ import org.elasticsearch.xpack.sql.type.SqlDataTypeConverter;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.temporal.TemporalAccessor;
import static java.time.format.DateTimeFormatter.ISO_LOCAL_DATE;
import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME;
@ -209,4 +211,32 @@ public final class DateUtils {
public static ZonedDateTime atTimeZone(LocalDateTime ldt, ZoneId zoneId) {
return ZonedDateTime.ofInstant(ldt, zoneId.getRules().getValidOffsets(ldt).get(0), zoneId);
}
public static OffsetTime atTimeZone(OffsetTime ot, ZoneId zoneId) {
LocalDateTime ldt = ot.atDate(EPOCH).toLocalDateTime();
return ot.withOffsetSameInstant(zoneId.getRules().getValidOffsets(ldt).get(0));
}
public static OffsetTime atTimeZone(LocalTime lt, ZoneId zoneId) {
LocalDateTime ldt = lt.atDate(EPOCH);
return OffsetTime.of(lt, zoneId.getRules().getValidOffsets(ldt).get(0));
}
public static ZonedDateTime atTimeZone(ZonedDateTime zdt, ZoneId zoneId) {
return zdt.withZoneSameInstant(zoneId);
}
public static TemporalAccessor atTimeZone(TemporalAccessor ta, ZoneId zoneId) {
if (ta instanceof LocalDateTime) {
return atTimeZone((LocalDateTime) ta, zoneId);
} else if (ta instanceof ZonedDateTime) {
return atTimeZone((ZonedDateTime) ta, zoneId);
} else if (ta instanceof OffsetTime) {
return atTimeZone((OffsetTime) ta, zoneId);
} else if (ta instanceof LocalTime) {
return atTimeZone((LocalTime) ta, zoneId);
} else {
return ta;
}
}
}
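
A standalone sketch mirroring the new `atTimeZone(OffsetTime, ZoneId)` overload above: the offset time is shifted to the offset the target zone had at the epoch date (class name is illustrative only):

[source, java]
--------------------------------------------------
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.OffsetTime;
import java.time.ZoneId;

public class OffsetTimeShiftSketch {
    public static void main(String[] args) {
        OffsetTime input = OffsetTime.parse("10:20:30-01:00");
        ZoneId athens = ZoneId.of("Europe/Athens");
        // Anchor the time at 1970-01-01 to look up the zone's offset on that date
        LocalDateTime atEpoch = input.atDate(LocalDate.EPOCH).toLocalDateTime();
        OffsetTime shifted = input.withOffsetSameInstant(
            athens.getRules().getValidOffsets(atEpoch).get(0));
        System.out.println(shifted); // 13:20:30+02:00
    }
}
--------------------------------------------------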

View File

@ -135,6 +135,7 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS
Integer datePart(String, Object, String)
String dateTimeFormat(Object, String, String)
def dateTimeParse(String, String, String)
def timeParse(String, String, String)
IntervalDayTime intervalDayTime(String, String)
IntervalYearMonth intervalYearMonth(String, String)
ZonedDateTime asDateTime(Object)

View File

@ -15,6 +15,7 @@ import org.elasticsearch.xpack.ql.tree.AbstractNodeTestCase;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.ql.tree.SourceTests;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
@ -23,16 +24,26 @@ import java.util.function.Function;
import static org.elasticsearch.xpack.ql.expression.Expressions.pipe;
import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.randomStringLiteral;
import static org.elasticsearch.xpack.ql.tree.SourceTests.randomSource;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeParseProcessor.Parser;
public class DateTimeParsePipeTests extends AbstractNodeTestCase<DateTimeParsePipe, Pipe> {
public static DateTimeParsePipe randomDateTimeParsePipe() {
return (DateTimeParsePipe) new DateTimeParse(
randomSource(),
randomStringLiteral(),
randomStringLiteral(),
randomZone()
).makePipe();
List<Pipe> functions = new ArrayList<>();
functions.add(new DateTimeParse(
randomSource(),
randomStringLiteral(),
randomStringLiteral(),
randomZone()
).makePipe());
functions.add(new TimeParse(
randomSource(),
randomStringLiteral(),
randomStringLiteral(),
randomZone()
).makePipe());
return (DateTimeParsePipe) randomFrom(functions);
}
@Override
@ -51,13 +62,29 @@ public class DateTimeParsePipeTests extends AbstractNodeTestCase<DateTimeParsePi
DateTimeParsePipe b1 = randomInstance();
Expression newExpression = randomValueOtherThan(b1.expression(), this::randomDateTimeParsePipeExpression);
DateTimeParsePipe newB = new DateTimeParsePipe(b1.source(), newExpression, b1.left(), b1.right(), b1.zoneId());
DateTimeParsePipe newB = new DateTimeParsePipe(
b1.source(),
newExpression,
b1.left(),
b1.right(),
b1.zoneId(),
b1.parser());
assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class));
DateTimeParsePipe b2 = randomInstance();
Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource);
newB = new DateTimeParsePipe(newLoc, b2.expression(), b2.left(), b2.right(), b2.zoneId());
newB = new DateTimeParsePipe(newLoc, b2.expression(), b2.left(), b2.right(), b2.zoneId(), b2.parser());
assertEquals(newB, b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class));
DateTimeParsePipe b3 = randomInstance();
Parser newPr = randomValueOtherThan(b3.parser(), () -> randomFrom(Parser.values()));
newB = new DateTimeParsePipe(b3.source(), b3.expression(), b3.left(), b3.right(), b3.zoneId(), newPr);
assertEquals(newB, b3.transformPropertiesOnly(v -> Objects.equals(v, b3.parser()) ? newPr : v, Parser.class));
DateTimeParsePipe b4 = randomInstance();
ZoneId newZI = randomValueOtherThan(b4.zoneId(), ESTestCase::randomZone);
newB = new DateTimeParsePipe(b4.source(), b4.expression(), b4.left(), b4.right(), newZI, b4.parser());
assertEquals(newB, b4.transformPropertiesOnly(v -> Objects.equals(v, b4.zoneId()) ? newZI : v, ZoneId.class));
}
@Override
@ -65,7 +92,13 @@ public class DateTimeParsePipeTests extends AbstractNodeTestCase<DateTimeParsePi
DateTimeParsePipe b = randomInstance();
Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), FunctionTestUtils::randomDatetimeLiteral)));
Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), FunctionTestUtils::randomStringLiteral)));
DateTimeParsePipe newB = new DateTimeParsePipe(b.source(), b.expression(), b.left(), b.right(), b.zoneId());
DateTimeParsePipe newB = new DateTimeParsePipe(
b.source(),
b.expression(),
b.left(),
b.right(),
b.zoneId(),
b.parser());
BinaryPipe transformed = newB.replaceChildren(newLeft, b.right());
assertEquals(transformed.left(), newLeft);
@ -95,7 +128,8 @@ public class DateTimeParsePipeTests extends AbstractNodeTestCase<DateTimeParsePi
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomDatetimeLiteral))),
f.right(),
f.zoneId()
f.zoneId(),
f.parser()
)
);
randoms.add(
@ -104,7 +138,8 @@ public class DateTimeParsePipeTests extends AbstractNodeTestCase<DateTimeParsePi
f.expression(),
f.left(),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomStringLiteral))),
f.zoneId()
f.zoneId(),
f.parser()
)
);
randoms.add(
@ -113,7 +148,8 @@ public class DateTimeParsePipeTests extends AbstractNodeTestCase<DateTimeParsePi
f.expression(),
f.left(),
f.right(),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone),
f.parser()
)
);
randoms.add(
@ -122,7 +158,18 @@ public class DateTimeParsePipeTests extends AbstractNodeTestCase<DateTimeParsePi
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomDatetimeLiteral))),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomStringLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone),
f.parser()
)
);
randoms.add(
f -> new DateTimeParsePipe(
f.source(),
f.expression(),
f.left(),
f.right(),
f.zoneId(),
randomValueOtherThan(f.parser(), () -> randomFrom(Parser.values()))
)
);
@ -131,6 +178,12 @@ public class DateTimeParsePipeTests extends AbstractNodeTestCase<DateTimeParsePi
@Override
protected DateTimeParsePipe copy(DateTimeParsePipe instance) {
return new DateTimeParsePipe(instance.source(), instance.expression(), instance.left(), instance.right(), instance.zoneId());
return new DateTimeParsePipe(
instance.source(),
instance.expression(),
instance.left(),
instance.right(),
instance.zoneId(),
instance.parser());
}
}

View File

@ -12,13 +12,16 @@ import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.sql.AbstractSqlWireSerializingTestCase;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeParseProcessor.Parser;
import java.time.ZoneId;
import java.time.ZoneOffset;
import static org.elasticsearch.xpack.ql.expression.Literal.NULL;
import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.l;
import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.randomStringLiteral;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.time;
public class DateTimeParseProcessorTests extends AbstractSqlWireSerializingTestCase<DateTimeParseProcessor> {
@ -26,7 +29,8 @@ public class DateTimeParseProcessorTests extends AbstractSqlWireSerializingTestC
return new DateTimeParseProcessor(
new ConstantProcessor(randomRealisticUnicodeOfLengthBetween(0, 128)),
new ConstantProcessor(randomRealisticUnicodeOfLengthBetween(0, 128)),
randomZone()
randomZone(),
randomFrom(Parser.values())
);
}
@ -40,16 +44,23 @@ public class DateTimeParseProcessorTests extends AbstractSqlWireSerializingTestC
return DateTimeParseProcessor::new;
}
@Override
protected ZoneId instanceZoneId(DateTimeParseProcessor instance) {
return instance.zoneId();
}
@Override
protected DateTimeParseProcessor mutateInstance(DateTimeParseProcessor instance) {
Parser replaced = randomValueOtherThan(instance.parser(), () -> randomFrom(Parser.values()));
return new DateTimeParseProcessor(
new ConstantProcessor(ESTestCase.randomRealisticUnicodeOfLength(128)),
new ConstantProcessor(ESTestCase.randomRealisticUnicodeOfLength(128)),
randomZone()
randomZone(),
replaced
);
}
public void testInvalidInputs() {
public void testDateTimeInvalidInputs() {
SqlIllegalArgumentException siae = expectThrows(
SqlIllegalArgumentException.class,
() -> new DateTimeParse(Source.EMPTY, l(10), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null)
@ -67,7 +78,7 @@ public class DateTimeParseProcessorTests extends AbstractSqlWireSerializingTestC
() -> new DateTimeParse(Source.EMPTY, l("2020-04-07"), l("invalid"), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals(
"Invalid date/time string [2020-04-07] or pattern [invalid] is received; Unknown pattern letter: i",
"Invalid datetime string [2020-04-07] or pattern [invalid] is received; Unknown pattern letter: i",
siae.getMessage()
);
@ -76,7 +87,7 @@ public class DateTimeParseProcessorTests extends AbstractSqlWireSerializingTestC
() -> new DateTimeParse(Source.EMPTY, l("2020-04-07"), l("MM/dd"), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals(
"Invalid date/time string [2020-04-07] or pattern [MM/dd] is received; Text '2020-04-07' could not be parsed at index 2",
"Invalid datetime string [2020-04-07] or pattern [MM/dd] is received; Text '2020-04-07' could not be parsed at index 2",
siae.getMessage()
);
@ -85,7 +96,7 @@ public class DateTimeParseProcessorTests extends AbstractSqlWireSerializingTestC
() -> new DateTimeParse(Source.EMPTY, l("07/05/2020"), l("dd/MM/uuuu"), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals(
"Invalid date/time string [07/05/2020] or pattern [dd/MM/uuuu] is received; Unable to convert parsed text into [datetime]",
"Invalid datetime string [07/05/2020] or pattern [dd/MM/uuuu] is received; Unable to convert parsed text into [datetime]",
siae.getMessage()
);
@ -94,20 +105,69 @@ public class DateTimeParseProcessorTests extends AbstractSqlWireSerializingTestC
Source.EMPTY, l("10:20:30.123456789"), l("HH:mm:ss.SSSSSSSSS"), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals(
"Invalid date/time string [10:20:30.123456789] or pattern [HH:mm:ss.SSSSSSSSS] is received; "
"Invalid datetime string [10:20:30.123456789] or pattern [HH:mm:ss.SSSSSSSSS] is received; "
+ "Unable to convert parsed text into [datetime]",
siae.getMessage()
);
}
public void testTimeInvalidInputs() {
SqlIllegalArgumentException siae = expectThrows(
SqlIllegalArgumentException.class,
() -> new TimeParse(Source.EMPTY, l(10), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals("A string is required; received [10]", siae.getMessage());
siae = expectThrows(
SqlIllegalArgumentException.class,
() -> new TimeParse(Source.EMPTY, randomStringLiteral(), l(20), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals("A string is required; received [20]", siae.getMessage());
siae = expectThrows(
SqlIllegalArgumentException.class,
() -> new TimeParse(Source.EMPTY, l("11:04:07"), l("invalid"), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals(
"Invalid time string [11:04:07] or pattern [invalid] is received; Unknown pattern letter: i",
siae.getMessage()
);
siae = expectThrows(
SqlIllegalArgumentException.class,
() -> new TimeParse(Source.EMPTY, l("11:04:07"), l("HH:mm"), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals(
"Invalid time string [11:04:07] or pattern [HH:mm] is received; " +
"Text '11:04:07' could not be parsed, unparsed text found at index 5",
siae.getMessage()
);
siae = expectThrows(
SqlIllegalArgumentException.class,
() -> new TimeParse(Source.EMPTY, l("07/05/2020"), l("dd/MM/uuuu"), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals(
"Invalid time string [07/05/2020] or pattern [dd/MM/uuuu] is received; Unable to convert parsed text into [time]",
siae.getMessage()
);
}
public void testWithNulls() {
// DateTimeParse
assertNull(new DateTimeParse(Source.EMPTY, randomStringLiteral(), NULL, randomZone()).makePipe().asProcessor().process(null));
assertNull(new DateTimeParse(Source.EMPTY, randomStringLiteral(), l(""), randomZone()).makePipe().asProcessor().process(null));
assertNull(new DateTimeParse(Source.EMPTY, NULL, randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null));
assertNull(new DateTimeParse(Source.EMPTY, l(""), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null));
// TimeParse
assertNull(new TimeParse(Source.EMPTY, randomStringLiteral(), NULL, randomZone()).makePipe().asProcessor().process(null));
assertNull(new TimeParse(Source.EMPTY, randomStringLiteral(), l(""), randomZone()).makePipe().asProcessor().process(null));
assertNull(new TimeParse(Source.EMPTY, NULL, randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null));
assertNull(new TimeParse(Source.EMPTY, l(""), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null));
}
public void testParsing() {
// DateTimeParse
ZoneId zoneId = ZoneId.of("America/Sao_Paulo");
assertEquals(
dateTime(2020, 4, 7, 10, 20, 30, 123000000, zoneId),
@ -125,10 +185,23 @@ public class DateTimeParseProcessorTests extends AbstractSqlWireSerializingTestC
);
assertEquals(
dateTime(2020, 4, 7, 1, 50, 30, 123456789, zoneId),
new DateTimeParse(Source.EMPTY, l("07/04/2020 10:20:30.123456789 +05:30"), l("dd/MM/uuuu HH:mm:ss.SSSSSSSSS zz"), zoneId)
new DateTimeParse(Source.EMPTY, l("07/04/2020 10:20:30.123456789 +0530"), l("dd/MM/uuuu HH:mm:ss.SSSSSSSSS xx"), zoneId)
.makePipe()
.asProcessor()
.process(null)
);
// TimeParse
assertEquals(
time(10, 20, 30, 123000000, zoneId),
new TimeParse(Source.EMPTY, l("10:20:30.123"), l("HH:mm:ss.SSS"), zoneId).makePipe()
.asProcessor()
.process(null)
);
assertEquals(
time(10, 20, 30, 123456789, ZoneOffset.of("+05:30"), zoneId),
new TimeParse(Source.EMPTY, l("10:20:30.123456789 +0530"), l("HH:mm:ss.SSSSSSSSS xx"), zoneId).makePipe()
.asProcessor()
.process(null)
);
}
}

View File

@ -11,10 +11,14 @@ import org.elasticsearch.xpack.sql.util.DateUtils;
import java.time.Clock;
import java.time.Duration;
import java.time.OffsetTime;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import static org.elasticsearch.xpack.sql.util.DateUtils.EPOCH;
public class DateTimeTestUtils {
private DateTimeTestUtils() {}
@ -44,6 +48,22 @@ public class DateTimeTestUtils {
return OffsetTime.of(hour, minute, second, nano, ZoneOffset.UTC);
}
public static OffsetTime time(int hour, int minute, int second, int nano, ZoneOffset offset) {
return OffsetTime.of(hour, minute, second, nano, offset);
}
public static OffsetTime time(int hour, int minute, int second, int nano, ZoneOffset offset, ZoneId zoneId) {
OffsetTime ot = OffsetTime.of(hour, minute, second, nano, offset);
LocalDateTime ldt = ot.atDate(EPOCH).toLocalDateTime();
return ot.withOffsetSameInstant(zoneId.getRules().getValidOffsets(ldt).get(0));
}
public static OffsetTime time(int hour, int minute, int second, int nano, ZoneId zoneId) {
LocalTime lt = LocalTime.of(hour, minute, second, nano);
LocalDateTime ldt = lt.atDate(EPOCH);
return OffsetTime.of(lt, zoneId.getRules().getValidOffsets(ldt).get(0));
}
static ZonedDateTime nowWithMillisResolution() {
Clock millisResolutionClock = Clock.tick(Clock.systemUTC(), Duration.ofMillis(1));
return ZonedDateTime.now(millisResolutionClock);