SQL: Implement DATETIME_FORMAT function for date/time formatting (#54832) (#54942)

Implement DATETIME_FORMAT(<date/datetime/time>, <pattern>) function
which allows for formatting a timestamp to the specified format. The
patterns allowed are those of java.time.format.DateTimeFormatter.

Related to #53714
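
For orientation, a minimal illustrative query (values invented for this example, not taken from the docs or the test suite):

    SELECT DATETIME_FORMAT(CAST('2020-04-05T11:22:33' AS DATETIME), 'dd/MM/yyyy HH:mm') AS formatted;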

(cherry picked from commit 72be0b54a9299e87e785469cdc9aafac2a48c046)
Marios Trivyzas 2020-04-08 13:45:47 +02:00 committed by GitHub
parent 0d2195191d
commit 6afd60b082
20 changed files with 789 additions and 53 deletions


@ -404,6 +404,48 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[dateDiffDateTimeMinutes]
include-tagged::{sql-specs}/docs/docs.csv-spec[dateDiffDateMinutes]
--------------------------------------------------
[[sql-functions-datetime-datetimeformat]]
==== `DATETIME_FORMAT`
.Synopsis:
[source, sql]
--------------------------------------------------
DATETIME_FORMAT(
date_exp/datetime_exp/time_exp, <1>
string_exp) <2>
--------------------------------------------------
*Input*:
<1> date/datetime/time expression
<2> format pattern
*Output*: string
*Description*: Returns the date/datetime/time as a string using the format specified in the 2nd argument. The formatting
pattern used is the one from
https://docs.oracle.com/en/java/javase/14/docs/api/java.base/java/time/format/DateTimeFormatter.html[`java.time.format.DateTimeFormatter`].
If either of the two arguments is `null` or the pattern is an empty string, `null` is returned.
[NOTE]
If the 1st argument is of type `time`, then the pattern specified by the 2nd argument cannot contain date-related units
(e.g. 'dd', 'MM', 'YYYY', etc.). If it contains such units, an error is returned.
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[dateTimeFormatDate]
--------------------------------------------------
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[dateTimeFormatDateTime]
--------------------------------------------------
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[dateTimeFormatTime]
--------------------------------------------------
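Two further illustrative queries (hypothetical, not among the tagged snippets above) show the behaviors described earlier: an empty pattern yields `null`, while date-related pattern letters applied to a `time` value produce an error.
[source, sql]
--------------------------------------------------
SELECT DATETIME_FORMAT(CAST('2020-04-05' AS DATE), '') AS "empty_pattern";   -- returns null
SELECT DATETIME_FORMAT(CAST('11:22:33' AS TIME), 'MM/dd') AS "bad_pattern";  -- error: date units on a time value
--------------------------------------------------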
[[sql-functions-datetime-part]]
==== `DATE_PART/DATEPART`


@ -53,6 +53,7 @@
** <<sql-functions-current-timestamp>>
** <<sql-functions-datetime-add>>
** <<sql-functions-datetime-diff>>
** <<sql-functions-datetime-datetimeformat>>
** <<sql-functions-datetime-part>>
** <<sql-functions-datetime-trunc>>
** <<sql-functions-datetime-day>>


@ -44,6 +44,7 @@ CURTIME |SCALAR
DATEADD |SCALAR
DATEDIFF |SCALAR
DATEPART |SCALAR
DATETIME_FORMAT |SCALAR
DATETRUNC |SCALAR
DATE_ADD |SCALAR
DATE_DIFF |SCALAR


@ -476,6 +476,110 @@ null | 430
F | 391
;
selectDateTimeFormat
schema::df_date:s|df_datetime:s|df_time:s
SELECT DATETIME_FORMAT('2020-04-05T11:22:33.123Z'::date, 'dd/MM/YYYY HH:mm:ss.SSS') AS df_date,
DATETIME_FORMAT('2020-04-05T11:22:33.123Z'::datetime, 'dd/MM/YYYY HH:mm:ss.SS') AS df_datetime,
DATETIME_FORMAT('11:22:33.123456789Z'::time, 'HH:mm:ss.SS') AS df_time;
df_date | df_datetime | df_time
------------------------+------------------------+----------------
05/04/2020 00:00:00.000 | 05/04/2020 11:22:33.12 | 11:22:33.12
;
selectDateTimeFormatWithField
schema::birth_date:ts|df_birth_date1:s|df_birth_date2:s
SELECT birth_date, DATETIME_FORMAT(birth_date, 'MM/dd/YYYY') AS df_birth_date1, DATETIME_FORMAT(birth_date, concat(gender, 'M/dd')) AS df_birth_date2
FROM test_emp WHERE gender = 'M' AND emp_no BETWEEN 10037 AND 10052 ORDER BY emp_no;
birth_date | df_birth_date1 | df_birth_date2
-------------------------+----------------+----------------
1963-07-22 00:00:00.000Z | 07/22/1963 | 07/22
1960-07-20 00:00:00.000Z | 07/20/1960 | 07/20
1959-10-01 00:00:00.000Z | 10/01/1959 | 10/01
null | null | null
null | null | null
null | null | null
null | null | null
null | null | null
1958-05-21 00:00:00.000Z | 05/21/1958 | 05/21
1953-07-28 00:00:00.000Z | 07/28/1953 | 07/28
1961-02-26 00:00:00.000Z | 02/26/1961 | 02/26
;
dateTimeFormatWhere
schema::birth_date:ts|df_birth_date:s
SELECT birth_date, DATETIME_FORMAT(birth_date, 'MM') AS df_birth_date FROM test_emp
WHERE DATETIME_FORMAT(birth_date, 'MM')::integer > 10 ORDER BY emp_no LIMIT 10;
birth_date | df_birth_date
-------------------------+---------------
1959-12-03 00:00:00.000Z | 12
1953-11-07 00:00:00.000Z | 11
1952-12-24 00:00:00.000Z | 12
1963-11-26 00:00:00.000Z | 11
1956-12-13 00:00:00.000Z | 12
1956-11-14 00:00:00.000Z | 11
1962-12-29 00:00:00.000Z | 12
1961-11-02 00:00:00.000Z | 11
1952-11-13 00:00:00.000Z | 11
1962-11-26 00:00:00.000Z | 11
;
dateTimeFormatOrderBy
schema::birth_date:ts|df_birth_date:s
SELECT birth_date, DATETIME_FORMAT(birth_date, 'MM/dd/YYYY') AS df_birth_date FROM test_emp ORDER BY 2 DESC NULLS LAST LIMIT 10;
birth_date | df_birth_date
-------------------------+---------------
1962-12-29 00:00:00.000Z | 12/29/1962
1959-12-25 00:00:00.000Z | 12/25/1959
1952-12-24 00:00:00.000Z | 12/24/1952
1960-12-17 00:00:00.000Z | 12/17/1960
1956-12-13 00:00:00.000Z | 12/13/1956
1959-12-03 00:00:00.000Z | 12/03/1959
1957-12-03 00:00:00.000Z | 12/03/1957
1963-11-26 00:00:00.000Z | 11/26/1963
1962-11-26 00:00:00.000Z | 11/26/1962
1962-11-19 00:00:00.000Z | 11/19/1962
;
dateTimeFormatGroupBy
schema::count:l|df_birth_date:s
SELECT count(*) AS count, DATETIME_FORMAT(birth_date, 'MM') AS df_birth_date FROM test_emp GROUP BY df_birth_date ORDER BY 1 DESC, 2 DESC;
count | df_birth_date
-------+---------------
10 | 09
10 | 05
10 | null
9 | 10
9 | 07
8 | 11
8 | 04
8 | 02
7 | 12
7 | 06
6 | 08
6 | 01
2 | 03
;
dateTimeFormatHaving
schema::max:ts|df_birth_date:s
SELECT MAX(birth_date) AS max, DATETIME_FORMAT(birth_date, 'MM') AS df_birth_date FROM test_emp GROUP BY df_birth_date
HAVING DATETIME_FORMAT(MAX(birth_date), 'dd')::integer > 20 ORDER BY 1 DESC;
max | df_birth_date
-------------------------+---------------
1963-11-26 00:00:00.000Z | 11
1963-07-22 00:00:00.000Z | 07
1963-03-21 00:00:00.000Z | 03
1962-12-29 00:00:00.000Z | 12
1961-05-30 00:00:00.000Z | 05
1961-02-26 00:00:00.000Z | 02
;
selectDateTruncWithDateTime
schema::dt_hour:ts|dt_min:ts|dt_sec:ts|dt_millis:s|dt_micro:s|dt_nano:s
SELECT DATE_TRUNC('hour', '2019-09-04T11:22:33.123Z'::datetime) as dt_hour, DATE_TRUNC('minute', '2019-09-04T11:22:33.123Z'::datetime) as dt_min,


@ -240,6 +240,7 @@ CURTIME |SCALAR
DATEADD |SCALAR
DATEDIFF |SCALAR
DATEPART |SCALAR
DATETIME_FORMAT |SCALAR
DATETRUNC |SCALAR
DATE_ADD |SCALAR
DATE_DIFF |SCALAR
@ -2547,6 +2548,35 @@ SELECT DATE_DIFF('minutes', '2019-09-04'::date, '2015-08-17T22:33:11.567Z'::date
// end::dateDiffDateMinutes
;
dateTimeFormatDate
// tag::dateTimeFormatDate
SELECT DATETIME_FORMAT(CAST('2020-04-05' AS DATE), 'dd/MM/YYYY') AS "date";
date
------------------
05/04/2020
// end::dateTimeFormatDate
;
dateTimeFormatDateTime
// tag::dateTimeFormatDateTime
SELECT DATETIME_FORMAT(CAST('2020-04-05T11:22:33.987654' AS DATETIME), 'dd/MM/YYYY HH:mm:ss.SS') AS "datetime";
datetime
------------------
05/04/2020 11:22:33.98
// end::dateTimeFormatDateTime
;
dateTimeFormatTime
// tag::dateTimeFormatTime
SELECT DATETIME_FORMAT(CAST('11:22:33.987' AS TIME), 'HH mm ss.S') AS "time";
time
------------------
11 22 33.9
// end::dateTimeFormatTime
;
datePartDateTimeYears
// tag::datePartDateTimeYears


@ -32,6 +32,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentTi
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateAdd;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateDiff;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePart;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFormat;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth;
@ -168,6 +169,7 @@ public class SqlFunctionRegistry extends FunctionRegistry {
def(DateAdd.class, DateAdd::new, "DATEADD", "DATE_ADD", "TIMESTAMPADD", "TIMESTAMP_ADD"),
def(DateDiff.class, DateDiff::new, "DATEDIFF", "DATE_DIFF", "TIMESTAMPDIFF", "TIMESTAMP_DIFF"),
def(DatePart.class, DatePart::new, "DATEPART", "DATE_PART"),
def(DateTimeFormat.class, DateTimeFormat::new, "DATETIME_FORMAT"),
def(DateTrunc.class, DateTrunc::new, "DATETRUNC", "DATE_TRUNC"),
def(HourOfDay.class, HourOfDay::new, "HOUR_OF_DAY", "HOUR"),
def(IsoDayOfWeek.class, IsoDayOfWeek::new, "ISO_DAY_OF_WEEK", "ISODAYOFWEEK", "ISODOW", "IDOW"),


@ -15,6 +15,7 @@ import org.elasticsearch.xpack.ql.type.Converter;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateAddProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateDiffProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePartProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFormatProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor;
@ -84,6 +85,7 @@ public final class Processors {
entries.add(new Entry(Processor.class, DateDiffProcessor.NAME, DateDiffProcessor::new));
entries.add(new Entry(Processor.class, DatePartProcessor.NAME, DatePartProcessor::new));
entries.add(new Entry(Processor.class, DateTruncProcessor.NAME, DateTruncProcessor::new));
entries.add(new Entry(Processor.class, DateTimeFormatProcessor.NAME, DateTimeFormatProcessor::new));
// math
entries.add(new Entry(Processor.class, BinaryMathProcessor.NAME, BinaryMathProcessor::new));
entries.add(new Entry(Processor.class, BinaryOptionalMathProcessor.NAME, BinaryOptionalMathProcessor::new));


@ -0,0 +1,92 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.Expressions;
import org.elasticsearch.xpack.ql.tree.Source;
import java.time.ZoneId;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString;
/**
* Abstract super class for functions like {@link DateTrunc} and {@link DatePart}
* which require an argument denoting a unit of date/time.
*/
public abstract class BinaryDateTimeDatePartFunction extends BinaryDateTimeFunction {
public BinaryDateTimeDatePartFunction(Source source, Expression datePart, Expression timestamp, ZoneId zoneId) {
super(source, datePart, timestamp, zoneId);
}
@Override
protected TypeResolution resolveType() {
TypeResolution resolution = isString(left(), sourceText(), Expressions.ParamOrdinal.FIRST);
if (resolution.unresolved()) {
return resolution;
}
if (left().foldable()) {
String datePartValue = (String) left().fold();
if (datePartValue != null && resolveDateTimeField(datePartValue) == false) {
List<String> similar = findSimilarDateTimeFields(datePartValue);
if (similar.isEmpty()) {
return new TypeResolution(
format(
null,
"first argument of [{}] must be one of {} or their aliases; found value [{}]",
sourceText(),
validDateTimeFieldValues(),
Expressions.name(left())
)
);
} else {
return new TypeResolution(
format(
null,
"Unknown value [{}] for first argument of [{}]; did you mean {}?",
Expressions.name(left()),
sourceText(),
similar
)
);
}
}
}
return TypeResolution.TYPE_RESOLVED;
}
protected abstract boolean resolveDateTimeField(String dateTimeField);
protected abstract List<String> findSimilarDateTimeFields(String dateTimeField);
protected abstract List<String> validDateTimeFieldValues();
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), zoneId());
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
BinaryDateTimeDatePartFunction that = (BinaryDateTimeDatePartFunction) o;
return zoneId().equals(that.zoneId());
}
}


@ -13,11 +13,8 @@ import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate;
import org.elasticsearch.xpack.ql.tree.Source;
import java.time.ZoneId;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString;
import static org.elasticsearch.xpack.ql.expression.gen.script.ParamsBuilder.paramsBuilder;
public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
@ -30,43 +27,12 @@ public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
}
@Override
protected TypeResolution resolveType() {
protected abstract TypeResolution resolveType();
TypeResolution resolution = isString(left(), sourceText(), Expressions.ParamOrdinal.FIRST);
if (resolution.unresolved()) {
return resolution;
}
if (left().foldable()) {
String datePartValue = (String) left().fold();
if (datePartValue != null && resolveDateTimeField(datePartValue) == false) {
List<String> similar = findSimilarDateTimeFields(datePartValue);
if (similar.isEmpty()) {
return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases; found value [{}]",
sourceText(),
validDateTimeFieldValues(),
Expressions.name(left())));
} else {
return new TypeResolution(format(null, "Unknown value [{}] for first argument of [{}]; did you mean {}?",
Expressions.name(left()),
sourceText(),
similar));
}
}
}
return TypeResolution.TYPE_RESOLVED;
}
public ZoneId zoneId() {
return zoneId;
}
protected abstract boolean resolveDateTimeField(String dateTimeField);
protected abstract List<String> findSimilarDateTimeFields(String dateTimeField);
protected abstract List<String> validDateTimeFieldValues();
@Override
protected Pipe makePipe() {
return createPipe(Expressions.pipe(left()), Expressions.pipe(right()), zoneId);


@ -28,7 +28,7 @@ import java.util.function.ToIntFunction;
import static org.elasticsearch.xpack.sql.expression.SqlTypeResolutions.isDate;
public class DatePart extends BinaryDateTimeFunction {
public class DatePart extends BinaryDateTimeDatePartFunction {
public enum Part implements DateTimeField {
YEAR(DateTimeExtractor.YEAR::extract, "years", "yyyy", "yy"),


@ -0,0 +1,70 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.Expressions;
import org.elasticsearch.xpack.ql.expression.function.scalar.BinaryScalarFunction;
import org.elasticsearch.xpack.ql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.ql.tree.NodeInfo;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.ql.type.DataType;
import org.elasticsearch.xpack.ql.type.DataTypes;
import java.time.ZoneId;
import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString;
import static org.elasticsearch.xpack.sql.expression.SqlTypeResolutions.isDateOrTime;
public class DateTimeFormat extends BinaryDateTimeFunction {
public DateTimeFormat(Source source, Expression timestamp, Expression pattern, ZoneId zoneId) {
super(source, timestamp, pattern, zoneId);
}
@Override
public DataType dataType() {
return DataTypes.KEYWORD;
}
@Override
protected TypeResolution resolveType() {
TypeResolution resolution = isDateOrTime(left(), sourceText(), Expressions.ParamOrdinal.FIRST);
if (resolution.unresolved()) {
return resolution;
}
resolution = isString(right(), sourceText(), Expressions.ParamOrdinal.SECOND);
if (resolution.unresolved()) {
return resolution;
}
return TypeResolution.TYPE_RESOLVED;
}
@Override
protected BinaryScalarFunction replaceChildren(Expression timestamp, Expression pattern) {
return new DateTimeFormat(source(), timestamp, pattern, zoneId());
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, DateTimeFormat::new, left(), right(), zoneId());
}
@Override
protected String scriptMethodName() {
return "dateTimeFormat";
}
@Override
public Object fold() {
return DateTimeFormatProcessor.process(left().fold(), right().fold(), zoneId());
}
@Override
protected Pipe createPipe(Pipe timestamp, Pipe pattern, ZoneId zoneId) {
return new DateTimeFormatPipe(source(), this, timestamp, pattern, zoneId);
}
}


@ -0,0 +1,36 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.ql.expression.gen.processor.Processor;
import org.elasticsearch.xpack.ql.tree.NodeInfo;
import org.elasticsearch.xpack.ql.tree.Source;
import java.time.ZoneId;
public class DateTimeFormatPipe extends BinaryDateTimePipe {
public DateTimeFormatPipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId) {
super(source, expression, left, right, zoneId);
}
@Override
protected NodeInfo<DateTimeFormatPipe> info() {
return NodeInfo.create(this, DateTimeFormatPipe::new, expression(), left(), right(), zoneId());
}
@Override
protected DateTimeFormatPipe replaceChildren(Pipe left, Pipe right) {
return new DateTimeFormatPipe(source(), expression(), left, right, zoneId());
}
@Override
protected Processor makeProcessor(Processor left, Processor right, ZoneId zoneId) {
return new DateTimeFormatProcessor(left, right, zoneId);
}
}


@ -0,0 +1,80 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.xpack.ql.expression.gen.processor.Processor;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import java.io.IOException;
import java.time.DateTimeException;
import java.time.OffsetTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;
import static org.elasticsearch.xpack.sql.util.DateUtils.asTimeAtZone;
public class DateTimeFormatProcessor extends BinaryDateTimeProcessor {
public static final String NAME = "dtformat";
public DateTimeFormatProcessor(Processor source1, Processor source2, ZoneId zoneId) {
super(source1, source2, zoneId);
}
public DateTimeFormatProcessor(StreamInput in) throws IOException {
super(in);
}
/**
* Used in Painless scripting
*/
public static Object process(Object timestamp, Object pattern, ZoneId zoneId) {
if (timestamp == null || pattern == null) {
return null;
}
if (pattern instanceof String == false) {
throw new SqlIllegalArgumentException("A string is required; received [{}]", pattern);
}
if (((String) pattern).isEmpty()) {
return null;
}
if (timestamp instanceof ZonedDateTime == false && timestamp instanceof OffsetTime == false) {
throw new SqlIllegalArgumentException("A date/datetime/time is required; received [{}]", timestamp);
}
TemporalAccessor ta;
if (timestamp instanceof ZonedDateTime) {
ta = ((ZonedDateTime) timestamp).withZoneSameInstant(zoneId);
} else {
ta = asTimeAtZone((OffsetTime) timestamp, zoneId);
}
try {
return DateTimeFormatter.ofPattern((String) pattern, Locale.ROOT).format(ta);
} catch (IllegalArgumentException | DateTimeException e) {
throw new SqlIllegalArgumentException(
"Invalid pattern [{}] is received for formatting date/time [{}]; {}",
pattern,
timestamp,
e.getMessage()
);
}
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Object doProcess(Object timestamp, Object pattern) {
return process(timestamp, pattern, zoneId());
}
}
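
As a standalone sketch (not part of this commit), the plain java.time behavior that the processor builds on can be reproduced directly: a ZonedDateTime accepts both date and time pattern letters, while an OffsetTime rejects date-related fields, which is what the processor surfaces as a SqlIllegalArgumentException.

    import java.time.DateTimeException;
    import java.time.OffsetTime;
    import java.time.ZonedDateTime;
    import java.time.format.DateTimeFormatter;
    import java.util.Locale;

    public class DateTimeFormatSketch {
        public static void main(String[] args) {
            // Datetime values carry date and time fields, so any DateTimeFormatter pattern applies.
            DateTimeFormatter full = DateTimeFormatter.ofPattern("dd/MM/yyyy HH:mm:ss.SS", Locale.ROOT);
            System.out.println(full.format(ZonedDateTime.parse("2020-04-05T11:22:33.123Z"))); // 05/04/2020 11:22:33.12

            // A time value has no date fields; date-related pattern letters fail with a DateTimeException,
            // which DateTimeFormatProcessor rethrows as SqlIllegalArgumentException.
            try {
                DateTimeFormatter.ofPattern("MM/dd", Locale.ROOT).format(OffsetTime.parse("11:22:33Z"));
            } catch (DateTimeException e) {
                System.out.println("rejected: " + e.getMessage()); // Unsupported field: MonthOfYear
            }
        }
    }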


@ -37,7 +37,7 @@ import static org.elasticsearch.xpack.ql.util.DateUtils.SECONDS_PER_MINUTE;
import static org.elasticsearch.xpack.sql.expression.SqlTypeResolutions.isDateOrInterval;
import static org.elasticsearch.xpack.sql.type.SqlDataTypes.isInterval;
public class DateTrunc extends BinaryDateTimeFunction {
public class DateTrunc extends BinaryDateTimeDatePartFunction {
public enum Part implements DateTimeField {


@ -13,6 +13,7 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateAddProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateDiffProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePartProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFormatProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor;
@ -284,7 +285,11 @@ public class InternalSqlScriptUtils extends InternalQlScriptUtils {
}
public static Integer datePart(String dateField, Object dateTime, String tzId) {
return (Integer) DatePartProcessor.process(dateField, asDateTime(dateTime), ZoneId.of(tzId));
}
public static String dateTimeFormat(Object dateTime, String pattern, String tzId) {
return (String) DateTimeFormatProcessor.process(asDateTime(dateTime), pattern, ZoneId.of(tzId));
}
public static ZonedDateTime asDateTime(Object dateTime) {


@ -129,6 +129,7 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS
Integer dateDiff(String, Object, Object, String)
def dateTrunc(String, Object, String)
Integer datePart(String, Object, String)
String dateTimeFormat(Object, String, String)
IntervalDayTime intervalDayTime(String, String)
IntervalYearMonth intervalYearMonth(String, String)
ZonedDateTime asDateTime(Object)


@ -212,6 +212,15 @@ public class VerifierErrorMessagesTests extends ESTestCase {
assertEquals("1:8: Invalid datetime field [ABS]. Use any datetime function.", error("SELECT EXTRACT(ABS FROM date) FROM test")); assertEquals("1:8: Invalid datetime field [ABS]. Use any datetime function.", error("SELECT EXTRACT(ABS FROM date) FROM test"));
} }
public void testDateTruncValidArgs() {
accept("SELECT DATE_TRUNC('decade', date) FROM test");
accept("SELECT DATE_TRUNC('decades', date) FROM test");
accept("SELECT DATETRUNC('day', date) FROM test");
accept("SELECT DATETRUNC('days', date) FROM test");
accept("SELECT DATE_TRUNC('dd', date) FROM test");
accept("SELECT DATE_TRUNC('d', date) FROM test");
}
public void testDateTruncInvalidArgs() {
assertEquals("1:8: first argument of [DATE_TRUNC(int, date)] must be [string], found value [int] type [integer]",
error("SELECT DATE_TRUNC(int, date) FROM test"));
@ -285,15 +294,6 @@ public class VerifierErrorMessagesTests extends ESTestCase {
error("SELECT DATE_DIFF('dz', int, date) FROM test")); error("SELECT DATE_DIFF('dz', int, date) FROM test"));
} }
public void testDateTruncValidArgs() {
accept("SELECT DATE_TRUNC('decade', date) FROM test");
accept("SELECT DATE_TRUNC('decades', date) FROM test");
accept("SELECT DATETRUNC('day', date) FROM test");
accept("SELECT DATETRUNC('days', date) FROM test");
accept("SELECT DATE_TRUNC('dd', date) FROM test");
accept("SELECT DATE_TRUNC('d', date) FROM test");
}
public void testDatePartInvalidArgs() {
assertEquals("1:8: first argument of [DATE_PART(int, date)] must be [string], found value [int] type [integer]",
error("SELECT DATE_PART(int, date) FROM test"));
@ -320,6 +320,23 @@ public class VerifierErrorMessagesTests extends ESTestCase {
accept("SELECT DATE_PART('ms', date) FROM test"); accept("SELECT DATE_PART('ms', date) FROM test");
} }
public void testDateTimeFormatValidArgs() {
accept("SELECT DATETIME_FORMAT(date, 'HH:mm:ss.SSS VV') FROM test");
accept("SELECT DATETIME_FORMAT(date::date, 'MM/dd/YYYY') FROM test");
accept("SELECT DATETIME_FORMAT(date::time, 'HH:mm:ss Z') FROM test");
}
public void testDateTimeFormatInvalidArgs() {
assertEquals(
"1:8: first argument of [DATETIME_FORMAT(int, keyword)] must be [date, time or datetime], found value [int] type [integer]",
error("SELECT DATETIME_FORMAT(int, keyword) FROM test")
);
assertEquals(
"1:8: second argument of [DATETIME_FORMAT(date, int)] must be [string], found value [int] type [integer]",
error("SELECT DATETIME_FORMAT(date, int) FROM test")
);
}
public void testValidDateTimeFunctionsOnTime() {
accept("SELECT HOUR_OF_DAY(CAST(date AS TIME)) FROM test");
accept("SELECT MINUTE_OF_HOUR(CAST(date AS TIME)) FROM test");


@ -0,0 +1,126 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils;
import org.elasticsearch.xpack.ql.expression.gen.pipeline.BinaryPipe;
import org.elasticsearch.xpack.ql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.ql.tree.AbstractNodeTestCase;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.ql.tree.SourceTests;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import static org.elasticsearch.xpack.ql.expression.Expressions.pipe;
import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral;
import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.randomStringLiteral;
import static org.elasticsearch.xpack.ql.tree.SourceTests.randomSource;
public class DateTimeFormatPipeTests extends AbstractNodeTestCase<DateTimeFormatPipe, Pipe> {
public static DateTimeFormatPipe randomDateTimeFormatPipe() {
return (DateTimeFormatPipe) new DateTimeFormat(randomSource(), randomDatetimeLiteral(), randomStringLiteral(), randomZone())
.makePipe();
}
@Override
protected DateTimeFormatPipe randomInstance() {
return randomDateTimeFormatPipe();
}
private Expression randomDateTimeFormatPipeExpression() {
return randomDateTimeFormatPipe().expression();
}
@Override
public void testTransform() {
// test transforming only the properties (source, expression),
// skipping the children (the two parameters of the binary function) which are tested separately
DateTimeFormatPipe b1 = randomInstance();
Expression newExpression = randomValueOtherThan(b1.expression(), this::randomDateTimeFormatPipeExpression);
DateTimeFormatPipe newB = new DateTimeFormatPipe(b1.source(), newExpression, b1.left(), b1.right(), b1.zoneId());
assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class));
DateTimeFormatPipe b2 = randomInstance();
Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource);
newB = new DateTimeFormatPipe(newLoc, b2.expression(), b2.left(), b2.right(), b2.zoneId());
assertEquals(newB, b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class));
}
@Override
public void testReplaceChildren() {
DateTimeFormatPipe b = randomInstance();
Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), FunctionTestUtils::randomDatetimeLiteral)));
Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), FunctionTestUtils::randomStringLiteral)));
ZoneId newZoneId = randomValueOtherThan(b.zoneId(), ESTestCase::randomZone);
DateTimeFormatPipe newB = new DateTimeFormatPipe(b.source(), b.expression(), b.left(), b.right(), newZoneId);
BinaryPipe transformed = newB.replaceChildren(newLeft, b.right());
assertEquals(transformed.left(), newLeft);
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), b.right());
transformed = newB.replaceChildren(b.left(), newRight);
assertEquals(transformed.left(), b.left());
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), newRight);
transformed = newB.replaceChildren(newLeft, newRight);
assertEquals(transformed.left(), newLeft);
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), newRight);
}
@Override
protected DateTimeFormatPipe mutate(DateTimeFormatPipe instance) {
List<Function<DateTimeFormatPipe, DateTimeFormatPipe>> randoms = new ArrayList<>();
randoms.add(
f -> new DateTimeFormatPipe(
f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomDatetimeLiteral))),
f.right(),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)
)
);
randoms.add(
f -> new DateTimeFormatPipe(
f.source(),
f.expression(),
f.left(),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomStringLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)
)
);
randoms.add(
f -> new DateTimeFormatPipe(
f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomDatetimeLiteral))),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomStringLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)
)
);
return randomFrom(randoms).apply(instance);
}
@Override
protected DateTimeFormatPipe copy(DateTimeFormatPipe instance) {
return new DateTimeFormatPipe(instance.source(), instance.expression(), instance.left(), instance.right(), instance.zoneId());
}
}


@ -0,0 +1,145 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ql.expression.Literal;
import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.sql.AbstractSqlWireSerializingTestCase;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import java.time.Instant;
import java.time.OffsetTime;
import java.time.ZoneId;
import static org.elasticsearch.xpack.ql.expression.Literal.NULL;
import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.l;
import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral;
import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.randomStringLiteral;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.time;
import static org.elasticsearch.xpack.sql.type.SqlDataTypes.TIME;
public class DateTimeFormatProcessorTests extends AbstractSqlWireSerializingTestCase<DateTimeFormatProcessor> {
public static DateTimeFormatProcessor randomDateTimeFormatProcessor() {
return new DateTimeFormatProcessor(
new ConstantProcessor(DateTimeTestUtils.nowWithMillisResolution()),
new ConstantProcessor(randomRealisticUnicodeOfLengthBetween(0, 128)),
randomZone()
);
}
public static Literal randomTimeLiteral() {
return l(OffsetTime.ofInstant(Instant.ofEpochMilli(ESTestCase.randomLong()), ESTestCase.randomZone()), TIME);
}
@Override
protected DateTimeFormatProcessor createTestInstance() {
return randomDateTimeFormatProcessor();
}
@Override
protected Reader<DateTimeFormatProcessor> instanceReader() {
return DateTimeFormatProcessor::new;
}
@Override
protected ZoneId instanceZoneId(DateTimeFormatProcessor instance) {
return instance.zoneId();
}
@Override
protected DateTimeFormatProcessor mutateInstance(DateTimeFormatProcessor instance) {
return new DateTimeFormatProcessor(
new ConstantProcessor(DateTimeTestUtils.nowWithMillisResolution()),
new ConstantProcessor(ESTestCase.randomRealisticUnicodeOfLength(128)),
randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone)
);
}
public void testInvalidInputs() {
SqlIllegalArgumentException siae = expectThrows(
SqlIllegalArgumentException.class,
() -> new DateTimeFormat(Source.EMPTY, l("foo"), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals("A date/datetime/time is required; received [foo]", siae.getMessage());
siae = expectThrows(
SqlIllegalArgumentException.class,
() -> new DateTimeFormat(Source.EMPTY, randomDatetimeLiteral(), l(5), randomZone()).makePipe().asProcessor().process(null)
);
assertEquals("A string is required; received [5]", siae.getMessage());
siae = expectThrows(
SqlIllegalArgumentException.class,
() -> new DateTimeFormat(Source.EMPTY, l(dateTime(2019, 9, 3, 18, 10, 37, 0)), l("invalid"), randomZone()).makePipe()
.asProcessor()
.process(null)
);
assertEquals(
"Invalid pattern [invalid] is received for formatting date/time [2019-09-03T18:10:37Z]; Unknown pattern letter: i",
siae.getMessage()
);
siae = expectThrows(
SqlIllegalArgumentException.class,
() -> new DateTimeFormat(Source.EMPTY, l(time(18, 10, 37, 123000000)), l("MM/dd"), randomZone()).makePipe()
.asProcessor()
.process(null)
);
assertEquals(
"Invalid pattern [MM/dd] is received for formatting date/time [18:10:37.123Z]; Unsupported field: MonthOfYear",
siae.getMessage()
);
}
public void testWithNulls() {
assertNull(new DateTimeFormat(Source.EMPTY, randomDatetimeLiteral(), NULL, randomZone()).makePipe().asProcessor().process(null));
assertNull(new DateTimeFormat(Source.EMPTY, randomDatetimeLiteral(), l(""), randomZone()).makePipe().asProcessor().process(null));
assertNull(new DateTimeFormat(Source.EMPTY, NULL, randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null));
}
public void testFormatting() {
ZoneId zoneId = ZoneId.of("Etc/GMT-10");
Literal dateTime = l(dateTime(2019, 9, 3, 18, 10, 37, 123456789));
assertEquals("AD : 3", new DateTimeFormat(Source.EMPTY, dateTime, l("G : Q"), zoneId).makePipe().asProcessor().process(null));
assertEquals(
"2019-09-04",
new DateTimeFormat(Source.EMPTY, dateTime, l("YYYY-MM-dd"), zoneId).makePipe().asProcessor().process(null)
);
assertEquals(
"04:10:37.123456",
new DateTimeFormat(Source.EMPTY, dateTime, l("HH:mm:ss.SSSSSS"), zoneId).makePipe().asProcessor().process(null)
);
assertEquals(
"2019-09-04 04:10:37.12345678",
new DateTimeFormat(Source.EMPTY, dateTime, l("YYYY-MM-dd HH:mm:ss.SSSSSSSS"), zoneId).makePipe().asProcessor().process(null)
);
assertEquals("+1000", new DateTimeFormat(Source.EMPTY, dateTime, l("Z"), zoneId).makePipe().asProcessor().process(null));
assertEquals("Etc/GMT-10", new DateTimeFormat(Source.EMPTY, dateTime, l("z"), zoneId).makePipe().asProcessor().process(null));
assertEquals("Etc/GMT-10", new DateTimeFormat(Source.EMPTY, dateTime, l("VV"), zoneId).makePipe().asProcessor().process(null));
zoneId = ZoneId.of("America/Sao_Paulo");
assertEquals("-0300", new DateTimeFormat(Source.EMPTY, dateTime, l("Z"), zoneId).makePipe().asProcessor().process(null));
assertEquals("BRT", new DateTimeFormat(Source.EMPTY, dateTime, l("z"), zoneId).makePipe().asProcessor().process(null));
assertEquals(
"America/Sao_Paulo",
new DateTimeFormat(Source.EMPTY, dateTime, l("VV"), zoneId).makePipe().asProcessor().process(null)
);
assertEquals(
"07:11:22.1234",
new DateTimeFormat(Source.EMPTY, l(time(10, 11, 22, 123456789), TIME), l("HH:mm:ss.SSSS"), zoneId).makePipe()
.asProcessor()
.process(null)
);
}
}


@ -445,6 +445,22 @@ public class QueryTranslatorTests extends ESTestCase {
assertEquals("[{v=month}, {v=date}, {v=Z}, {v=2018-09-04T00:00:00.000Z}]", sc.script().params().toString()); assertEquals("[{v=month}, {v=date}, {v=Z}, {v=2018-09-04T00:00:00.000Z}]", sc.script().params().toString());
} }
public void testTranslateDateTimeFormat_WhereClause_Painless() {
LogicalPlan p = plan("SELECT int FROM test WHERE DATETIME_FORMAT(date, 'YYYY_MM_dd') = '2018_09_04'");
assertTrue(p instanceof Project);
assertTrue(p.children().get(0) instanceof Filter);
Expression condition = ((Filter) p.children().get(0)).condition();
assertFalse(condition.foldable());
QueryTranslation translation = QueryTranslator.toQuery(condition, false);
assertNull(translation.aggFilter);
assertTrue(translation.query instanceof ScriptQuery);
ScriptQuery sc = (ScriptQuery) translation.query;
assertEquals("InternalQlScriptUtils.nullSafeFilter(InternalQlScriptUtils.eq(InternalSqlScriptUtils.dateTimeFormat(" +
"InternalQlScriptUtils.docValue(doc,params.v0),params.v1,params.v2),params.v3))",
sc.script().toString());
assertEquals("[{v=date}, {v=YYYY_MM_dd}, {v=Z}, {v=2018_09_04}]", sc.script().params().toString());
}
public void testLikeOnInexact() {
LogicalPlan p = plan("SELECT * FROM test WHERE some.string LIKE '%a%'");
assertTrue(p instanceof Project);