SQL: Implement DATEADD function (#47747)

Implement DATEADD/TIMESTAMPADD function as per the MS-SQL spec:
https://docs.microsoft.com/en-us/sql/t-sql/functions/dateadd-transact-sql?view=sql-server-2017
which allows a user to add/subtract specified number of specified units
to/from a date/datetime field/expression.

Closes: #47746
(cherry picked from commit e624bc281bebb4bbe0b0c2e0a8cbc712e50097a8)
This commit is contained in:
Marios Trivyzas 2019-10-10 15:24:36 +02:00
parent b5afa95fd8
commit 59b3294bc9
19 changed files with 1357 additions and 49 deletions

View File

@ -248,6 +248,79 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[filterNow]
Currently, using a _precision_ greater than 3 doesn't make any difference to the output of the
function as the maximum number of second fractional digits returned is 3 (milliseconds).
[[sql-functions-datetime-add]]
==== `DATE_ADD/DATEADD/TIMESTAMPADD/TIMESTAMP_ADD`
.Synopsis:
[source, sql]
--------------------------------------------------
DATE_ADD(
string_exp, <1>
integer_exp, <2>
datetime_exp) <3>
--------------------------------------------------
*Input*:
<1> string expression denoting the date/time unit to add to the date/datetime
<2> integer expression denoting how many of the above units should be added to the date/datetime;
if a negative value is used, it results in a subtraction from the date/datetime
<3> date/datetime expression
*Output*: datetime
.Description:
Add the given number of date/time units to a date/datetime. If the number of units is negative then it's subtracted from
the date/datetime. If any of the three arguments is `null` a `null` is returned.
[cols="^,^"]
|===
2+h|Datetime units to add/subtract
s|unit
s|abbreviations
| year | years, yy, yyyy
| quarter | quarters, qq, q
| month | months, mm, m
| dayofyear | dy, y
| day | days, dd, d
| week | weeks, wk, ww
| weekday | weekdays, dw
| hour | hours, hh
| minute | minutes, mi, n
| second | seconds, ss, s
| millisecond | milliseconds, ms
| microsecond | microseconds, mcs
| nanosecond | nanoseconds, ns
|===
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[dateAddDateTimeYears]
--------------------------------------------------
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[dateAddDateTimeWeeks]
--------------------------------------------------
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[dateAddDateTimeSeconds]
--------------------------------------------------
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[dateAddDateQuarters]
--------------------------------------------------
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[dateAddDateMinutes]
--------------------------------------------------
[[sql-functions-datetime-part]] [[sql-functions-datetime-part]]
==== `DATE_PART/DATEPART` ==== `DATE_PART/DATEPART`

View File

@ -51,6 +51,7 @@
** <<sql-functions-current-date>> ** <<sql-functions-current-date>>
** <<sql-functions-current-time>> ** <<sql-functions-current-time>>
** <<sql-functions-current-timestamp>> ** <<sql-functions-current-timestamp>>
** <<sql-functions-datetime-add>>
** <<sql-functions-datetime-part>> ** <<sql-functions-datetime-part>>
** <<sql-functions-datetime-trunc>> ** <<sql-functions-datetime-trunc>>
** <<sql-functions-datetime-day>> ** <<sql-functions-datetime-day>>

View File

@ -41,8 +41,10 @@ CURRENT_DATE |SCALAR
CURRENT_TIME |SCALAR CURRENT_TIME |SCALAR
CURRENT_TIMESTAMP|SCALAR CURRENT_TIMESTAMP|SCALAR
CURTIME |SCALAR CURTIME |SCALAR
DATEADD |SCALAR
DATEPART |SCALAR DATEPART |SCALAR
DATETRUNC |SCALAR DATETRUNC |SCALAR
DATE_ADD |SCALAR
DATE_PART |SCALAR DATE_PART |SCALAR
DATE_TRUNC |SCALAR DATE_TRUNC |SCALAR
DAY |SCALAR DAY |SCALAR
@ -79,6 +81,8 @@ NOW |SCALAR
QUARTER |SCALAR QUARTER |SCALAR
SECOND |SCALAR SECOND |SCALAR
SECOND_OF_MINUTE |SCALAR SECOND_OF_MINUTE |SCALAR
TIMESTAMPADD |SCALAR
TIMESTAMP_ADD |SCALAR
TODAY |SCALAR TODAY |SCALAR
WEEK |SCALAR WEEK |SCALAR
WEEK_OF_YEAR |SCALAR WEEK_OF_YEAR |SCALAR

View File

@ -123,6 +123,160 @@ SELECT WEEK(birth_date) week, birth_date FROM test_emp WHERE WEEK(birth_date) >
2 |1953-01-07T00:00:00.000Z 2 |1953-01-07T00:00:00.000Z
; ;
selectAddWithDateTime
schema::dt_year:s|dt_quarter:s|dt_month:s|dt_week:s|dt_day:s|dt_hours:s|dt_min:s|dt_sec:s|dt_millis:s|dt_mcsec:s|dt_nsec:s
SELECT DATE_ADD('year', 10, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_year, DATE_ADD('quarter', -10, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_quarter, DATE_ADD('month', 20, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_month,
DATE_ADD('week', -50, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_week, DATE_ADD('day', 200, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_day,
DATE_ADD('hours', -3000, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_hours, DATE_ADD('minutes', 5678, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_min,
DATE_ADD('second', 987654, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_sec, DATE_ADD('milliseconds', -12345678, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_millis,
DATE_ADD('mcs', -12345678, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_mcsec, DATE_ADD('nanosecond', 12345600, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_nsec;
dt_year | dt_quarter | dt_month | dt_week | dt_day | dt_hours | dt_min | dt_sec | dt_millis | dt_mcsec | dt_nsec
-------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+-------------------------
2029-09-04T11:22:33.123Z | 2017-03-04T11:22:33.123Z | 2021-05-04T11:22:33.123Z | 2018-09-19T11:22:33.123Z | 2020-03-22T11:22:33.123Z | 2019-05-02T11:22:33.123Z | 2019-09-08T10:00:33.123Z | 2019-09-15T21:43:27.123Z | 2019-09-04T07:56:47.445Z | 2019-09-04T11:22:20.777Z | 2019-09-04T11:22:33.135Z
;
selectAddWithDate
schema::dt_year:ts|dt_quarter:ts|dt_month:ts|dt_week:ts|dt_day:ts|dt_hours:ts|dt_min:ts|dt_sec:ts|dt_millis:s|dt_mcsec:s|dt_nsec:s
SELECT DATEADD('year', 10, '2019-09-04'::date) as dt_year, DATEADD('quarter', -10, '2019-09-04'::date) as dt_quarter, DATEADD('month', 20, '2019-09-04'::date) as dt_month,
DATEADD('week', -50, '2019-09-04'::date) as dt_week, DATEADD('day', 200, '2019-09-04'::date) as dt_day,
DATEADD('hours', -3000, '2019-09-04'::date) as dt_hours, DATEADD('minutes', 5678, '2019-09-04'::date) as dt_min, DATEADD('second', 987654, '2019-09-04'::date) as dt_sec,
DATEADD('milliseconds', 4314144, '2019-09-04'::date)::string as dt_millis, DATEADD('mcs', -12345678, '2019-09-04'::date)::string as dt_mcsec, DATEADD('nanosecond', 12345600, '2019-09-04'::date)::string as dt_nsec;
dt_year | dt_quarter | dt_month | dt_week | dt_day | dt_hours | dt_min | dt_sec | dt_millis | dt_mcsec | dt_nsec
-------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+-------------------------
2029-09-04 00:00:00.000Z | 2017-03-04 00:00:00.000Z | 2021-05-04 00:00:00.000Z | 2018-09-19 00:00:00.000Z | 2020-03-22 00:00:00.000Z | 2019-05-02 00:00:00.000Z | 2019-09-07 22:38:00.000Z | 2019-09-15 10:20:54.000Z | 2019-09-04T01:11:54.144Z | 2019-09-03T23:59:47.654Z | 2019-09-04T00:00:00.012Z
;
selectDateAddWithField
schema::emp_no:i|birth_date:ts|dt_year:ts|dt_quarter:ts|dt_month:ts|dt_week:ts|dt_day:ts|dt_min:ts|dt_sec:ts
SELECT emp_no, birth_date, TIMESTAMP_ADD('year', 10, birth_date) as dt_year, TIMESTAMP_ADD('quarter', -10, birth_date) as dt_quarter,
TIMESTAMP_ADD('month', 20, birth_date) as dt_month, TIMESTAMP_ADD('week', -20, birth_date) as dt_week, TIMESTAMP_ADD('day', 300, birth_date) as dt_day,
TIMESTAMP_ADD('minutes', 10000, birth_date) as dt_min, TIMESTAMP_ADD('second', 100000, birth_date) as dt_sec
FROM test_emp WHERE emp_no >= 10032 AND emp_no <= 10042 ORDER BY 1;
emp_no | birth_date | dt_year | dt_quarter | dt_month | dt_week | dt_day | dt_min | dt_sec
----------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+-------------------------
10032 | 1960-08-09 00:00:00.000Z | 1970-08-09 00:00:00.000Z | 1958-02-09 00:00:00.000Z | 1962-04-09 00:00:00.000Z | 1960-03-22 00:00:00.000Z | 1961-06-05 00:00:00.000Z | 1960-08-15 22:40:00.000Z | 1960-08-10 03:46:40.000Z
10033 | 1956-11-14 00:00:00.000Z | 1966-11-14 00:00:00.000Z | 1954-05-14 00:00:00.000Z | 1958-07-14 00:00:00.000Z | 1956-06-27 00:00:00.000Z | 1957-09-10 00:00:00.000Z | 1956-11-20 22:40:00.000Z | 1956-11-15 03:46:40.000Z
10034 | 1962-12-29 00:00:00.000Z | 1972-12-29 00:00:00.000Z | 1960-06-29 00:00:00.000Z | 1964-08-29 00:00:00.000Z | 1962-08-11 00:00:00.000Z | 1963-10-25 00:00:00.000Z | 1963-01-04 22:40:00.000Z | 1962-12-30 03:46:40.000Z
10035 | 1953-02-08 00:00:00.000Z | 1963-02-08 00:00:00.000Z | 1950-08-08 00:00:00.000Z | 1954-10-08 00:00:00.000Z | 1952-09-21 00:00:00.000Z | 1953-12-05 00:00:00.000Z | 1953-02-14 22:40:00.000Z | 1953-02-09 03:46:40.000Z
10036 | 1959-08-10 00:00:00.000Z | 1969-08-10 00:00:00.000Z | 1957-02-10 00:00:00.000Z | 1961-04-10 00:00:00.000Z | 1959-03-23 00:00:00.000Z | 1960-06-05 00:00:00.000Z | 1959-08-16 22:40:00.000Z | 1959-08-11 03:46:40.000Z
10037 | 1963-07-22 00:00:00.000Z | 1973-07-22 00:00:00.000Z | 1961-01-22 00:00:00.000Z | 1965-03-22 00:00:00.000Z | 1963-03-04 00:00:00.000Z | 1964-05-17 00:00:00.000Z | 1963-07-28 22:40:00.000Z | 1963-07-23 03:46:40.000Z
10038 | 1960-07-20 00:00:00.000Z | 1970-07-20 00:00:00.000Z | 1958-01-20 00:00:00.000Z | 1962-03-20 00:00:00.000Z | 1960-03-02 00:00:00.000Z | 1961-05-16 00:00:00.000Z | 1960-07-26 22:40:00.000Z | 1960-07-21 03:46:40.000Z
10039 | 1959-10-01 00:00:00.000Z | 1969-10-01 00:00:00.000Z | 1957-04-01 00:00:00.000Z | 1961-06-01 00:00:00.000Z | 1959-05-14 00:00:00.000Z | 1960-07-27 00:00:00.000Z | 1959-10-07 22:40:00.000Z | 1959-10-02 03:46:40.000Z
10040 | null | null | null | null | null | null | null | null
10041 | null | null | null | null | null | null | null | null
10042 | null | null | null | null | null | null | null | null
;
selectAddWithComplexExpressions1
SELECT gender, birth_date, TIMESTAMPADD('months', CASE WHEN gender = 'M' THEN 10 WHEN gender = 'F' THEN -10 ELSE 100 END,
birth_date + INTERVAL 10 month) AS dt FROM test_emp WHERE dt > '1954-07-01'::date ORDER BY emp_no LIMIT 10;
gender:s | birth_date:ts | dt:ts
------------+--------------------------+-------------------------
M | 1953-09-02 00:00:00.000Z | 1955-05-02 00:00:00.000Z
F | 1964-06-02 00:00:00.000Z | 1964-06-02 00:00:00.000Z
M | 1959-12-03 00:00:00.000Z | 1961-08-03 00:00:00.000Z
M | 1954-05-01 00:00:00.000Z | 1956-01-01 00:00:00.000Z
M | 1955-01-21 00:00:00.000Z | 1956-09-21 00:00:00.000Z
F | 1957-05-23 00:00:00.000Z | 1957-05-23 00:00:00.000Z
M | 1958-02-19 00:00:00.000Z | 1959-10-19 00:00:00.000Z
null | 1963-06-01 00:00:00.000Z | 1972-08-01 00:00:00.000Z
null | 1953-11-07 00:00:00.000Z | 1963-01-07 00:00:00.000Z
null | 1960-10-04 00:00:00.000Z | 1969-12-04 00:00:00.000Z
;
selectAddWithComplexExpressions2
schema::languages:byte|first_name:s|gender:s|hire_date:ts|date_add:date
SELECT languages, first_name, gender, hire_date,
CAST(DATE_ADD(CASE WHEN gender = 'M' THEN CONCAT(gender, 'onths') WHEN gender = 'F' THEN NULL ELSE 'quarter' END,
5, hire_date + INTERVAL 10 month) AS DATE) AS date_add
FROM test_emp WHERE languages >= 3 AND first_name LIKE '%y%' ORDER BY date_add ASC, languages DESC;
languages | first_name | gender | hire_date | date_add
---------------+---------------+---------------+-------------------------+---------------
5 | Hironoby | F | 1988-07-21 00:00:00.00Z | null
4 | Weiyi | F | 1993-02-14 00:00:00.00Z | null
3 | Magy | F | 1993-03-21 00:00:00.00Z | null
4 | Jayson | M | 1990-01-14 00:00:00.00Z | 1991-04-14
5 | Mary | null | 1990-01-22 00:00:00.00Z | 1992-02-22
5 | Georgy | M | 1992-04-27 00:00:00.00Z | 1993-07-27
4 | Mayumi | M | 1995-03-13 00:00:00.00Z | 1996-06-13
;
selectAddWithComplexExpressions3
schema::first_name:s|gender:s|hire_date:ts|date_add1:ts|date_add2:i
SELECT first_name, gender, hire_date, DATE_ADD(CASE WHEN gender = 'M' THEN CONCAT(gender, 'onths') WHEN gender = 'F' THEN 'year' ELSE 'quarter' END,
5, hire_date + INTERVAL 10 month) AS date_add1,
YEAR(DATE_ADD(CASE WHEN gender = 'M' THEN CONCAT(gender, 'onths') WHEN gender = 'F' THEN 'year' ELSE 'quarter' END,
5, hire_date + INTERVAL 10 month)) AS date_add2
FROM test_emp WHERE YEAR(date_add1) > 1990 AND first_name LIKE '%y%' ORDER BY date_add1 DESC LIMIT 15;
first_name | gender | hire_date | date_add1 | date_add2
---------------+---------------+-------------------------+-------------------------+------------
Magy | F | 1993-03-21 00:00:00.00Z | 1999-01-21 00:00:00.00Z | 1999
Weiyi | F | 1993-02-14 00:00:00.00Z | 1998-12-14 00:00:00.00Z | 1998
Mayumi | M | 1995-03-13 00:00:00.00Z | 1996-06-13 00:00:00.00Z | 1996
Saniya | M | 1994-09-15 00:00:00.00Z | 1995-12-15 00:00:00.00Z | 1995
Hironoby | F | 1988-07-21 00:00:00.00Z | 1994-05-21 00:00:00.00Z | 1994
Georgy | M | 1992-04-27 00:00:00.00Z | 1993-07-27 00:00:00.00Z | 1993
Mayuko | M | 1991-01-26 00:00:00.00Z | 1992-04-26 00:00:00.00Z | 1992
Mary | null | 1990-01-22 00:00:00.00Z | 1992-02-22 00:00:00.00Z | 1992
Yishay | M | 1990-10-20 00:00:00.00Z | 1992-01-20 00:00:00.00Z | 1992
Jayson | M | 1990-01-14 00:00:00.00Z | 1991-04-14 00:00:00.00Z | 1991
;
dateAddOrderBy
schema::emp_no:i|hire_date:ts|dt:ts
SELECT emp_no, hire_date, DATE_ADD('hours', -4000, hire_date) as dt FROM test_emp ORDER BY dt NULLS LAST, emp_no LIMIT 5;
emp_no | hire_date | dt
--------+--------------------------+-------------------------
10009 | 1985-02-18 00:00:00.000Z | 1984-09-04 08:00:00.000Z
10048 | 1985-02-24 00:00:00.000Z | 1984-09-10 08:00:00.000Z
10098 | 1985-05-13 00:00:00.000Z | 1984-11-27 08:00:00.000Z
10076 | 1985-07-09 00:00:00.000Z | 1985-01-23 08:00:00.000Z
10061 | 1985-09-17 00:00:00.000Z | 1985-04-03 08:00:00.000Z
;
dateAddFilter
schema::emp_no:i|hire_date:ts|dt:ts
SELECT emp_no, hire_date, DATE_ADD('quarter', 42, hire_date) as dt FROM test_emp WHERE DATE_ADD('quarter', 42, hire_date) > '2000-01-01'::date ORDER BY emp_no LIMIT 5;
emp_no | hire_date | dt
--------+--------------------------+-------------------------
10005 | 1989-09-12 00:00:00.000Z | 2000-03-12 00:00:00.000Z
10008 | 1994-09-15 00:00:00.000Z | 2005-03-15 00:00:00.000Z
10010 | 1989-08-24 00:00:00.000Z | 2000-02-24 00:00:00.000Z
10011 | 1990-01-22 00:00:00.000Z | 2000-07-22 00:00:00.000Z
10012 | 1992-12-18 00:00:00.000Z | 2003-06-18 00:00:00.000Z
;
dateAddGroupBy
schema::count:l|dt:ts
SELECT count(*) as count, DATE_ADD('weeks', -120, hire_date) dt FROM test_emp GROUP BY dt ORDER BY 2 LIMIT 5;
count | dt
--------+-------------------------
1 | 1982-11-01 00:00:00.000Z
1 | 1982-11-07 00:00:00.000Z
1 | 1983-01-24 00:00:00.000Z
1 | 1983-03-22 00:00:00.000Z
1 | 1983-05-31 00:00:00.000Z
;
dateAddHaving
schema::gender:s|dt:ts
SELECT gender, DATE_ADD('months', 60, max(hire_date)) AS dt FROM test_emp GROUP BY gender HAVING DATE_ADD('months', 60, max(hire_date)) >= '2002-01-01T00:00:00.000Z'::timestamp ORDER BY 1;
gender | dt
--------+-------------------------
null | 2004-04-30 00:00:00.000Z
F | 2002-05-19 00:00:00.000Z
;
selectDateTruncWithDateTime selectDateTruncWithDateTime
schema::dt_hour:ts|dt_min:ts|dt_sec:ts|dt_millis:s|dt_micro:s|dt_nano:s schema::dt_hour:ts|dt_min:ts|dt_sec:ts|dt_millis:s|dt_micro:s|dt_nano:s
SELECT DATE_TRUNC('hour', '2019-09-04T11:22:33.123Z'::datetime) as dt_hour, DATE_TRUNC('minute', '2019-09-04T11:22:33.123Z'::datetime) as dt_min, SELECT DATE_TRUNC('hour', '2019-09-04T11:22:33.123Z'::datetime) as dt_hour, DATE_TRUNC('minute', '2019-09-04T11:22:33.123Z'::datetime) as dt_min,

View File

@ -237,8 +237,10 @@ CURRENT_DATE |SCALAR
CURRENT_TIME |SCALAR CURRENT_TIME |SCALAR
CURRENT_TIMESTAMP|SCALAR CURRENT_TIMESTAMP|SCALAR
CURTIME |SCALAR CURTIME |SCALAR
DATEADD |SCALAR
DATEPART |SCALAR DATEPART |SCALAR
DATETRUNC |SCALAR DATETRUNC |SCALAR
DATE_ADD |SCALAR
DATE_PART |SCALAR DATE_PART |SCALAR
DATE_TRUNC |SCALAR DATE_TRUNC |SCALAR
DAY |SCALAR DAY |SCALAR
@ -275,6 +277,8 @@ NOW |SCALAR
QUARTER |SCALAR QUARTER |SCALAR
SECOND |SCALAR SECOND |SCALAR
SECOND_OF_MINUTE |SCALAR SECOND_OF_MINUTE |SCALAR
TIMESTAMPADD |SCALAR
TIMESTAMP_ADD |SCALAR
TODAY |SCALAR TODAY |SCALAR
WEEK |SCALAR WEEK |SCALAR
WEEK_OF_YEAR |SCALAR WEEK_OF_YEAR |SCALAR
@ -2417,6 +2421,56 @@ SELECT DAY_OF_MONTH(CAST('2018-02-19T10:23:27Z' AS TIMESTAMP)) AS day;
// end::dayOfMonth // end::dayOfMonth
; ;
dateAddDateTimeYears
// tag::dateAddDateTimeYears
SELECT DATE_ADD('years', 10, '2019-09-04T11:22:33.000Z'::datetime) AS "+10 years";
+10 years
------------------------
2029-09-04T11:22:33.000Z
// end::dateAddDateTimeYears
;
dateAddDateTimeWeeks
// tag::dateAddDateTimeWeeks
SELECT DATE_ADD('week', 10, '2019-09-04T11:22:33.000Z'::datetime) AS "+10 weeks";
+10 weeks
------------------------
2019-11-13T11:22:33.000Z
// end::dateAddDateTimeWeeks
;
dateAddDateTimeSeconds
// tag::dateAddDateTimeSeconds
SELECT DATE_ADD('seconds', -1234, '2019-09-04T11:22:33.000Z'::datetime) AS "-1234 seconds";
-1234 seconds
------------------------
2019-09-04T11:01:59.000Z
// end::dateAddDateTimeSeconds
;
dateAddDateQuarters
// tag::dateAddDateQuarters
SELECT DATE_ADD('qq', -417, '2019-09-04'::date) AS "-417 quarters";
-417 quarters
------------------------
1915-06-04T00:00:00.000Z
// end::dateAddDateQuarters
;
dateAddDateMinutes
// tag::dateAddDateMinutes
SELECT DATE_ADD('minutes', 9235, '2019-09-04'::date) AS "+9235 minutes";
+9235 minutes
------------------------
2019-09-10T09:55:00.000Z
// end::dateAddDateMinutes
;
datePartDateTimeYears datePartDateTimeYears
// tag::datePartDateTimeYears // tag::datePartDateTimeYears
SELECT DATE_PART('year', '2019-09-22T11:22:33.123Z'::datetime) AS "years"; SELECT DATE_PART('year', '2019-09-22T11:22:33.123Z'::datetime) AS "years";

View File

@ -31,6 +31,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.User;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentDate; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentDate;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentDateTime; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentDateTime;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentTime; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentTime;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateAdd;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePart; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePart;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName;
@ -195,6 +196,7 @@ public class FunctionRegistry {
def(DayOfMonth.class, DayOfMonth::new, "DAY_OF_MONTH", "DAYOFMONTH", "DAY", "DOM"), def(DayOfMonth.class, DayOfMonth::new, "DAY_OF_MONTH", "DAYOFMONTH", "DAY", "DOM"),
def(DayOfWeek.class, DayOfWeek::new, "DAY_OF_WEEK", "DAYOFWEEK", "DOW"), def(DayOfWeek.class, DayOfWeek::new, "DAY_OF_WEEK", "DAYOFWEEK", "DOW"),
def(DayOfYear.class, DayOfYear::new, "DAY_OF_YEAR", "DAYOFYEAR", "DOY"), def(DayOfYear.class, DayOfYear::new, "DAY_OF_YEAR", "DAYOFYEAR", "DOY"),
def(DateAdd.class, DateAdd::new, "DATEADD", "DATE_ADD", "TIMESTAMPADD", "TIMESTAMP_ADD"),
def(DatePart.class, DatePart::new, "DATEPART", "DATE_PART"), def(DatePart.class, DatePart::new, "DATEPART", "DATE_PART"),
def(DateTrunc.class, DateTrunc::new, "DATETRUNC", "DATE_TRUNC"), def(DateTrunc.class, DateTrunc::new, "DATETRUNC", "DATE_TRUNC"),
def(HourOfDay.class, HourOfDay::new, "HOUR_OF_DAY", "HOUR"), def(HourOfDay.class, HourOfDay::new, "HOUR_OF_DAY", "HOUR"),
@ -512,6 +514,28 @@ public class FunctionRegistry {
T build(Source source, Expression lhs, Expression rhs, ZoneId zi); T build(Source source, Expression lhs, Expression rhs, ZoneId zi);
} }
/**
 * Build a {@linkplain FunctionDefinition} for a three-args function that
 * requires a timezone.
 */
@SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do
static <T extends Function> FunctionDefinition def(Class<T> function, DatetimeThreeArgsFunctionBuilder<T> ctorRef, String... names) {
    // Adapt the three-args, timezone-aware constructor reference to the generic FunctionBuilder shape.
    FunctionBuilder wrapper = (src, args, isDistinct, config) -> {
        // Fixed arity: exactly three arguments are expected.
        if (args.size() != 3) {
            throw new SqlIllegalArgumentException("expects three arguments");
        }
        // DISTINCT is meaningless for this kind of scalar function.
        if (isDistinct) {
            throw new SqlIllegalArgumentException("does not support DISTINCT yet it was specified");
        }
        return ctorRef.build(src, args.get(0), args.get(1), args.get(2), config.zoneId());
    };
    return def(function, wrapper, false, names);
}

/**
 * Constructor-reference shape for three-argument functions that also take the session time zone.
 */
interface DatetimeThreeArgsFunctionBuilder<T> {
    T build(Source source, Expression first, Expression second, Expression third, ZoneId zi);
}
/** /**
* Build a {@linkplain FunctionDefinition} for a binary function that is * Build a {@linkplain FunctionDefinition} for a binary function that is
* not aware of time zone and does not support {@code DISTINCT}. * not aware of time zone and does not support {@code DISTINCT}.

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateAddProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePartProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePartProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor;
@ -90,6 +91,7 @@ public final class Processors {
entries.add(new Entry(Processor.class, NamedDateTimeProcessor.NAME, NamedDateTimeProcessor::new)); entries.add(new Entry(Processor.class, NamedDateTimeProcessor.NAME, NamedDateTimeProcessor::new));
entries.add(new Entry(Processor.class, NonIsoDateTimeProcessor.NAME, NonIsoDateTimeProcessor::new)); entries.add(new Entry(Processor.class, NonIsoDateTimeProcessor.NAME, NonIsoDateTimeProcessor::new));
entries.add(new Entry(Processor.class, QuarterProcessor.NAME, QuarterProcessor::new)); entries.add(new Entry(Processor.class, QuarterProcessor.NAME, QuarterProcessor::new));
entries.add(new Entry(Processor.class, DateAddProcessor.NAME, DateAddProcessor::new));
entries.add(new Entry(Processor.class, DatePartProcessor.NAME, DatePartProcessor::new)); entries.add(new Entry(Processor.class, DatePartProcessor.NAME, DatePartProcessor::new));
entries.add(new Entry(Processor.class, DateTruncProcessor.NAME, DateTruncProcessor::new)); entries.add(new Entry(Processor.class, DateTruncProcessor.NAME, DateTruncProcessor::new));
// math // math

View File

@ -0,0 +1,173 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.Nullability;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.type.DataType;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiFunction;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isDate;
import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isInteger;
import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isString;
/**
 * Implementation of the DATEADD/DATE_ADD/TIMESTAMPADD/TIMESTAMP_ADD SQL functions:
 * adds a given (possibly negative) number of date/time units to a date/datetime expression.
 */
public class DateAdd extends ThreeArgsDateTimeFunction {

    /**
     * The date/time unit to add, each carrying the function that applies the
     * addition to a {@link ZonedDateTime} plus the accepted alias spellings
     * (following the MS-SQL DATEADD datepart abbreviations).
     */
    public enum Part implements DateTimeField {
        YEAR((dt, i) -> dt.plus(i, ChronoUnit.YEARS), "years", "yyyy", "yy"),
        // a quarter is applied as 3 months, so month-arithmetic clamping rules apply
        QUARTER((dt, i) -> dt.plus(i * 3, ChronoUnit.MONTHS), "quarters", "qq", "q"),
        MONTH((dt, i) -> dt.plus(i, ChronoUnit.MONTHS), "months", "mm", "m"),
        // dayofyear, day and weekday all reduce to plain day additions
        DAYOFYEAR((dt, i) -> dt.plus(i, ChronoUnit.DAYS), "dy", "y"),
        DAY((dt, i) -> dt.plus(i, ChronoUnit.DAYS), "days", "dd", "d"),
        WEEK((dt, i) -> dt.plus(i, ChronoUnit.WEEKS), "weeks", "wk", "ww"),
        WEEKDAY((dt, i) -> dt.plus(i, ChronoUnit.DAYS), "weekdays", "dw"),
        HOUR((dt, i) -> dt.plus(i, ChronoUnit.HOURS), "hours", "hh"),
        MINUTE((dt, i) -> dt.plus(i, ChronoUnit.MINUTES), "minutes", "mi", "n"),
        SECOND((dt, i) -> dt.plus(i, ChronoUnit.SECONDS), "seconds", "ss", "s"),
        MILLISECOND((dt, i) -> dt.plus(i, ChronoUnit.MILLIS), "milliseconds", "ms"),
        MICROSECOND((dt, i) -> dt.plus(i, ChronoUnit.MICROS), "microseconds", "mcs"),
        NANOSECOND((dt, i) -> dt.plus(i, ChronoUnit.NANOS), "nanoseconds", "ns");

        // lookup map from unit name/alias to Part (built by the DateTimeField helper)
        private static final Map<String, Part> NAME_TO_PART;
        // list of valid unit names, used when building error messages
        private static final List<String> VALID_VALUES;

        static {
            NAME_TO_PART = DateTimeField.initializeResolutionMap(values());
            VALID_VALUES = DateTimeField.initializeValidValues(values());
        }

        private BiFunction<ZonedDateTime, Integer, ZonedDateTime> addFunction;
        private Set<String> aliases;

        Part(BiFunction<ZonedDateTime, Integer, ZonedDateTime> addFunction, String... aliases) {
            this.addFunction = addFunction;
            this.aliases = new HashSet<>(Arrays.asList(aliases));
        }

        @Override
        public Iterable<String> aliases() {
            return aliases;
        }

        /** Suggests unit names similar to the given (invalid) one, for error messages. */
        public static List<String> findSimilar(String match) {
            return DateTimeField.findSimilar(NAME_TO_PART.keySet(), match);
        }

        /** Resolves a unit name or alias to its {@link Part}, or null if unknown. */
        public static Part resolve(String truncateTo) {
            return DateTimeField.resolveMatch(NAME_TO_PART, truncateTo);
        }

        /** Adds the given number of this unit to the date/time (negative values subtract). */
        public ZonedDateTime add(ZonedDateTime dateTime, Integer numberOfUnits) {
            return addFunction.apply(dateTime, numberOfUnits);
        }
    }

    public DateAdd(Source source, Expression unit, Expression numberOfUnits, Expression timestamp, ZoneId zoneId) {
        super(source, unit, numberOfUnits, timestamp, zoneId);
    }

    @Override
    protected TypeResolution resolveType() {
        // 1st argument: must be a string naming the unit; when it is a constant,
        // validate it eagerly and suggest close matches for a likely typo
        TypeResolution resolution = isString(first(), sourceText(), Expressions.ParamOrdinal.FIRST);
        if (resolution.unresolved()) {
            return resolution;
        }
        if (first().foldable()) {
            String datePartValue = (String) first().fold();
            if (datePartValue != null && resolveDateTimeField(datePartValue) == false) {
                List<String> similar = findSimilarDateTimeFields(datePartValue);
                if (similar.isEmpty()) {
                    return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases; found value [{}]",
                        sourceText(),
                        validDateTimeFieldValues(),
                        Expressions.name(first())));
                } else {
                    return new TypeResolution(format(null, "Unknown value [{}] for first argument of [{}]; did you mean {}?",
                        Expressions.name(first()),
                        sourceText(),
                        similar));
                }
            }
        }
        // 2nd argument: the number of units to add, must be an integer
        resolution = isInteger(second(), sourceText(), Expressions.ParamOrdinal.SECOND);
        if (resolution.unresolved()) {
            return resolution;
        }
        // 3rd argument: the date/datetime to add to
        resolution = isDate(third(), sourceText(), Expressions.ParamOrdinal.THIRD);
        if (resolution.unresolved()) {
            return resolution;
        }
        return TypeResolution.TYPE_RESOLVED;
    }

    @Override
    public DataType dataType() {
        // the result is always a datetime, even when the 3rd argument is a plain date
        return DataType.DATETIME;
    }

    @Override
    protected ThreeArgsDateTimeFunction replaceChildren(Expression newFirst, Expression newSecond, Expression newThird) {
        return new DateAdd(source(), newFirst, newSecond, newThird, zoneId());
    }

    @Override
    protected NodeInfo<? extends Expression> info() {
        return NodeInfo.create(this, DateAdd::new, first(), second(), third(), zoneId());
    }

    @Override
    public Nullability nullable() {
        // nullability depends on runtime values: the processor returns null if any argument is null
        return Nullability.UNKNOWN;
    }

    @Override
    protected Pipe createPipe(Pipe first, Pipe second, Pipe third, ZoneId zoneId) {
        return new DateAddPipe(source(), this, first, second, third, zoneId);
    }

    @Override
    protected String scriptMethodName() {
        // method name used in the generated script for this function
        return "dateAdd";
    }

    @Override
    public Object fold() {
        // constant-fold by evaluating the runtime processor directly on the folded arguments
        return DateAddProcessor.process(first().fold(), second().fold(), third().fold(), zoneId());
    }

    @Override
    protected boolean resolveDateTimeField(String dateTimeField) {
        return Part.resolve(dateTimeField) != null;
    }

    @Override
    protected List<String> findSimilarDateTimeFields(String dateTimeField) {
        return Part.findSimilar(dateTimeField);
    }

    @Override
    protected List<String> validDateTimeFieldValues() {
        return Part.VALID_VALUES;
    }
}

View File

@ -0,0 +1,36 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;
import java.time.ZoneId;
/**
 * Execution-pipe node backing the DATEADD function: wires the three argument
 * pipes and the session time zone into a {@link DateAddProcessor}.
 */
public class DateAddPipe extends ThreeArgsDateTimePipe {

    public DateAddPipe(Source source, Expression expression, Pipe first, Pipe second, Pipe third, ZoneId zoneId) {
        super(source, expression, first, second, third, zoneId);
    }

    @Override
    public ThreeArgsDateTimePipe replaceChildren(Pipe newFirst, Pipe newSecond, Pipe newThird) {
        // rebuild this node around the replacement children, keeping source/expression/zone
        return new DateAddPipe(source(), expression(), newFirst, newSecond, newThird, zoneId());
    }

    @Override
    protected NodeInfo<DateAddPipe> info() {
        // describes how to recreate this node from its constructor arguments
        return NodeInfo.create(this, DateAddPipe::new, expression(), first(), second(), third(), zoneId());
    }

    @Override
    protected Processor makeProcessor(Processor unit, Processor numberOfUnits, Processor timestamp, ZoneId zoneId) {
        return new DateAddProcessor(unit, numberOfUnits, timestamp, zoneId);
    }
}

View File

@ -0,0 +1,72 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateAdd.Part;
import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
import java.io.IOException;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.List;
/**
 * Runtime processor for the DATEADD function: validates the (unit, count, timestamp)
 * triple and applies the unit addition in the session time zone.
 */
public class DateAddProcessor extends ThreeArgsDateTimeProcessor {

    public static final String NAME = "dtadd";

    public DateAddProcessor(Processor unit, Processor numberOfUnits, Processor timestamp, ZoneId zoneId) {
        super(unit, numberOfUnits, timestamp, zoneId);
    }

    public DateAddProcessor(StreamInput in) throws IOException {
        super(in);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public Object doProcess(Object unit, Object numberOfUnits, Object timestamp, ZoneId zoneId) {
        return process(unit, numberOfUnits, timestamp, zoneId);
    }

    /**
     * Static entry point, also invoked from Painless scripting.
     */
    public static Object process(Object unit, Object numberOfUnits, Object timestamp, ZoneId zoneId) {
        // SQL semantics: any null argument yields null
        if (unit == null || numberOfUnits == null || timestamp == null) {
            return null;
        }
        if (unit instanceof String == false) {
            throw new SqlIllegalArgumentException("A string is required; received [{}]", unit);
        }
        // resolve the unit name/alias; on failure, try to suggest close matches
        Part part = Part.resolve((String) unit);
        if (part == null) {
            List<String> suggestions = Part.findSimilar((String) unit);
            if (suggestions.isEmpty()) {
                throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]",
                    Part.values(), unit);
            }
            throw new SqlIllegalArgumentException("Received value [{}] is not valid date part to add; did you mean {}?",
                unit, suggestions);
        }
        if (numberOfUnits instanceof Integer == false) {
            throw new SqlIllegalArgumentException("An integer is required; received [{}]", numberOfUnits);
        }
        if (timestamp instanceof ZonedDateTime == false) {
            throw new SqlIllegalArgumentException("A date/datetime is required; received [{}]", timestamp);
        }
        // normalize to the session time zone before applying the addition
        ZonedDateTime inZone = ((ZonedDateTime) timestamp).withZoneSameInstant(zoneId);
        return part.add(inZone, (Integer) numberOfUnits);
    }
}

View File

@ -0,0 +1,127 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.Nullability;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Source;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder;
/**
 * Base class for scalar date/time functions that take exactly three arguments
 * plus a session time zone (e.g. DATE_ADD).
 */
public abstract class ThreeArgsDateTimeFunction extends ScalarFunction {

    private final ZoneId zoneId;

    public ThreeArgsDateTimeFunction(Source source, Expression first, Expression second, Expression third, ZoneId zoneId) {
        super(source, Arrays.asList(first, second, third));
        this.zoneId = zoneId;
    }

    /** First argument of the function. */
    public Expression first() {
        return arguments().get(0);
    }

    /** Second argument of the function. */
    public Expression second() {
        return arguments().get(1);
    }

    /** Third argument of the function. */
    public Expression third() {
        return arguments().get(2);
    }

    public ZoneId zoneId() {
        return zoneId;
    }

    protected abstract boolean resolveDateTimeField(String dateTimeField);

    protected abstract List<String> findSimilarDateTimeFields(String dateTimeField);

    protected abstract List<String> validDateTimeFieldValues();

    @Override
    public final ThreeArgsDateTimeFunction replaceChildren(List<Expression> newChildren) {
        if (newChildren.size() != 3) {
            throw new IllegalArgumentException("expected [3] children but received [" + newChildren.size() + "]");
        }
        return replaceChildren(newChildren.get(0), newChildren.get(1), newChildren.get(2));
    }

    protected abstract ThreeArgsDateTimeFunction replaceChildren(Expression newFirst, Expression newSecond, Expression newThird);

    @Override
    protected Pipe makePipe() {
        return createPipe(Expressions.pipe(first()), Expressions.pipe(second()), Expressions.pipe(third()), zoneId);
    }

    protected abstract Pipe createPipe(Pipe first, Pipe second, Pipe third, ZoneId zoneId);

    @Override
    public Nullability nullable() {
        // a null in any argument produces a null result
        return Nullability.TRUE;
    }

    @Override
    public boolean foldable() {
        // constant-foldable only when every argument is
        return first().foldable() && second().foldable() && third().foldable();
    }

    @Override
    public ScriptTemplate asScript() {
        ScriptTemplate firstScript = asScript(first());
        ScriptTemplate secondScript = asScript(second());
        ScriptTemplate thirdScript = asScript(third());
        return asScriptFrom(firstScript, secondScript, thirdScript);
    }

    protected ScriptTemplate asScriptFrom(ScriptTemplate firstScript, ScriptTemplate secondScript, ScriptTemplate thirdScript) {
        // emits {sql}.<method>(arg1,arg2,arg3,<zoneId>) with the zone passed as a script variable
        return new ScriptTemplate(
            formatTemplate("{sql}." + scriptMethodName() +
                "(" + firstScript.template() + "," + secondScript.template() + "," + thirdScript.template() + ",{})"),
            paramsBuilder()
                .script(firstScript.params())
                .script(secondScript.params())
                .script(thirdScript.params())
                .variable(zoneId.getId())
                .build(),
            dataType());
    }

    protected String scriptMethodName() {
        // by convention the Painless method name is the lower-cased class name
        return getClass().getSimpleName().toLowerCase(Locale.ROOT);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (super.equals(o) == false) {
            return false;
        }
        ThreeArgsDateTimeFunction other = (ThreeArgsDateTimeFunction) o;
        return zoneId.equals(other.zoneId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), zoneId);
    }
}

View File

@ -0,0 +1,86 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
import org.elasticsearch.xpack.sql.tree.Source;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
/**
 * Base pipe for date/time functions with three arguments; carries the three
 * child pipes and the session time zone through to the runtime processor.
 */
public abstract class ThreeArgsDateTimePipe extends Pipe {

    private final Pipe first;
    private final Pipe second;
    private final Pipe third;
    private final ZoneId zoneId;

    public ThreeArgsDateTimePipe(Source source, Expression expression, Pipe first, Pipe second, Pipe third, ZoneId zoneId) {
        super(source, expression, Arrays.asList(first, second, third));
        this.first = first;
        this.second = second;
        this.third = third;
        this.zoneId = zoneId;
    }

    public Pipe first() {
        return first;
    }

    public Pipe second() {
        return second;
    }

    public Pipe third() {
        return third;
    }

    ZoneId zoneId() {
        return zoneId;
    }

    @Override
    public final Pipe replaceChildren(List<Pipe> newChildren) {
        if (newChildren.size() != 3) {
            throw new IllegalArgumentException("expected [3] children but received [" + newChildren.size() + "]");
        }
        return replaceChildren(newChildren.get(0), newChildren.get(1), newChildren.get(2));
    }

    /** Type-safe variant for subclasses; called by {@link #replaceChildren(List)}. */
    public abstract Pipe replaceChildren(Pipe newFirst, Pipe newSecond, Pipe newThird);

    @Override
    public Processor asProcessor() {
        return makeProcessor(first.asProcessor(), second.asProcessor(), third.asProcessor(), zoneId);
    }

    protected abstract Processor makeProcessor(Processor first, Processor second, Processor third, ZoneId zoneId);

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (super.equals(o) == false) {
            return false;
        }
        ThreeArgsDateTimePipe other = (ThreeArgsDateTimePipe) o;
        return first.equals(other.first)
            && second.equals(other.second)
            && third.equals(other.third)
            && zoneId.equals(other.zoneId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), first, second, third, zoneId);
    }
}

View File

@ -0,0 +1,98 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.common.io.SqlStreamInput;
import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
import java.io.IOException;
import java.time.ZoneId;
import java.util.Objects;
/**
 * Base runtime processor for date/time functions with three arguments.
 * The time zone is not serialized with the processor itself; it is restored
 * from the surrounding SQL stream on deserialization.
 */
public abstract class ThreeArgsDateTimeProcessor implements Processor {

    private final Processor first;
    private final Processor second;
    private final Processor third;
    private final ZoneId zoneId;

    public ThreeArgsDateTimeProcessor(Processor first, Processor second, Processor third, ZoneId zoneId) {
        this.first = first;
        this.second = second;
        this.third = third;
        this.zoneId = zoneId;
    }

    protected ThreeArgsDateTimeProcessor(StreamInput in) throws IOException {
        first = in.readNamedWriteable(Processor.class);
        second = in.readNamedWriteable(Processor.class);
        third = in.readNamedWriteable(Processor.class);
        // the zone travels with the stream header, not with this processor
        zoneId = SqlStreamInput.asSqlStream(in).zoneId();
    }

    @Override
    public final void writeTo(StreamOutput out) throws IOException {
        // zoneId is deliberately not written: it is carried by the stream itself
        out.writeNamedWriteable(first);
        out.writeNamedWriteable(second);
        out.writeNamedWriteable(third);
    }

    public Processor first() {
        return first;
    }

    public Processor second() {
        return second;
    }

    public Processor third() {
        return third;
    }

    ZoneId zoneId() {
        return zoneId;
    }

    @Override
    public Object process(Object input) {
        // evaluate the children in declaration order, short-circuiting on the first null
        Object firstValue = first.process(input);
        if (firstValue == null) {
            return null;
        }
        Object secondValue = second.process(input);
        if (secondValue == null) {
            return null;
        }
        Object thirdValue = third.process(input);
        if (thirdValue == null) {
            return null;
        }
        return doProcess(firstValue, secondValue, thirdValue, zoneId());
    }

    /** Subclass hook invoked only with three non-null, already-evaluated values. */
    public abstract Object doProcess(Object o1, Object o2, Object o3, ZoneId zoneId);

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        ThreeArgsDateTimeProcessor other = (ThreeArgsDateTimeProcessor) o;
        return Objects.equals(first, other.first)
            && Objects.equals(second, other.second)
            && Objects.equals(third, other.third)
            && Objects.equals(zoneId, other.zoneId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(first, second, third, zoneId);
    }
}

View File

@ -9,6 +9,7 @@ import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.script.JodaCompatibleZonedDateTime; import org.elasticsearch.script.JodaCompatibleZonedDateTime;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateAddProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePartProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePartProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor;
@ -372,6 +373,10 @@ public final class InternalSqlScriptUtils {
return NonIsoDateTimeExtractor.WEEK_OF_YEAR.extract(asDateTime(dateTime), tzId); return NonIsoDateTimeExtractor.WEEK_OF_YEAR.extract(asDateTime(dateTime), tzId);
} }
/**
 * Painless entry point for DATE_ADD: adds {@code numberOfUnits} of {@code dateField}
 * units to {@code dateTime}, interpreted in time zone {@code tzId}.
 * Null handling and validation are delegated to {@link DateAddProcessor#process}.
 */
public static ZonedDateTime dateAdd(String dateField, Integer numberOfUnits, Object dateTime, String tzId) {
    return (ZonedDateTime) DateAddProcessor.process(dateField, numberOfUnits, asDateTime(dateTime), ZoneId.of(tzId));
}
public static ZonedDateTime dateTrunc(String truncateTo, Object dateTime, String tzId) { public static ZonedDateTime dateTrunc(String truncateTo, Object dateTime, String tzId) {
return (ZonedDateTime) DateTruncProcessor.process(truncateTo, asDateTime(dateTime) , ZoneId.of(tzId)); return (ZonedDateTime) DateTruncProcessor.process(truncateTo, asDateTime(dateTime) , ZoneId.of(tzId));
} }

View File

@ -115,6 +115,7 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS
String monthName(Object, String) String monthName(Object, String)
Integer quarter(Object, String) Integer quarter(Object, String)
Integer weekOfYear(Object, String) Integer weekOfYear(Object, String)
ZonedDateTime dateAdd(String, Integer, Object, String)
ZonedDateTime dateTrunc(String, Object, String) ZonedDateTime dateTrunc(String, Object, String)
Integer datePart(String, Object, String) Integer datePart(String, Object, String)
IntervalDayTime intervalDayTime(String, String) IntervalDayTime intervalDayTime(String, String)

View File

@ -256,6 +256,34 @@ public class VerifierErrorMessagesTests extends ESTestCase {
accept("SELECT DATE_PART('ms', date) FROM test"); accept("SELECT DATE_PART('ms', date) FROM test");
} }
// Verifier must reject DATE_ADD calls with wrongly-typed or unknown arguments,
// producing precise, position-prefixed error messages.
public void testDateAddInvalidArgs() {
    // wrong argument types: unit must be a string, count an integer, target a date/datetime
    assertEquals("1:8: first argument of [DATE_ADD(int, int, date)] must be [string], found value [int] type [integer]",
        error("SELECT DATE_ADD(int, int, date) FROM test"));
    assertEquals("1:8: second argument of [DATE_ADD(keyword, keyword, date)] must be [integer], found value [keyword] " +
        "type [keyword]", error("SELECT DATE_ADD(keyword, keyword, date) FROM test"));
    assertEquals("1:8: third argument of [DATE_ADD(keyword, int, keyword)] must be [date or datetime], found value [keyword] " +
        "type [keyword]", error("SELECT DATE_ADD(keyword, int, keyword) FROM test"));
    // unknown unit with no close match: the full list of valid units is reported
    assertEquals("1:8: first argument of [DATE_ADD('invalid', int, date)] must be one of [YEAR, QUARTER, MONTH, DAYOFYEAR, " +
        "DAY, WEEK, WEEKDAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, NANOSECOND] " +
        "or their aliases; found value ['invalid']",
        error("SELECT DATE_ADD('invalid', int, date) FROM test"));
    // misspelled units: "did you mean" suggestions are offered
    assertEquals("1:8: Unknown value ['sacinds'] for first argument of [DATE_ADD('sacinds', int, date)]; " +
        "did you mean [seconds, second]?",
        error("SELECT DATE_ADD('sacinds', int, date) FROM test"));
    assertEquals("1:8: Unknown value ['dz'] for first argument of [DATE_ADD('dz', int, date)]; " +
        "did you mean [dd, dw, dy, d]?",
        error("SELECT DATE_ADD('dz', int, date) FROM test"));
}
/**
 * Valid DATE_ADD invocations must pass verification: full unit names,
 * abbreviations, zero/negative counts, and both date and datetime targets.
 */
public void testDateAddValidArgs() {
    accept("SELECT DATE_ADD('dayofyear', 123, date) FROM test");
    accept("SELECT DATE_ADD('dy', 30, date) FROM test");
    accept("SELECT DATE_ADD('weekday', 0, date) FROM test");
    accept("SELECT DATE_ADD('dw', 20, date) FROM test");
    accept("SELECT DATE_ADD('years', -10, date) FROM test");
    accept("SELECT DATE_ADD('ms', 1, date::date) FROM test");
}
public void testValidDateTimeFunctionsOnTime() { public void testValidDateTimeFunctionsOnTime() {
accept("SELECT HOUR_OF_DAY(CAST(date AS TIME)) FROM test"); accept("SELECT HOUR_OF_DAY(CAST(date AS TIME)) FROM test");
accept("SELECT MINUTE_OF_HOUR(CAST(date AS TIME)) FROM test"); accept("SELECT MINUTE_OF_HOUR(CAST(date AS TIME)) FROM test");

View File

@ -0,0 +1,152 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.tree.SourceTests;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import static org.elasticsearch.xpack.sql.expression.Expressions.pipe;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral;
import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource;
/**
 * Node-transformation tests for {@link DateAddPipe}: properties-only
 * transforms, child replacement, mutation and copying.
 */
public class DateAddPipeTests extends AbstractNodeTestCase<DateAddPipe, Pipe> {

    @Override
    protected DateAddPipe randomInstance() {
        return randomDateAddPipe();
    }

    private Expression randomDateAddPipeExpression() {
        return randomDateAddPipe().expression();
    }

    /** Builds a DATE_ADD pipe out of random unit/count/datetime literals and a random zone. */
    public static DateAddPipe randomDateAddPipe() {
        return (DateAddPipe) new DateAdd(
            randomSource(),
            randomStringLiteral(),
            randomIntLiteral(),
            randomDatetimeLiteral(),
            randomZone())
            .makePipe();
    }

    @Override
    public void testTransform() {
        // test transforming only the properties (source, expression),
        // skipping the children (the three parameters of the function) which are tested separately
        DateAddPipe b1 = randomInstance();
        Expression newExpression = randomValueOtherThan(b1.expression(), this::randomDateAddPipeExpression);
        DateAddPipe newB = new DateAddPipe(
            b1.source(),
            newExpression,
            b1.first(),
            b1.second(),
            b1.third(),
            b1.zoneId());
        assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class));

        DateAddPipe b2 = randomInstance();
        Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource);
        newB = new DateAddPipe(
            newLoc,
            b2.expression(),
            b2.first(),
            b2.second(),
            b2.third(),
            b2.zoneId());
        assertEquals(newB,
            b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class));
    }

    @Override
    public void testReplaceChildren() {
        DateAddPipe b = randomInstance();
        Pipe newFirst = pipe(((Expression) randomValueOtherThan(b.first(), FunctionTestUtils::randomStringLiteral)));
        Pipe newSecond = pipe(((Expression) randomValueOtherThan(b.second(), FunctionTestUtils::randomIntLiteral)));
        Pipe newThird = pipe(((Expression) randomValueOtherThan(b.third(), FunctionTestUtils::randomDatetimeLiteral)));
        ZoneId newZoneId = randomValueOtherThan(b.zoneId(), ESTestCase::randomZone);
        DateAddPipe newB = new DateAddPipe(b.source(), b.expression(), b.first(), b.second(), b.third(), newZoneId);

        // replace only the first child; the remaining children must stay untouched
        // (previously the third child was not asserted here, letting a broken
        // replaceChildren that mangles it go unnoticed)
        ThreeArgsDateTimePipe transformed = newB.replaceChildren(newFirst, b.second(), b.third());
        assertEquals(transformed.source(), b.source());
        assertEquals(transformed.expression(), b.expression());
        assertEquals(transformed.first(), newFirst);
        assertEquals(transformed.second(), b.second());
        assertEquals(transformed.third(), b.third());

        // replace only the second child
        transformed = newB.replaceChildren(b.first(), newSecond, b.third());
        assertEquals(transformed.source(), b.source());
        assertEquals(transformed.expression(), b.expression());
        assertEquals(transformed.first(), b.first());
        assertEquals(transformed.second(), newSecond);
        assertEquals(transformed.third(), b.third());

        // replace only the third child
        transformed = newB.replaceChildren(b.first(), b.second(), newThird);
        assertEquals(transformed.expression(), b.expression());
        assertEquals(transformed.source(), b.source());
        assertEquals(transformed.first(), b.first());
        assertEquals(transformed.second(), b.second());
        assertEquals(transformed.third(), newThird);

        // replace all three at once
        transformed = newB.replaceChildren(newFirst, newSecond, newThird);
        assertEquals(transformed.source(), b.source());
        assertEquals(transformed.expression(), b.expression());
        assertEquals(transformed.first(), newFirst);
        assertEquals(transformed.second(), newSecond);
        assertEquals(transformed.third(), newThird);
    }

    @Override
    protected DateAddPipe mutate(DateAddPipe instance) {
        // each mutator changes at least one child plus the zone, guaranteeing inequality
        List<Function<DateAddPipe, DateAddPipe>> randoms = new ArrayList<>();
        randoms.add(f -> new DateAddPipe(f.source(), f.expression(),
            pipe(((Expression) randomValueOtherThan(f.first(), FunctionTestUtils::randomStringLiteral))),
            f.second(),
            f.third(),
            randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
        randoms.add(f -> new DateAddPipe(f.source(), f.expression(),
            f.first(),
            pipe(((Expression) randomValueOtherThan(f.second(), FunctionTestUtils::randomIntLiteral))),
            f.third(),
            randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
        randoms.add(f -> new DateAddPipe(f.source(), f.expression(),
            f.first(),
            f.second(),
            pipe(((Expression) randomValueOtherThan(f.third(), FunctionTestUtils::randomDatetimeLiteral))),
            randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
        randoms.add(f -> new DateAddPipe(f.source(), f.expression(),
            pipe(((Expression) randomValueOtherThan(f.first(), FunctionTestUtils::randomStringLiteral))),
            pipe(((Expression) randomValueOtherThan(f.second(), FunctionTestUtils::randomIntLiteral))),
            pipe(((Expression) randomValueOtherThan(f.third(), FunctionTestUtils::randomDatetimeLiteral))),
            randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
        return randomFrom(randoms).apply(instance);
    }

    @Override
    protected DateAddPipe copy(DateAddPipe instance) {
        return new DateAddPipe(
            instance.source(),
            instance.expression(),
            instance.first(),
            instance.second(),
            instance.third(),
            instance.zoneId());
    }
}

View File

@ -0,0 +1,201 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.AbstractSqlWireSerializingTestCase;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Literal;
import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor;
import org.elasticsearch.xpack.sql.tree.Source;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import static org.elasticsearch.xpack.sql.expression.Literal.NULL;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime;
import static org.elasticsearch.xpack.sql.proto.StringUtils.ISO_DATE_WITH_NANOS;
// Wire-serialization and behavior tests for DateAddProcessor: invalid inputs,
// null propagation, and exact addition/subtraction results for every date part.
public class DateAddProcessorTests extends AbstractSqlWireSerializingTestCase<DateAddProcessor> {
    // Random instance for round-trip serialization testing.
    public static DateAddProcessor randomDateAddProcessor() {
        return new DateAddProcessor(
            new ConstantProcessor(randomRealisticUnicodeOfLengthBetween(0, 128)),
            new ConstantProcessor(randomInt()),
            new ConstantProcessor(DateTimeTestUtils.nowWithMillisResolution()),
            randomZone());
    }
    @Override
    protected DateAddProcessor createTestInstance() {
        return randomDateAddProcessor();
    }
    @Override
    protected Reader<DateAddProcessor> instanceReader() {
        return DateAddProcessor::new;
    }
    @Override
    protected ZoneId instanceZoneId(DateAddProcessor instance) {
        return instance.zoneId();
    }
    @Override
    protected DateAddProcessor mutateInstance(DateAddProcessor instance) {
        // differs from the input in the unit length, the count and the zone
        return new DateAddProcessor(
            new ConstantProcessor(ESTestCase.randomRealisticUnicodeOfLength(128)),
            new ConstantProcessor(randomValueOtherThan((Integer) instance.second().process(null), ESTestCase::randomInt)),
            new ConstantProcessor(DateTimeTestUtils.nowWithMillisResolution()),
            randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone));
    }
    // Each mistyped or unknown argument must raise SqlIllegalArgumentException
    // with the exact message produced by DateAddProcessor.process().
    public void testInvalidInputs() {
        SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class,
            () -> new DateAdd(Source.EMPTY,
                l(5), l(10), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
        assertEquals("A string is required; received [5]", siae.getMessage());
        siae = expectThrows(SqlIllegalArgumentException.class,
            () -> new DateAdd(Source.EMPTY,
                l("days"), l("foo"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
        assertEquals("An integer is required; received [foo]", siae.getMessage());
        siae = expectThrows(SqlIllegalArgumentException.class,
            () -> new DateAdd(Source.EMPTY,
                l("days"), l(10), l("foo"), randomZone()).makePipe().asProcessor().process(null));
        assertEquals("A date/datetime is required; received [foo]", siae.getMessage());
        siae = expectThrows(SqlIllegalArgumentException.class,
            () -> new DateAdd(Source.EMPTY,
                l("invalid"), l(10), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
        assertEquals("A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, " +
                "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]",
            siae.getMessage());
        siae = expectThrows(SqlIllegalArgumentException.class,
            () -> new DateAdd(Source.EMPTY,
                l("quertar"), l(10), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
        assertEquals("Received value [quertar] is not valid date part to add; did you mean [quarter, quarters]?",
            siae.getMessage());
    }
    // A null in any (or every) argument short-circuits to a null result.
    public void testWithNulls() {
        assertNull(new DateAdd(Source.EMPTY,
            NULL, randomIntLiteral(), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
        assertNull(new DateAdd(Source.EMPTY,
            l("days"), NULL, randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
        assertNull(new DateAdd(Source.EMPTY,
            l("days"), randomIntLiteral(), NULL, randomZone()).makePipe().asProcessor().process(null));
        assertNull(new DateAdd(Source.EMPTY,
            NULL, NULL, NULL, randomZone()).makePipe().asProcessor().process(null));
    }
    // Exercises every supported unit (name or alias) with a positive and a
    // negative count against a fixed nanosecond-precision datetime; results are
    // rendered in the Etc/GMT-10 (UTC+10) session zone.
    public void testAddition() {
        ZoneId zoneId = ZoneId.of("Etc/GMT-10");
        Literal dateTime = l(dateTime(2019, 9, 3, 18, 10, 37, 123456789));
        // years
        assertEquals("2029-09-04T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("years"), l(10), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2009-09-04T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("years"), l(-10), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // quarters
        assertEquals("2022-03-04T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("quarters"), l(10), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2017-03-04T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("quarters"), l(-10), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // months
        assertEquals("2021-05-04T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("month"), l(20), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2018-01-04T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("month"), l(-20), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // days
        assertEquals("2020-05-01T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("day"), l(240), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-05-07T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("day"), l(-120), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // dayofyear behaves like plain days for addition
        assertEquals("2020-12-25T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("dayofyear"), l(478), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2018-05-14T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("dayofyear"), l(-478), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // weeks
        assertEquals("2021-12-22T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("weeks"), l(120), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2017-05-17T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("weeks"), l(-120), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // weekday behaves like plain days for addition
        assertEquals("2053-06-22T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("weekday"), l(12345), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("1985-11-16T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("weekday"), l(-12345), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // hours
        assertEquals("2020-07-05T05:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("hours"), l(7321), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2018-11-03T03:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("hours"), l(-7321), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // minutes
        assertEquals("2021-07-21T01:04:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("minute"), l(987654), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2017-10-18T07:16:37.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("minute"), l(-987654), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // seconds
        assertEquals("2020-02-01T11:51:31.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("seconds"), l(12987654), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-04-06T20:29:43.123456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("seconds"), l(-12987654), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // milliseconds ("ms")
        assertEquals("2019-09-19T04:56:42.555456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("ms"), l(1298765432), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-08-20T03:24:31.691456789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("ms"), l(-1298765432), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // microseconds ("mcs")
        assertEquals("2019-09-04T04:12:41.111110789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("mcs"), l(123987654), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-04T04:08:33.135802789+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("mcs"), l(-123987654), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        // nanoseconds
        assertEquals("2019-09-04T04:10:37.935855554+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("nanoseconds"), l(812398765), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-04T04:10:36.311058024+10:00",
            toString((ZonedDateTime) new DateAdd(Source.EMPTY, l("nanoseconds"), l(-812398765), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
    }
    // Renders a ZonedDateTime with full nanosecond precision for the assertions above.
    private String toString(ZonedDateTime dateTime) {
        return ISO_DATE_WITH_NANOS.format(dateTime);
    }
}

View File

@ -293,6 +293,23 @@ public class QueryTranslatorTests extends ESTestCase {
assertEquals(pattern, rq.format()); assertEquals(pattern, rq.format());
} }
// DATE_ADD on a mapped field cannot be translated into a range query, so the
// WHERE clause must fall back to a Painless script filter.
public void testTranslateDateAdd_WhereClause_Painless() {
    LogicalPlan p = plan("SELECT int FROM test WHERE DATE_ADD('quarter',int, date) > '2018-09-04'::date");
    assertTrue(p instanceof Project);
    assertTrue(p.children().get(0) instanceof Filter);
    Expression condition = ((Filter) p.children().get(0)).condition();
    // the condition references doc fields, so it is not constant-foldable
    assertFalse(condition.foldable());
    QueryTranslation translation = QueryTranslator.toQuery(condition, false);
    assertNull(translation.aggFilter);
    assertTrue(translation.query instanceof ScriptQuery);
    ScriptQuery sc = (ScriptQuery) translation.query;
    // the unit, both doc-value field names, the zone id and the comparison value
    // are all passed as script params (v0..v4)
    assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(InternalSqlScriptUtils.dateAdd(" +
        "params.v0,InternalSqlScriptUtils.docValue(doc,params.v1),InternalSqlScriptUtils.docValue(doc,params.v2)," +
        "params.v3),InternalSqlScriptUtils.asDateTime(params.v4)))",
        sc.script().toString());
    assertEquals("[{v=quarter}, {v=int}, {v=date}, {v=Z}, {v=2018-09-04T00:00:00.000Z}]", sc.script().params().toString());
}
public void testTranslateDateTrunc_WhereClause_Painless() { public void testTranslateDateTrunc_WhereClause_Painless() {
LogicalPlan p = plan("SELECT int FROM test WHERE DATE_TRUNC('month', date) > '2018-09-04'::date"); LogicalPlan p = plan("SELECT int FROM test WHERE DATE_TRUNC('month', date) > '2018-09-04'::date");
assertTrue(p instanceof Project); assertTrue(p instanceof Project);