SQL: Implement DATE_TRUNC function (#46473)

DATE_TRUNC(<truncate field>, <date/datetime>) is a function that truncates a timestamp to the specified field by zeroing out (or resetting to one) all less significant fields. The function is implemented according to the PostgreSQL spec: https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC

Closes: #46319
(cherry picked from commit b37e96712db1aace09f17b574eb02ff6b942a297)

This commit is contained in:
parent fa54de7789
commit d956509394
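
For orientation before the diff: a minimal sketch of the new function's semantics, exercised through the `DateTruncProcessor.process` helper this commit adds (assuming the x-pack SQL classes are on the classpath); the expected values mirror the csv-spec tests below. This is an illustration, not part of the change itself.

[source, java]
--------------------------------------------------
import java.time.ZonedDateTime;

import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor;

public class DateTruncDemo {
    public static void main(String[] args) {
        ZonedDateTime ts = ZonedDateTime.parse("2019-09-04T11:22:33.123Z");
        // Truncate to the quarter: prints 2019-07-01T00:00Z
        System.out.println(DateTruncProcessor.process("quarter", ts, "Z"));
        // Unit names are case-insensitive and abbreviations work ("mi" == "minute"): 2019-09-04T11:22Z
        System.out.println(DateTruncProcessor.process("mi", ts, "Z"));
    }
}
--------------------------------------------------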
@ -8,7 +8,7 @@

[[sql-functions-datetime-interval]]
==== Intervals

A common requirement when dealing with date/time in general revolves around
the notion of `interval`, a topic that is worth exploring in the context of {es} and {es-sql}.

{es} has comprehensive support for <<date-math, date math>> both inside <<date-math-index-names, index names>> and <<mapping-date-format, queries>>.

@ -248,6 +248,79 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[filterNow]

Currently, using a _precision_ greater than 3 doesn't make any difference to the output of the
function, as the maximum number of second fractional digits returned is 3 (milliseconds).

[[sql-functions-datetime-trunc]]
==== `DATE_TRUNC`

.Synopsis:
[source, sql]
--------------------------------------------------
DATE_TRUNC(
    string_exp,   <1>
    datetime_exp) <2>
--------------------------------------------------

*Input*:

<1> string expression denoting the unit to which the date/datetime should be truncated
<2> date/datetime expression

*Output*: datetime

.Description:

Truncate the date/datetime to the specified unit by setting all fields that are less significant than the specified
one to zero (or one, for day, day of week and month).
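
As an illustration of that rule (not part of this commit), here is a self-contained `java.time` sketch that reproduces the same truncation for two representative units, mirroring the `Part` enum introduced later in this change:

[source, java]
--------------------------------------------------
import java.time.ZonedDateTime;
import java.time.temporal.ChronoField;

public class TruncationSketch {
    // Quarter: month snaps to 1, 4, 7 or 10; day snaps to 1; time-of-day to 00:00
    static ZonedDateTime truncToQuarter(ZonedDateTime dt) {
        int firstMonthOfQuarter = ((dt.getMonthValue() - 1) / 3) * 3 + 1;
        return dt.with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter)
                 .with(ChronoField.DAY_OF_MONTH, 1)
                 .toLocalDate().atStartOfDay(dt.getZone());
    }

    // Week: snaps to Monday (ISO day-of-week 1), time-of-day to 00:00
    static ZonedDateTime truncToWeek(ZonedDateTime dt) {
        return dt.with(ChronoField.DAY_OF_WEEK, 1)
                 .toLocalDate().atStartOfDay(dt.getZone());
    }

    public static void main(String[] args) {
        ZonedDateTime ts = ZonedDateTime.parse("2019-09-04T11:22:33.123Z");
        System.out.println(truncToQuarter(ts)); // 2019-07-01T00:00Z
        System.out.println(truncToWeek(ts));    // 2019-09-02T00:00Z (a Monday)
    }
}
--------------------------------------------------

Note that the week unit snaps to Monday, which is why `2019-09-04` truncates to `2019-09-02` in the `truncateDateTimeWeek` example below.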

[cols="^,^"]
|===
2+h|Datetime truncation units

s|unit
s|abbreviations

| millennium  | millennia
| century     | centuries
| decade      | decades
| year        | years, yy, yyyy
| quarter     | quarters, qq, q
| month       | months, mm, m
| week        | weeks, wk, ww
| day         | days, dd, d
| hour        | hours, hh
| minute      | minutes, mi, n
| second      | seconds, ss, s
| millisecond | milliseconds, ms
| microsecond | microseconds, mcs
| nanosecond  | nanoseconds, ns
|===


[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[truncateDateTimeMillennium]
--------------------------------------------------

[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[truncateDateTimeWeek]
--------------------------------------------------

[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[truncateDateTimeMinutes]
--------------------------------------------------

[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[truncateDateDecades]
--------------------------------------------------

[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[truncateDateQuarter]
--------------------------------------------------

[[sql-functions-datetime-day]]
==== `DAY_OF_MONTH/DOM/DAY`

@ -51,6 +51,7 @@

** <<sql-functions-current-date>>
** <<sql-functions-current-time>>
** <<sql-functions-current-timestamp>>
** <<sql-functions-datetime-trunc>>
** <<sql-functions-datetime-day>>
** <<sql-functions-datetime-dow>>
** <<sql-functions-datetime-doy>>

@ -17,6 +17,7 @@ import org.elasticsearch.xpack.sql.proto.StringUtils;
import org.relique.jdbc.csv.CsvResultSet;

import java.io.IOException;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

@ -37,6 +38,9 @@ import static java.sql.Types.INTEGER;
import static java.sql.Types.REAL;
import static java.sql.Types.SMALLINT;
import static java.sql.Types.TINYINT;
import static java.time.ZoneOffset.UTC;
import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.logResultSetMetadata;
import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.resultSetCurrentData;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;

@ -107,7 +111,7 @@ public class JdbcAssert {
        ResultSetMetaData actualMeta = actual.getMetaData();

        if (logger != null) {
            logResultSetMetadata(actual, logger);
        }

        if (expectedMeta.getColumnCount() != actualMeta.getColumnCount()) {

@ -210,7 +214,7 @@
        assertTrue("Expected more data but no more entries found after [" + count + "]", actual.next());

        if (logger != null) {
            logger.info(resultSetCurrentData(actual));
        }

        for (int column = 1; column <= columns; column++) {

@ -264,6 +268,10 @@ public class JdbcAssert {
        else if (type == Types.TIMESTAMP || type == Types.TIMESTAMP_WITH_TIMEZONE) {
            assertEquals(msg, expected.getTimestamp(column), actual.getTimestamp(column));
        }
        // then date
        else if (type == Types.DATE) {
            assertEquals(msg, convertDateToSystemTimezone(expected.getDate(column)), actual.getDate(column));
        }
        // and floats/doubles
        else if (type == Types.DOUBLE) {
            assertEquals(msg, (double) expectedObject, (double) actualObject, lenientFloatingNumbers ? 1d : 0.0d);

@ -301,14 +309,14 @@ public class JdbcAssert {
        } catch (AssertionError ae) {
            if (logger != null && actual.next()) {
                logger.info("^^^ Assertion failure ^^^");
                logger.info(resultSetCurrentData(actual));
            }
            throw ae;
        }

        if (actual.next()) {
            fail("Elasticsearch [" + actual + "] still has data after [" + count + "] entries:\n"
                + resultSetCurrentData(actual));
        }
    }

@ -328,4 +336,9 @@ public class JdbcAssert {

        return columnType;
    }

    // Used to convert the DATE read from CSV file to a java.sql.Date at the System's timezone (-Dtests.timezone=XXXX)
    private static Date convertDateToSystemTimezone(Date date) {
        return new Date(date.toLocalDate().atStartOfDay(UTC).toInstant().toEpochMilli());
    }
}

@ -39,16 +39,17 @@ CURRENT_DATE |SCALAR
CURRENT_TIME     |SCALAR
CURRENT_TIMESTAMP|SCALAR
CURTIME          |SCALAR
DATE_TRUNC       |SCALAR
DAY              |SCALAR
DAYNAME          |SCALAR
DAYOFMONTH       |SCALAR
DAYOFWEEK        |SCALAR
DAYOFYEAR        |SCALAR
DAY_NAME         |SCALAR
DAY_OF_MONTH     |SCALAR
DAY_OF_WEEK      |SCALAR
DAY_OF_YEAR      |SCALAR
DOM              |SCALAR
DOW              |SCALAR
DOY              |SCALAR
HOUR             |SCALAR

@ -121,6 +121,151 @@ SELECT WEEK(birth_date) week, birth_date FROM test_emp WHERE WEEK(birth_date) >
2 |1953-01-07T00:00:00.000Z
;

selectDateTruncWithDateTime
schema::dt_hour:ts|dt_min:ts|dt_sec:ts|dt_millis:s|dt_micro:s|dt_nano:s
SELECT DATE_TRUNC('hour', '2019-09-04T11:22:33.123Z'::datetime) as dt_hour, DATE_TRUNC('minute', '2019-09-04T11:22:33.123Z'::datetime) as dt_min,
DATE_TRUNC('seconds', '2019-09-04T11:22:33.123Z'::datetime) as dt_sec, DATE_TRUNC('ms', '2019-09-04T11:22:33.123Z'::datetime)::string as dt_millis,
DATE_TRUNC('mcs', '2019-09-04T11:22:33.123Z'::datetime)::string as dt_micro, DATE_TRUNC('nanoseconds', '2019-09-04T11:22:33.123Z'::datetime)::string as dt_nano;

dt_hour | dt_min | dt_sec | dt_millis | dt_micro | dt_nano
-------------------------+---------------------------+--------------------------+--------------------------+--------------------------+-------------------------
2019-09-04T11:00:00.000Z | 2019-09-04T11:22:00.000Z | 2019-09-04T11:22:33.000Z | 2019-09-04T11:22:33.123Z | 2019-09-04T11:22:33.123Z | 2019-09-04T11:22:33.123Z
;

selectDateTruncWithDate
schema::dt_mil:ts|dt_cent:ts|dt_dec:ts|dt_year:ts|dt_quarter:ts|dt_month:ts|dt_week:ts|dt_day:ts
SELECT DATE_TRUNC('millennia', '2019-09-04'::date) as dt_mil, DATE_TRUNC('century', '2019-09-04'::date) as dt_cent,
DATE_TRUNC('decades', '2019-09-04'::date) as dt_dec, DATE_TRUNC('year', '2019-09-04'::date) as dt_year,
DATE_TRUNC('quarter', '2019-09-04'::date) as dt_quarter, DATE_TRUNC('month', '2019-09-04'::date) as dt_month,
DATE_TRUNC('week', '2019-09-04'::date) as dt_week, DATE_TRUNC('day', '2019-09-04'::date) as dt_day;

dt_mil | dt_cent | dt_dec | dt_year | dt_quarter | dt_month | dt_week | dt_day
-------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+-------------------------
2000-01-01T00:00:00.000Z | 2000-01-01T00:00:00.000Z | 2010-01-01T00:00:00.000Z | 2019-01-01T00:00:00.000Z | 2019-07-01T00:00:00.000Z | 2019-09-01T00:00:00.000Z | 2019-09-02T00:00:00.000Z | 2019-09-04T00:00:00.000Z
;

selectDateTruncWithField
schema::emp_no:i|birth_date:ts|dt_mil:ts|dt_cent:ts|dt_dec:ts|dt_year:ts|dt_quarter:ts|dt_month:ts|dt_week:ts|dt_day:ts
SELECT emp_no, birth_date, DATE_TRUNC('millennium', birth_date) as dt_mil, DATE_TRUNC('centuries', birth_date) as dt_cent,
DATE_TRUNC('decades', birth_date) as dt_dec, DATE_TRUNC('year', birth_date) as dt_year, DATE_TRUNC('quarter', birth_date) as dt_quarter,
DATE_TRUNC('month', birth_date) as dt_month, DATE_TRUNC('week', birth_date) as dt_week, DATE_TRUNC('day', birth_date) as dt_day
FROM test_emp WHERE emp_no >= 10032 AND emp_no <= 10042 ORDER BY 1;

emp_no | birth_date | dt_mil | dt_cent | dt_dec | dt_year | dt_quarter | dt_month | dt_week | dt_day
--------+-------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+-------------------------
10032 |1960-08-09 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1960-07-01 00:00:00.000Z | 1960-08-01 00:00:00.000Z | 1960-08-08 00:00:00.000Z | 1960-08-09 00:00:00.000Z
10033 |1956-11-14 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1950-01-01 00:00:00.000Z | 1956-01-01 00:00:00.000Z | 1956-10-01 00:00:00.000Z | 1956-11-01 00:00:00.000Z | 1956-11-12 00:00:00.000Z | 1956-11-14 00:00:00.000Z
10034 |1962-12-29 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1962-01-01 00:00:00.000Z | 1962-10-01 00:00:00.000Z | 1962-12-01 00:00:00.000Z | 1962-12-24 00:00:00.000Z | 1962-12-29 00:00:00.000Z
10035 |1953-02-08 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1950-01-01 00:00:00.000Z | 1953-01-01 00:00:00.000Z | 1953-01-01 00:00:00.000Z | 1953-02-01 00:00:00.000Z | 1953-02-02 00:00:00.000Z | 1953-02-08 00:00:00.000Z
10036 |1959-08-10 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1950-01-01 00:00:00.000Z | 1959-01-01 00:00:00.000Z | 1959-07-01 00:00:00.000Z | 1959-08-01 00:00:00.000Z | 1959-08-10 00:00:00.000Z | 1959-08-10 00:00:00.000Z
10037 |1963-07-22 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1963-01-01 00:00:00.000Z | 1963-07-01 00:00:00.000Z | 1963-07-01 00:00:00.000Z | 1963-07-22 00:00:00.000Z | 1963-07-22 00:00:00.000Z
10038 |1960-07-20 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1960-07-01 00:00:00.000Z | 1960-07-01 00:00:00.000Z | 1960-07-18 00:00:00.000Z | 1960-07-20 00:00:00.000Z
10039 |1959-10-01 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1950-01-01 00:00:00.000Z | 1959-01-01 00:00:00.000Z | 1959-10-01 00:00:00.000Z | 1959-10-01 00:00:00.000Z | 1959-09-28 00:00:00.000Z | 1959-10-01 00:00:00.000Z
10040 | null | null | null | null | null | null | null | null | null
10041 | null | null | null | null | null | null | null | null | null
10042 | null | null | null | null | null | null | null | null | null
;

selectDateTruncWithNullTruncateField
SELECT DATE_TRUNC(null, birth_date) AS dt FROM test_emp LIMIT 5;

dt:ts
------
null
null
null
null
null
;

selectDateTruncWithScalars
SELECT birth_date, DATE_TRUNC(CAST(CHAR(109) AS VARCHAR), birth_date + INTERVAL 12 YEAR) AS dt FROM test_emp ORDER BY 1 DESC NULLS LAST LIMIT 5;

birth_date:ts | dt:ts
-------------------------+-------------------------
1965-01-03 00:00:00.000Z | 1977-01-01 00:00:00.000Z
1964-10-18 00:00:00.000Z | 1976-10-01 00:00:00.000Z
1964-06-11 00:00:00.000Z | 1976-06-01 00:00:00.000Z
1964-06-02 00:00:00.000Z | 1976-06-01 00:00:00.000Z
1964-04-18 00:00:00.000Z | 1976-04-01 00:00:00.000Z
;

selectDateTruncWithTruncArgFromField
SELECT DATE_TRUNC(CONCAT(gender, 'illennium'), birth_date) AS dt FROM test_emp WHERE gender='M' ORDER BY 1 DESC LIMIT 2;

dt:ts
------------------------
0999-12-27 00:00:00.000Z
0999-12-27 00:00:00.000Z
;

selectDateTruncWithComplexExpressions
SELECT gender, birth_date, DATE_TRUNC(CASE WHEN gender = 'M' THEN CONCAT(gender, 'onths') WHEN gender = 'F' THEN 'decade' ELSE 'quarter' END,
birth_date + INTERVAL 10 month) AS dt FROM test_emp WHERE dt > '1954-07-01'::date ORDER BY emp_no LIMIT 10;

gender:s | birth_date:ts | dt:ts
------------+--------------------------+-------------------------
F | 1964-06-02 00:00:00.000Z | 1960-01-01 00:00:00.000Z
M | 1959-12-03 00:00:00.000Z | 1960-10-01 00:00:00.000Z
M | 1954-05-01 00:00:00.000Z | 1955-03-01 00:00:00.000Z
M | 1955-01-21 00:00:00.000Z | 1955-11-01 00:00:00.000Z
M | 1958-02-19 00:00:00.000Z | 1958-12-01 00:00:00.000Z
null | 1963-06-01 00:00:00.000Z | 1964-04-01 00:00:00.000Z
null | 1960-10-04 00:00:00.000Z | 1961-07-01 00:00:00.000Z
null | 1963-06-07 00:00:00.000Z | 1964-04-01 00:00:00.000Z
null | 1956-02-12 00:00:00.000Z | 1956-10-01 00:00:00.000Z
null | 1959-08-19 00:00:00.000Z | 1960-04-01 00:00:00.000Z
;

dateTruncOrderBy
schema::emp_no:i|hire_date:ts|dt:ts
SELECT emp_no, hire_date, DATE_TRUNC('quarter', hire_date) as dt FROM test_emp ORDER BY dt NULLS LAST, emp_no LIMIT 5;

emp_no | hire_date | dt
--------+--------------------------+-------------------------
10009 | 1985-02-18 00:00:00.000Z | 1985-01-01 00:00:00.000Z
10048 | 1985-02-24 00:00:00.000Z | 1985-01-01 00:00:00.000Z
10098 | 1985-05-13 00:00:00.000Z | 1985-04-01 00:00:00.000Z
10061 | 1985-09-17 00:00:00.000Z | 1985-07-01 00:00:00.000Z
10076 | 1985-07-09 00:00:00.000Z | 1985-07-01 00:00:00.000Z
;

dateTruncFilter
schema::emp_no:i|hire_date:ts|dt:ts
SELECT emp_no, hire_date, DATE_TRUNC('quarter', hire_date) as dt FROM test_emp WHERE DATE_TRUNC('quarter', hire_date) > '1994-07-01T00:00:00.000Z'::timestamp ORDER BY emp_no;

emp_no | hire_date | dt
--------+--------------------------+-------------------------
10016 | 1995-01-27 00:00:00.000Z | 1995-01-01 00:00:00.000Z
10019 | 1999-04-30 00:00:00.000Z | 1999-04-01 00:00:00.000Z
10022 | 1995-08-22 00:00:00.000Z | 1995-07-01 00:00:00.000Z
10024 | 1997-05-19 00:00:00.000Z | 1997-04-01 00:00:00.000Z
10026 | 1995-03-20 00:00:00.000Z | 1995-01-01 00:00:00.000Z
10054 | 1995-03-13 00:00:00.000Z | 1995-01-01 00:00:00.000Z
10084 | 1995-12-15 00:00:00.000Z | 1995-10-01 00:00:00.000Z
10093 | 1996-11-05 00:00:00.000Z | 1996-10-01 00:00:00.000Z
;

dateTruncGroupBy
schema::count:l|dt:ts
SELECT count(*) as count, DATE_TRUNC('decade', hire_date) dt FROM test_emp GROUP BY dt ORDER BY 2;

count | dt
--------+-------------------------
59 | 1980-01-01 00:00:00.000Z
41 | 1990-01-01 00:00:00.000Z
;

dateTruncHaving
schema::gender:s|dt:ts
SELECT gender, max(hire_date) dt FROM test_emp GROUP BY gender HAVING DATE_TRUNC('year', max(hire_date)) >= '1997-01-01T00:00:00.000Z'::timestamp ORDER BY 1;

gender | dt
--------+-------------------------
null | 1999-04-30 00:00:00.000Z
F | 1997-05-19 00:00:00.000Z
;

//
// Aggregate
//

@ -404,4 +549,4 @@ SELECT CAST (CAST (birth_date AS VARCHAR) AS TIMESTAMP) a FROM test_emp WHERE YE
a:ts
---------------
1965-01-03T00:00:00Z
;

@ -235,16 +235,17 @@ CURRENT_DATE |SCALAR
CURRENT_TIME     |SCALAR
CURRENT_TIMESTAMP|SCALAR
CURTIME          |SCALAR
DATE_TRUNC       |SCALAR
DAY              |SCALAR
DAYNAME          |SCALAR
DAYOFMONTH       |SCALAR
DAYOFWEEK        |SCALAR
DAYOFYEAR        |SCALAR
DAY_NAME         |SCALAR
DAY_OF_MONTH     |SCALAR
DAY_OF_WEEK      |SCALAR
DAY_OF_YEAR      |SCALAR
DOM              |SCALAR
DOW              |SCALAR
DOY              |SCALAR
HOUR             |SCALAR

@ -2411,6 +2412,58 @@ SELECT DAY_OF_MONTH(CAST('2018-02-19T10:23:27Z' AS TIMESTAMP)) AS day;
// end::dayOfMonth
;

truncateDateTimeMillennium
// tag::truncateDateTimeMillennium
SELECT DATE_TRUNC('millennium', '2019-09-04T11:22:33.123Z'::datetime) AS millennium;

       millennium
------------------------
2000-01-01T00:00:00.000Z
// end::truncateDateTimeMillennium
;

truncateDateTimeWeek
// tag::truncateDateTimeWeek
SELECT DATE_TRUNC('week', '2019-08-24T11:22:33.123Z'::datetime) AS week;

          week
------------------------
2019-08-19T00:00:00.000Z
// end::truncateDateTimeWeek
;

truncateDateTimeMinutes
// tag::truncateDateTimeMinutes
SELECT DATE_TRUNC('mi', '2019-09-04T11:22:33.123Z'::datetime) AS mins;

          mins
------------------------
2019-09-04T11:22:00.000Z
// end::truncateDateTimeMinutes
;

truncateDateDecades
schema::decades:ts
// tag::truncateDateDecades
SELECT DATE_TRUNC('decade', CAST('2019-09-04' AS DATE)) AS decades;

        decades
------------------------
2010-01-01T00:00:00.000Z
// end::truncateDateDecades
;

truncateDateQuarter
schema::quarter:ts
// tag::truncateDateQuarter
SELECT DATE_TRUNC('quarters', CAST('2019-09-04' AS DATE)) AS quarter;

        quarter
------------------------
2019-07-01T00:00:00.000Z
// end::truncateDateQuarter
;

constantDayOfWeek
// tag::dayOfWeek
SELECT DAY_OF_WEEK(CAST('2018-02-19T10:23:27Z' AS TIMESTAMP)) AS day;

@ -21,6 +21,7 @@ import static java.time.format.DateTimeFormatter.ISO_LOCAL_DATE;
import static java.time.temporal.ChronoField.HOUR_OF_DAY;
import static java.time.temporal.ChronoField.MILLI_OF_SECOND;
import static java.time.temporal.ChronoField.MINUTE_OF_HOUR;
import static java.time.temporal.ChronoField.NANO_OF_SECOND;
import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;

public final class StringUtils {

@ -40,6 +41,19 @@ public final class StringUtils {
        .appendOffsetId()
        .toFormatter(Locale.ROOT);
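
    // New nano-capable formatter: appendFraction(NANO_OF_SECOND, 3, 9, true) prints between 3 and 9
    // fractional-second digits, so sub-millisecond precision survives when results are rendered.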
    public static final DateTimeFormatter ISO_DATE_WITH_NANOS = new DateTimeFormatterBuilder()
        .parseCaseInsensitive()
        .append(ISO_LOCAL_DATE)
        .appendLiteral('T')
        .appendValue(HOUR_OF_DAY, 2)
        .appendLiteral(':')
        .appendValue(MINUTE_OF_HOUR, 2)
        .appendLiteral(':')
        .appendValue(SECOND_OF_MINUTE, 2)
        .appendFraction(NANO_OF_SECOND, 3, 9, true)
        .appendOffsetId()
        .toFormatter(Locale.ROOT);

    public static final DateTimeFormatter ISO_TIME_WITH_MILLIS = new DateTimeFormatterBuilder()
        .parseCaseInsensitive()
        .appendValue(HOUR_OF_DAY, 2)

@ -57,7 +57,6 @@ public final class TypeResolutions {
        "date", "time", "datetime", "numeric");
    }

    public static TypeResolution isGeo(Expression e, String operationName, ParamOrdinal paramOrd) {
        return isType(e, DataType::isGeo, operationName, paramOrd, "geo_point", "geo_shape");
    }

@ -31,6 +31,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.User;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentDate;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentDateTime;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentTime;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfWeek;

@ -104,8 +105,8 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.string.UCase;
import org.elasticsearch.xpack.sql.expression.predicate.conditional.Case;
import org.elasticsearch.xpack.sql.expression.predicate.conditional.Coalesce;
import org.elasticsearch.xpack.sql.expression.predicate.conditional.Greatest;
import org.elasticsearch.xpack.sql.expression.predicate.conditional.IfNull;
import org.elasticsearch.xpack.sql.expression.predicate.conditional.Iif;
import org.elasticsearch.xpack.sql.expression.predicate.conditional.Least;
import org.elasticsearch.xpack.sql.expression.predicate.conditional.NullIf;
import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mod;

@ -193,6 +194,7 @@ public class FunctionRegistry {
        def(DayOfMonth.class, DayOfMonth::new, "DAY_OF_MONTH", "DAYOFMONTH", "DAY", "DOM"),
        def(DayOfWeek.class, DayOfWeek::new, "DAY_OF_WEEK", "DAYOFWEEK", "DOW"),
        def(DayOfYear.class, DayOfYear::new, "DAY_OF_YEAR", "DAYOFYEAR", "DOY"),
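        // DATE_TRUNC is registered without function-name aliases; unit abbreviations
        // like "yy" or "mi" are resolved inside DateTrunc.Part instead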
        def(DateTrunc.class, DateTrunc::new, "DATE_TRUNC"),
        def(HourOfDay.class, HourOfDay::new, "HOUR_OF_DAY", "HOUR"),
        def(IsoDayOfWeek.class, IsoDayOfWeek::new, "ISO_DAY_OF_WEEK", "ISODAYOFWEEK", "ISODOW", "IDOW"),
        def(IsoWeekOfYear.class, IsoWeekOfYear::new, "ISO_WEEK_OF_YEAR", "ISOWEEKOFYEAR", "ISOWEEK", "IWOY", "IW"),

@ -8,13 +8,14 @@ package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistanceProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosqlProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryOptionalMathProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor;

@ -88,6 +89,7 @@ public final class Processors {
        entries.add(new Entry(Processor.class, NamedDateTimeProcessor.NAME, NamedDateTimeProcessor::new));
        entries.add(new Entry(Processor.class, NonIsoDateTimeProcessor.NAME, NonIsoDateTimeProcessor::new));
        entries.add(new Entry(Processor.class, QuarterProcessor.NAME, QuarterProcessor::new));
        entries.add(new Entry(Processor.class, DateTruncProcessor.NAME, DateTruncProcessor::new));
        // math
        entries.add(new Entry(Processor.class, BinaryMathProcessor.NAME, BinaryMathProcessor::new));
        entries.add(new Entry(Processor.class, BinaryOptionalMathProcessor.NAME, BinaryOptionalMathProcessor::new));

@ -0,0 +1,250 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.Nullability;
import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.util.StringUtils;

import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoField;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;

import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isDate;
import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isString;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor.process;
import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder;

public class DateTrunc extends BinaryScalarFunction {

    public enum Part {
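
        // Each Part maps a ZonedDateTime to its truncated value. Truncation of the large units
        // is zero-based here (e.g. MILLENNIUM maps 2019 to 2000 via year - year % 1000), which
        // matches the expected values in the csv-spec tests above.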
        MILLENNIUM(dt -> {
            int year = dt.getYear();
            int firstYearOfMillenium = year - (year % 1000);
            return dt
                .with(ChronoField.YEAR, firstYearOfMillenium)
                .with(ChronoField.MONTH_OF_YEAR, 1)
                .with(ChronoField.DAY_OF_MONTH, 1)
                .toLocalDate().atStartOfDay(dt.getZone());
        }, "millennia"),
        CENTURY(dt -> {
            int year = dt.getYear();
            int firstYearOfCentury = year - (year % 100);
            return dt
                .with(ChronoField.YEAR, firstYearOfCentury)
                .with(ChronoField.MONTH_OF_YEAR, 1)
                .with(ChronoField.DAY_OF_MONTH, 1)
                .toLocalDate().atStartOfDay(dt.getZone());
        }, "centuries"),
        DECADE(dt -> {
            int year = dt.getYear();
            int firstYearOfDecade = year - (year % 10);
            return dt
                .with(ChronoField.YEAR, firstYearOfDecade)
                .with(ChronoField.MONTH_OF_YEAR, 1)
                .with(ChronoField.DAY_OF_MONTH, 1)
                .toLocalDate().atStartOfDay(dt.getZone());
        }, "decades"),
        YEAR(dt -> dt
            .with(ChronoField.MONTH_OF_YEAR, 1)
            .with(ChronoField.DAY_OF_MONTH, 1)
            .toLocalDate().atStartOfDay(dt.getZone()),
            "years", "yy", "yyyy"),
        QUARTER(dt -> {
            int month = dt.getMonthValue();
            int firstMonthOfQuarter = (((month - 1) / 3) * 3) + 1;
            return dt
                .with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter)
                .with(ChronoField.DAY_OF_MONTH, 1)
                .toLocalDate().atStartOfDay(dt.getZone());
        }, "quarters", "qq", "q"),
        MONTH(dt -> dt
            .with(ChronoField.DAY_OF_MONTH, 1)
            .toLocalDate().atStartOfDay(dt.getZone()),
            "months", "mm", "m"),
        WEEK(dt -> dt
            .with(ChronoField.DAY_OF_WEEK, 1)
            .toLocalDate().atStartOfDay(dt.getZone()),
            "weeks", "wk", "ww"),
        DAY(dt -> dt.toLocalDate().atStartOfDay(dt.getZone()), "days", "dd", "d"),
        HOUR(dt -> {
            int hour = dt.getHour();
            return dt.toLocalDate().atStartOfDay(dt.getZone())
                .with(ChronoField.HOUR_OF_DAY, hour);
        }, "hours", "hh"),
        MINUTE(dt -> {
            int hour = dt.getHour();
            int minute = dt.getMinute();
            return dt.toLocalDate().atStartOfDay(dt.getZone())
                .with(ChronoField.HOUR_OF_DAY, hour)
                .with(ChronoField.MINUTE_OF_HOUR, minute);
        }, "minutes", "mi", "n"),
        SECOND(dt -> dt.with(ChronoField.NANO_OF_SECOND, 0), "seconds", "ss", "s"),
        MILLISECOND(dt -> {
            int micros = dt.get(ChronoField.MICRO_OF_SECOND);
            return dt.with(ChronoField.MILLI_OF_SECOND, (micros / 1000));
        }, "milliseconds", "ms"),
        MICROSECOND(dt -> {
            int nanos = dt.getNano();
            return dt.with(ChronoField.MICRO_OF_SECOND, (nanos / 1000));
        }, "microseconds", "mcs"),
        NANOSECOND(dt -> dt, "nanoseconds", "ns");

        private static final Map<String, Part> NAME_TO_PART;

        static {
            NAME_TO_PART = new HashMap<>();

            for (Part datePart : Part.values()) {
                String lowerCaseName = datePart.name().toLowerCase(Locale.ROOT);

                NAME_TO_PART.put(lowerCaseName, datePart);
                for (String alias : datePart.aliases) {
                    NAME_TO_PART.put(alias, datePart);
                }
            }
        }

        private Set<String> aliases;
        private Function<ZonedDateTime, ZonedDateTime> truncateFunction;

        Part(Function<ZonedDateTime, ZonedDateTime> truncateFunction, String... aliases) {
            this.truncateFunction = truncateFunction;
            this.aliases = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(aliases)));
        }

        public static Part resolveTruncate(String truncateTo) {
            return NAME_TO_PART.get(truncateTo.toLowerCase(Locale.ROOT));
        }

        public static List<String> findSimilar(String match) {
            return StringUtils.findSimilar(match, NAME_TO_PART.keySet());
        }

        public ZonedDateTime truncate(ZonedDateTime dateTime) {
            return truncateFunction.apply(dateTime);
        }
    }

    private final ZoneId zoneId;

    public DateTrunc(Source source, Expression truncateTo, Expression timestamp, ZoneId zoneId) {
        super(source, truncateTo, timestamp);
        this.zoneId = zoneId;
    }

    @Override
    public DataType dataType() {
        return DataType.DATETIME;
    }

    @Override
    protected TypeResolution resolveType() {
        TypeResolution resolution = isString(left(), sourceText(), Expressions.ParamOrdinal.FIRST);
        if (resolution.unresolved()) {
            return resolution;
        }

        if (left().foldable()) {
            String truncateToValue = (String) left().fold();
            if (truncateToValue != null && Part.resolveTruncate(truncateToValue) == null) {
                List<String> similar = Part.findSimilar(truncateToValue);
                if (similar.isEmpty()) {
                    return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases, found value [{}]",
                        sourceText(),
                        Part.values(),
                        Expressions.name(left())));
                } else {
                    return new TypeResolution(format(null, "Unknown value [{}] for first argument of [{}]; did you mean {}?",
                        Expressions.name(left()),
                        sourceText(),
                        similar));
                }
            }
        }
        resolution = isDate(right(), sourceText(), Expressions.ParamOrdinal.SECOND);
        if (resolution.unresolved()) {
            return resolution;
        }
        return TypeResolution.TYPE_RESOLVED;
    }

    @Override
    protected BinaryScalarFunction replaceChildren(Expression newTruncateTo, Expression newTimestamp) {
        return new DateTrunc(source(), newTruncateTo, newTimestamp, zoneId);
    }

    @Override
    protected NodeInfo<? extends Expression> info() {
        return NodeInfo.create(this, DateTrunc::new, left(), right(), zoneId);
    }

    @Override
    protected Pipe makePipe() {
        return new DateTruncPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), zoneId);
    }

    @Override
    public Nullability nullable() {
        return Nullability.TRUE;
    }

    @Override
    public Object fold() {
        return process(left().fold(), right().fold(), zoneId);
    }

    @Override
    protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) {
        return new ScriptTemplate(
            formatTemplate("{sql}.dateTrunc(" + leftScript.template() + "," + rightScript.template() + ",{})"),
            paramsBuilder()
                .script(leftScript.params())
                .script(rightScript.params())
                .variable(zoneId.getId())
                .build(),
            dataType());
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), zoneId);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }
        DateTrunc dateTrunc = (DateTrunc) o;
        return Objects.equals(zoneId, dateTrunc.zoneId);
    }
}

@ -0,0 +1,64 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;

import java.time.ZoneId;
import java.util.Objects;

public class DateTruncPipe extends BinaryPipe {

    private final ZoneId zoneId;

    public DateTruncPipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId) {
        super(source, expression, left, right);
        this.zoneId = zoneId;
    }

    ZoneId zoneId() {
        return zoneId;
    }

    @Override
    protected NodeInfo<DateTruncPipe> info() {
        return NodeInfo.create(this, DateTruncPipe::new, expression(), left(), right(), zoneId);
    }

    @Override
    protected BinaryPipe replaceChildren(Pipe left, Pipe right) {
        return new DateTruncPipe(source(), expression(), left, right, zoneId);
    }

    @Override
    public DateTruncProcessor asProcessor() {
        return new DateTruncProcessor(left().asProcessor(), right().asProcessor(), zoneId);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }
        DateTruncPipe that = (DateTruncPipe) o;
        return zoneId.equals(that.zoneId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), zoneId);
    }
}

@ -0,0 +1,106 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.common.io.SqlStreamInput;
import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor;
import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;

import java.io.IOException;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Objects;

import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc.Part;

public class DateTruncProcessor extends BinaryProcessor {

    public static final String NAME = "dtrunc";

    private final ZoneId zoneId;

    public DateTruncProcessor(Processor source1, Processor source2, ZoneId zoneId) {
        super(source1, source2);
        this.zoneId = zoneId;
    }

    public DateTruncProcessor(StreamInput in) throws IOException {
        super(in);
        zoneId = SqlStreamInput.asSqlStream(in).zoneId();
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    protected void doWrite(StreamOutput out) {
    }

    ZoneId zoneId() {
        return zoneId;
    }

    @Override
    protected Object doProcess(Object left, Object right) {
        return process(left, right, zoneId);
    }

    /**
     * Used in Painless scripting
     */
    public static Object process(Object source1, Object source2, String zoneId) {
        return process(source1, source2, ZoneId.of(zoneId));
    }

    static Object process(Object source1, Object source2, ZoneId zoneId) {
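        // Order of checks: nulls propagate as null (the function is nullable), then the
        // truncation unit is validated (with "did you mean" suggestions), then the datetime input.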
        if (source1 == null || source2 == null) {
            return null;
        }
        if (!(source1 instanceof String)) {
            throw new SqlIllegalArgumentException("A string is required; received [{}]", source1);
        }
        Part truncateDateField = Part.resolveTruncate((String) source1);
        if (truncateDateField == null) {
            List<String> similar = Part.findSimilar((String) source1);
            if (similar.isEmpty()) {
                throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]",
                    Part.values(), source1);
            } else {
                throw new SqlIllegalArgumentException("Received value [{}] is not valid date part for truncation; " +
                    "did you mean {}?", source1, similar);
            }
        }

        if (!(source2 instanceof ZonedDateTime)) {
            throw new SqlIllegalArgumentException("A datetime/date is required; received [{}]", source2);
        }

        return truncateDateField.truncate(((ZonedDateTime) source2).withZoneSameInstant(zoneId));
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        DateTruncProcessor that = (DateTruncProcessor) o;
        return zoneId.equals(that.zoneId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(zoneId);
    }
}

@ -10,14 +10,15 @@ import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.script.JodaCompatibleZonedDateTime;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape;
import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistanceProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosqlProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryOptionalMathProcessor.BinaryOptionalMathOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;

@ -369,6 +370,10 @@ public final class InternalSqlScriptUtils {
        return NonIsoDateTimeExtractor.WEEK_OF_YEAR.extract(asDateTime(dateTime), tzId);
    }
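
    // Entry point for generated Painless scripts: this is what the "{sql}.dateTrunc(...)" template
    // built in DateTrunc.asScriptFrom resolves to (see the whitelist entry added below);
    // tzId arrives as a zone-id string.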
    public static ZonedDateTime dateTrunc(String truncateTo, Object dateTime, String tzId) {
        return (ZonedDateTime) DateTruncProcessor.process(truncateTo, asDateTime(dateTime), tzId);
    }

    public static ZonedDateTime asDateTime(Object dateTime) {
        return (ZonedDateTime) asDateTime(dateTime, false);
    }

@ -148,4 +148,6 @@ public final class DateUtils {
        nano = nano - nano % (int) Math.pow(10, (9 - precision));
        return nano;
    }

}

@ -115,6 +115,7 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS
  String monthName(Object, String)
  Integer quarter(Object, String)
  Integer weekOfYear(Object, String)
  ZonedDateTime dateTrunc(String, Object, String)
  IntervalDayTime intervalDayTime(String, String)
  IntervalYearMonth intervalYearMonth(String, String)
  ZonedDateTime asDateTime(Object)

@ -203,6 +203,32 @@ public class VerifierErrorMessagesTests extends ESTestCase {
        assertEquals("1:8: Invalid datetime field [ABS]. Use any datetime function.", error("SELECT EXTRACT(ABS FROM date) FROM test"));
    }

    public void testDateTruncInvalidArgs() {
        assertEquals("1:8: first argument of [DATE_TRUNC(int, date)] must be [string], found value [int] type [integer]",
            error("SELECT DATE_TRUNC(int, date) FROM test"));
        assertEquals("1:8: second argument of [DATE_TRUNC(keyword, keyword)] must be [date or datetime], found value [keyword] " +
            "type [keyword]", error("SELECT DATE_TRUNC(keyword, keyword) FROM test"));
        assertEquals("1:8: first argument of [DATE_TRUNC('invalid', keyword)] must be one of [MILLENNIUM, CENTURY, DECADE, " +
            "YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, NANOSECOND] " +
            "or their aliases, found value ['invalid']",
            error("SELECT DATE_TRUNC('invalid', keyword) FROM test"));
        assertEquals("1:8: Unknown value ['millenioum'] for first argument of [DATE_TRUNC('millenioum', keyword)]; " +
            "did you mean [millennium, millennia]?",
            error("SELECT DATE_TRUNC('millenioum', keyword) FROM test"));
        assertEquals("1:8: Unknown value ['yyyz'] for first argument of [DATE_TRUNC('yyyz', keyword)]; " +
            "did you mean [yyyy, yy]?",
            error("SELECT DATE_TRUNC('yyyz', keyword) FROM test"));
    }

    public void testDateTruncValidArgs() {
        accept("SELECT DATE_TRUNC('decade', date) FROM test");
        accept("SELECT DATE_TRUNC('decades', date) FROM test");
        accept("SELECT DATE_TRUNC('day', date) FROM test");
        accept("SELECT DATE_TRUNC('days', date) FROM test");
        accept("SELECT DATE_TRUNC('dd', date) FROM test");
        accept("SELECT DATE_TRUNC('d', date) FROM test");
    }

    public void testValidDateTimeFunctionsOnTime() {
        accept("SELECT HOUR_OF_DAY(CAST(date AS TIME)) FROM test");
        accept("SELECT MINUTE_OF_HOUR(CAST(date AS TIME)) FROM test");

@ -9,6 +9,8 @@ package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.expression.Literal;

import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.BitSet;
import java.util.Iterator;

@ -27,7 +29,11 @@ public final class FunctionTestUtils {
    public static Literal randomIntLiteral() {
        return l(ESTestCase.randomInt());
    }

    public static Literal randomDatetimeLiteral() {
        return l(ZonedDateTime.ofInstant(Instant.ofEpochMilli(ESTestCase.randomLong()), ESTestCase.randomZone()));
    }

    public static class Combinations implements Iterable<BitSet> {
        private int n;
        private int k;

@ -41,6 +47,7 @@ public final class FunctionTestUtils {
    public Iterator<BitSet> iterator() {
        return new Iterator<BitSet>() {
            BitSet bs = new BitSet(n);

            {
                bs.set(0, k);
            }

@ -55,9 +62,9 @@ public final class FunctionTestUtils {
            BitSet old = (BitSet) bs.clone();
            int b = bs.previousClearBit(n - 1);
            int b1 = bs.previousSetBit(b);
            if (b1 == -1) {
                bs = null;
            } else {
                bs.clear(b1);
                bs.set(b1 + 1, b1 + (n - b) + 1);
                bs.clear(b1 + (n - b) + 1, n);

@ -20,6 +20,10 @@ public class DateTimeTestUtils {
        return ZonedDateTime.of(year, month, day, hour, minute, 0, 0, DateUtils.UTC);
    }

    public static ZonedDateTime dateTime(int year, int month, int day, int hour, int minute, int seconds, int nanos) {
        return ZonedDateTime.of(year, month, day, hour, minute, seconds, nanos, DateUtils.UTC);
    }

    public static ZonedDateTime dateTime(long millisSinceEpoch) {
        return DateUtils.asDateTime(millisSinceEpoch);
    }

@ -0,0 +1,133 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.tree.SourceTests;

import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;

import static org.elasticsearch.xpack.sql.expression.Expressions.pipe;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral;
import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource;

public class DateTruncPipeTests extends AbstractNodeTestCase<DateTruncPipe, Pipe> {

    @Override
    protected DateTruncPipe randomInstance() {
        return randomDateTruncPipe();
    }

    private Expression randomDateTruncPipeExpression() {
        return randomDateTruncPipe().expression();
    }

    public static DateTruncPipe randomDateTruncPipe() {
        return (DateTruncPipe) new DateTrunc(
            randomSource(),
            randomStringLiteral(),
            randomStringLiteral(),
            randomZone())
            .makePipe();
    }

    @Override
    public void testTransform() {
        // test transforming only the properties (source, expression),
        // skipping the children (the two parameters of the binary function) which are tested separately
        DateTruncPipe b1 = randomInstance();

        Expression newExpression = randomValueOtherThan(b1.expression(), this::randomDateTruncPipeExpression);
        DateTruncPipe newB = new DateTruncPipe(
            b1.source(),
            newExpression,
            b1.left(),
            b1.right(),
            b1.zoneId());
        assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class));

        DateTruncPipe b2 = randomInstance();
        Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource);
        newB = new DateTruncPipe(
            newLoc,
            b2.expression(),
            b2.left(),
            b2.right(),
            b2.zoneId());
        assertEquals(newB,
            b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class));
    }

    @Override
    public void testReplaceChildren() {
        DateTruncPipe b = randomInstance();
        Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), FunctionTestUtils::randomStringLiteral)));
        Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), FunctionTestUtils::randomDatetimeLiteral)));
        ZoneId newZoneId = randomValueOtherThan(b.zoneId(), ESTestCase::randomZone);
        DateTruncPipe newB =
            new DateTruncPipe(b.source(), b.expression(), b.left(), b.right(), newZoneId);
        BinaryPipe transformed = newB.replaceChildren(newLeft, b.right());

        assertEquals(transformed.left(), newLeft);
        assertEquals(transformed.source(), b.source());
        assertEquals(transformed.expression(), b.expression());
        assertEquals(transformed.right(), b.right());

        transformed = newB.replaceChildren(b.left(), newRight);
        assertEquals(transformed.left(), b.left());
        assertEquals(transformed.source(), b.source());
        assertEquals(transformed.expression(), b.expression());
        assertEquals(transformed.right(), newRight);

        transformed = newB.replaceChildren(newLeft, newRight);
        assertEquals(transformed.left(), newLeft);
        assertEquals(transformed.source(), b.source());
        assertEquals(transformed.expression(), b.expression());
        assertEquals(transformed.right(), newRight);
    }

    @Override
    protected DateTruncPipe mutate(DateTruncPipe instance) {
        List<Function<DateTruncPipe, DateTruncPipe>> randoms = new ArrayList<>();
        randoms.add(f -> new DateTruncPipe(f.source(),
            f.expression(),
            pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))),
            f.right(),
            randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
        randoms.add(f -> new DateTruncPipe(f.source(),
            f.expression(),
            f.left(),
            pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))),
            randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
        randoms.add(f -> new DateTruncPipe(f.source(),
            f.expression(),
            pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))),
            pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))),
            randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));

        return randomFrom(randoms).apply(instance);
    }

    @Override
    protected DateTruncPipe copy(DateTruncPipe instance) {
        return new DateTruncPipe(instance.source(),
            instance.expression(),
            instance.left(),
            instance.right(),
            instance.zoneId());
    }
}

@ -0,0 +1,160 @@
|
|||
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.AbstractSqlWireSerializingTestCase;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Literal;
import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.util.DateUtils;

import java.time.ZoneId;
import java.time.ZonedDateTime;

import static org.elasticsearch.xpack.sql.expression.Literal.NULL;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime;
import static org.elasticsearch.xpack.sql.proto.StringUtils.ISO_DATE_WITH_NANOS;

public class DateTruncProcessorTests extends AbstractSqlWireSerializingTestCase<DateTruncProcessor> {

    public static DateTruncProcessor randomDateTruncProcessor() {
        return new DateTruncProcessor(
            new ConstantProcessor(randomRealisticUnicodeOfLengthBetween(0, 128)),
            new ConstantProcessor(ZonedDateTime.now()),
            randomZone());
    }

    @Override
    protected DateTruncProcessor createTestInstance() {
        return randomDateTruncProcessor();
    }

    @Override
    protected Reader<DateTruncProcessor> instanceReader() {
        return DateTruncProcessor::new;
    }

    @Override
    protected ZoneId instanceZoneId(DateTruncProcessor instance) {
        return instance.zoneId();
    }

    @Override
    protected DateTruncProcessor mutateInstance(DateTruncProcessor instance) {
        return new DateTruncProcessor(
            new ConstantProcessor(ESTestCase.randomRealisticUnicodeOfLength(128)),
            new ConstantProcessor(ZonedDateTime.now()),
            randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone));
    }

    public void testInvalidInputs() {
        SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class,
            () -> new DateTrunc(Source.EMPTY, l(5), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
        assertEquals("A string is required; received [5]", siae.getMessage());

        siae = expectThrows(SqlIllegalArgumentException.class,
            () -> new DateTrunc(Source.EMPTY, l("days"), l("foo"), randomZone()).makePipe().asProcessor().process(null));
        assertEquals("A datetime/date is required; received [foo]", siae.getMessage());

        siae = expectThrows(SqlIllegalArgumentException.class,
            () -> new DateTrunc(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
        assertEquals("A value of [MILLENNIUM, CENTURY, DECADE, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, " +
                "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]",
            siae.getMessage());

        // a near-miss unit triggers a "did you mean" suggestion
        siae = expectThrows(SqlIllegalArgumentException.class,
            () -> new DateTrunc(Source.EMPTY, l("dacede"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
        assertEquals("Received value [dacede] is not valid date part for truncation; did you mean [decade, decades]?",
            siae.getMessage());
    }

    public void testWithNulls() {
        assertNull(new DateTrunc(Source.EMPTY, NULL, randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
        assertNull(new DateTrunc(Source.EMPTY, l("days"), NULL, randomZone()).makePipe().asProcessor().process(null));
        assertNull(new DateTrunc(Source.EMPTY, NULL, NULL, randomZone()).makePipe().asProcessor().process(null));
    }

    public void testTruncation() {
        ZoneId zoneId = ZoneId.of("Etc/GMT-10");
        // the literal is built in UTC; at Etc/GMT-10 (+10:00) it reads 2019-09-04T04:10:37.123456789
        Literal dateTime = l(dateTime(2019, 9, 3, 18, 10, 37, 123456789));

        assertEquals("2000-01-01T00:00:00.000+10:00",
            DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("millennia"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2000-01-01T00:00:00.000+10:00",
            DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("CENTURY"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2010-01-01T00:00:00.000+10:00",
            DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("decades"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-01-01T00:00:00.000+10:00",
            DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("years"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-07-01T00:00:00.000+10:00",
            toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("quarters"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-01T00:00:00.000+10:00",
            toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("month"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-02T00:00:00.000+10:00",
            toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("weeks"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-04T00:00:00.000+10:00",
            toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("days"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-04T04:00:00.000+10:00",
            toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("hh"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-04T04:10:00.000+10:00",
            toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("mi"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-04T04:10:37.000+10:00",
            toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("second"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-04T04:10:37.123+10:00",
            toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("ms"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-04T04:10:37.123456+10:00",
            toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("mcs"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
        assertEquals("2019-09-04T04:10:37.123456789+10:00",
            toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("nanoseconds"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
    }

    public void testTruncationEdgeCases() {
        // negative (BCE) years: the expected values show truncation working on
        // the year's remainder toward zero, e.g. -32999 -> -32990 for decades
        ZoneId zoneId = ZoneId.of("Etc/GMT-10");
        Literal dateTime = l(dateTime(-11412, 9, 3, 18, 10, 37, 123456789));
        assertEquals("-11000-01-01T00:00:00.000+10:00",
            DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("millennia"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));

        dateTime = l(dateTime(-12999, 9, 3, 18, 10, 37, 123456789));
        assertEquals("-12900-01-01T00:00:00.000+10:00",
            DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("centuries"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));

        dateTime = l(dateTime(-32999, 9, 3, 18, 10, 37, 123456789));
        assertEquals("-32990-01-01T00:00:00.000+10:00",
            DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("decades"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));

        dateTime = l(dateTime(-1234, 9, 3, 18, 10, 37, 123456789));
        assertEquals("-1234-08-29T00:00:00.000+10:00",
            DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("week"), dateTime, zoneId)
                .makePipe().asProcessor().process(null)));
    }

    private String toString(ZonedDateTime dateTime) {
        return ISO_DATE_WITH_NANOS.format(dateTime);
    }
}
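An editor's note on the edge cases above, not part of the commit: the expected values imply that year-based truncation drops the year's remainder toward zero (so -32999 truncated to a decade is -32990, not -33000), which matches Java's % operator keeping the sign of the dividend. A minimal standalone sketch in plain java.time reproducing the decade case, with hypothetical names; the actual logic lives in DateTruncProcessor:

    import java.time.ZonedDateTime;

    public class DecadeTruncSketch {
        // year - year % 10 truncates toward zero, matching the assertions
        // above (in Java, -32999 % 10 == -9, so -32999 -> -32990)
        static ZonedDateTime truncateToDecade(ZonedDateTime dt) {
            int year = dt.getYear();
            return dt.withYear(year - year % 10)
                .withDayOfYear(1)
                .toLocalDate()
                .atStartOfDay(dt.getZone());
        }

        public static void main(String[] args) {
            ZonedDateTime dt = ZonedDateTime.parse("2019-09-03T18:10:37.123456789+10:00");
            System.out.println(truncateToDecade(dt)); // prints 2010-01-01T00:00+10:00
        }
    }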
@@ -293,6 +293,22 @@ public class QueryTranslatorTests extends ESTestCase {
        assertEquals(pattern, rq.format());
    }

    public void testTranslateDateTrunc_WhereClause_Painless() {
        // DATE_TRUNC over a field reference is not foldable, so the comparison
        // is pushed down to Elasticsearch as a Painless script query
        LogicalPlan p = plan("SELECT int FROM test WHERE DATE_TRUNC('month', date) > '2018-09-04'::date");
        assertTrue(p instanceof Project);
        assertTrue(p.children().get(0) instanceof Filter);
        Expression condition = ((Filter) p.children().get(0)).condition();
        assertFalse(condition.foldable());
        QueryTranslation translation = QueryTranslator.toQuery(condition, false);
        assertNull(translation.aggFilter);
        assertTrue(translation.query instanceof ScriptQuery);
        ScriptQuery sc = (ScriptQuery) translation.query;
        assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(InternalSqlScriptUtils.dateTrunc(" +
            "params.v0,InternalSqlScriptUtils.docValue(doc,params.v1),params.v2),InternalSqlScriptUtils.asDateTime(params.v3)))",
            sc.script().toString());
        assertEquals("[{v=month}, {v=date}, {v=Z}, {v=2018-09-04T00:00:00.000Z}]", sc.script().params().toString());
    }

    public void testLikeOnInexact() {
        LogicalPlan p = plan("SELECT * FROM test WHERE some.string LIKE '%a%'");
        assertTrue(p instanceof Project);