SQL: Rename SQL type DATE to DATETIME (#37395)

* SQL: Rename SQL data type DATE to DATETIME

The SQL data type DATE holds only the date part (e.g. 2019-01-14), without
any time information. Previously, the SQL type DATE referred to the ES
`date` type, which also contains the time part along with timezone
information. To conform with standard SQL data types, `DATE` is renamed to
`DATETIME`, since it also includes the time. A new runtime SQL `DATE` data
type, which holds only the date part and meets the SQL standard, will be
introduced down the road.

Closes: #36440

* Address comments
Marios Trivyzas 2019-01-17 10:17:58 +02:00 committed by GitHub
parent b6e5ccaf8a
commit 1686c32ba9
56 changed files with 315 additions and 309 deletions
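
For illustration, a minimal JDBC sketch of what the rename means for clients (the `jdbc:es://` URL and a local cluster on port 9200 are assumptions of this sketch): the cast target is now `DATETIME`, while at the JDBC level the type still maps to `java.sql.Types.TIMESTAMP`, as the `EsType` change below shows.

["source","java"]
--------------------------------------------------
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.Timestamp;

public class DatetimeCastSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical local cluster; the es-jdbc URL is an assumption here.
        try (Connection con = DriverManager.getConnection("jdbc:es://localhost:9200");
             Statement st = con.createStatement();
             // After this change the cast target is DATETIME, not DATE.
             ResultSet rs = st.executeQuery(
                     "SELECT CAST('2019-01-14T12:29:25.000Z' AS DATETIME) AS dt")) {
            while (rs.next()) {
                // DATETIME still maps to java.sql.Types.TIMESTAMP, so
                // getTimestamp() remains the natural accessor.
                Timestamp ts = rs.getTimestamp("dt");
                System.out.println(ts);
            }
        }
    }
}
--------------------------------------------------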


@ -68,7 +68,7 @@ Which returns:
{"name": "author", "type": "text"},
{"name": "name", "type": "text"},
{"name": "page_count", "type": "short"},
{"name": "release_date", "type": "date"}
{"name": "release_date", "type": "datetime"}
],
"rows": [
["Peter F. Hamilton", "Pandora's Star", 768, "2004-03-02T00:00:00.000Z"],
@ -186,7 +186,7 @@ Douglas Adams |The Hitchhiker's Guide to the Galaxy|180 |1979-10-12T
In addition to the `query` and `cursor` fields, the request can
contain `fetch_size` and `time_zone`. `fetch_size` is a hint for how
many results to return in each page. SQL might choose to return more
or fewer results though. `time_zone` is the time zone to use for date
functions and date parsing. `time_zone` defaults to `utc` and can take
or fewer results though. `time_zone` is the time zone to use for datetime
functions and datetime parsing. `time_zone` defaults to `utc` and can take
any values documented
http://www.joda.org/joda-time/apidocs/org/joda/time/DateTimeZone.html[here].
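
As a rough illustration of such a request using Java 11's `HttpClient` (the `/_sql` endpoint path, the local host and port, and the `library` index name are assumptions for this sketch, not part of the change):

["source","java"]
--------------------------------------------------
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SqlQuerySketch {
    public static void main(String[] args) throws Exception {
        // Request body with the fields described above: time_zone applies to
        // datetime functions and datetime parsing, fetch_size is a page-size hint.
        String body = "{"
                + "\"query\": \"SELECT release_date FROM library\","
                + "\"fetch_size\": 5,"
                + "\"time_zone\": \"Europe/Athens\""
                + "}";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:9200/_sql")) // endpoint path assumed
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // After this change, the "release_date" column reports type "datetime".
        System.out.println(response.body());
    }
}
--------------------------------------------------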


@ -22,9 +22,9 @@ The table below shows the mapping between {es} and {es-sql}:
|==========================
s|{es}
s|{es-sql}
2+h| Index/Table date math
2+h| Index/Table datetime math
2+|<index-{now/M{YYYY.MM}}>
2+h| Query date math
2+h| Query date/time math
| 1y | INTERVAL 1 YEAR
| 2M | INTERVAL 2 MONTH
| 3w | INTERVAL 21 DAY
@ -57,7 +57,7 @@ s|Description
==== Operators
Basic arithmetic operators (`+`, `-`, etc) support date-time parameters as indicated below:
Basic arithmetic operators (`+`, `-`, etc) support date/time parameters as indicated below:
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -66,7 +66,7 @@ include-tagged::{sql-specs}/docs.csv-spec[dtIntervalPlusInterval]
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[dtDatePlusInterval]
include-tagged::{sql-specs}/docs.csv-spec[dtDateTimePlusInterval]
--------------------------------------------------
["source","sql",subs="attributes,callouts,macros"]
@ -81,7 +81,7 @@ include-tagged::{sql-specs}/docs.csv-spec[dtIntervalMinusInterval]
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[dtDateMinusInterval]
include-tagged::{sql-specs}/docs.csv-spec[dtDateTimeMinusInterval]
--------------------------------------------------
["source","sql",subs="attributes,callouts,macros"]
@ -146,18 +146,18 @@ include-tagged::{sql-specs}/docs.csv-spec[filterNow]
.Synopsis:
[source, sql]
--------------------------------------------------
DAY_OF_MONTH(date_exp<1>)
DAY_OF_MONTH(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the day of the month from a date.
Extract the day of the month from a date/datetime.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -170,18 +170,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayOfMonth]
.Synopsis:
[source, sql]
--------------------------------------------------
DAY_OF_WEEK(date_exp<1>)
DAY_OF_WEEK(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the day of the week from a date. Sunday is `1`, Monday is `2`, etc.
Extract the day of the week from a date/datetime. Sunday is `1`, Monday is `2`, etc.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -194,18 +194,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayOfWeek]
.Synopsis:
[source, sql]
--------------------------------------------------
DAY_OF_YEAR(date_exp<1>)
DAY_OF_YEAR(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the day of the year from a date.
Extract the day of the year from a date/datetime.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -218,18 +218,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayOfYear]
.Synopsis:
[source, sql]
--------------------------------------------------
DAY_NAME(date_exp<1>)
DAY_NAME(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: string
.Description:
Extract the day of the week from a datetime in text format (`Monday`, `Tuesday`...).
Extract the day of the week from a date/datetime in text format (`Monday`, `Tuesday`...).
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -242,18 +242,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayName]
.Synopsis:
[source, sql]
--------------------------------------------------
HOUR_OF_DAY(date_exp<1>)
HOUR_OF_DAY(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the hour of the day from a date.
Extract the hour of the day from a date/datetime.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -266,18 +266,18 @@ include-tagged::{sql-specs}/docs.csv-spec[hourOfDay]
.Synopsis:
[source, sql]
--------------------------------------------------
ISO_DAY_OF_WEEK(date_exp<1>)
ISO_DAY_OF_WEEK(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the day of the week from a date, following the https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard].
Extract the day of the week from a date/datetime, following the https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard].
Monday is `1`, Tuesday is `2`, etc.
["source","sql",subs="attributes,callouts,macros"]
@ -291,18 +291,18 @@ include-tagged::{sql-specs}/docs.csv-spec[isoDayOfWeek]
.Synopsis:
[source, sql]
--------------------------------------------------
ISO_WEEK_OF_YEAR(date_exp<1>)
ISO_WEEK_OF_YEAR(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the week of the year from a date, following https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. The first week
Extract the week of the year from a date/datetime, following https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. The first week
of a year is the first week with a majority (4 or more) of its days in January.
["source","sql",subs="attributes,callouts,macros"]
@ -316,18 +316,18 @@ include-tagged::{sql-specs}/docs.csv-spec[isoWeekOfYear]
.Synopsis:
[source, sql]
--------------------------------------------------
MINUTE_OF_DAY(date_exp<1>)
MINUTE_OF_DAY(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the minute of the day from a date.
Extract the minute of the day from a date/datetime.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -340,18 +340,18 @@ include-tagged::{sql-specs}/docs.csv-spec[minuteOfDay]
.Synopsis:
[source, sql]
--------------------------------------------------
MINUTE_OF_HOUR(date_exp<1>)
MINUTE_OF_HOUR(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the minute of the hour from a date.
Extract the minute of the hour from a date/datetime.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -364,18 +364,18 @@ include-tagged::{sql-specs}/docs.csv-spec[minuteOfHour]
.Synopsis:
[source, sql]
--------------------------------------------------
MONTH(date_exp<1>)
MONTH(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the month of the year from a date.
Extract the month of the year from a date/datetime.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -388,18 +388,18 @@ include-tagged::{sql-specs}/docs.csv-spec[monthOfYear]
.Synopsis:
[source, sql]
--------------------------------------------------
MONTH_NAME(date_exp<1>)
MONTH_NAME(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: string
.Description:
Extract the month from a datetime in text format (`January`, `February`...).
Extract the month from a date/datetime in text format (`January`, `February`...).
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -417,20 +417,20 @@ NOW()
*Input*: _none_
*Output*: date/time
*Output*: datetime
.Description:
This function offers the same functionality as <<sql-functions-current-timestamp,CURRENT_TIMESTAMP()>> function: returns the date/time
when the current query reached the server. This method always returns the same value within a query.
This function offers the same functionality as the <<sql-functions-current-timestamp,CURRENT_TIMESTAMP()>> function: it returns
the datetime when the current query reached the server. This method always returns the same value within a query.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[nowFunction]
--------------------------------------------------
Typically, this function (as well as its twin <<sql-functions-current-timestamp,CURRENT_TIMESTAMP())>> function is used for
relative date/time filtering:
Typically, this function (as well as its twin <<sql-functions-current-timestamp,CURRENT_TIMESTAMP()>>) is used
for relative date/time filtering:
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -443,18 +443,18 @@ include-tagged::{sql-specs}/docs.csv-spec[filterNow]
.Synopsis:
[source, sql]
--------------------------------------------------
SECOND_OF_MINUTE(date_exp<1>)
SECOND_OF_MINUTE(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the second of the minute from a date.
Extract the second of the minute from a date/datetime.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -467,18 +467,18 @@ include-tagged::{sql-specs}/docs.csv-spec[secondOfMinute]
.Synopsis:
[source, sql]
--------------------------------------------------
QUARTER(date_exp<1>)
QUARTER(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the year quarter the date falls in.
Extract the year quarter the date/datetime falls in.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -491,18 +491,18 @@ include-tagged::{sql-specs}/docs.csv-spec[quarter]
.Synopsis:
[source, sql]
--------------------------------------------------
WEEK_OF_YEAR(date_exp<1>)
WEEK_OF_YEAR(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the week of the year from a date.
Extract the week of the year from a date/datetime.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -515,18 +515,18 @@ include-tagged::{sql-specs}/docs.csv-spec[weekOfYear]
.Synopsis:
[source, sql]
--------------------------------------------------
YEAR(date_exp<1>)
YEAR(datetime_exp<1>)
--------------------------------------------------
*Input*:
<1> date expression
<1> date/datetime expression
*Output*: integer
.Description:
Extract the year from a date.
Extract the year from a date/datetime.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
@ -539,19 +539,19 @@ include-tagged::{sql-specs}/docs.csv-spec[year]
.Synopsis:
[source, sql]
--------------------------------------------------
EXTRACT(datetime_function<1> FROM date_exp<2>)
EXTRACT(datetime_function<1> FROM datetime_exp<2>)
--------------------------------------------------
*Input*:
<1> datetime function name
<2> date expression
<1> date/time function name
<2> date/datetime expression
*Output*: integer
.Description:
Extract fields from a datetime by specifying the name of a <<sql-functions-datetime,datetime function>>.
Extract fields from a date/datetime by specifying the name of a <<sql-functions-datetime,datetime function>>.
The following
["source","sql",subs="attributes,callouts,macros"]


@ -50,7 +50,7 @@ or date/time fields:
["source","sql",subs="attributes,callouts,macros"]
----
include-tagged::{sql-specs}/docs.csv-spec[histogramDate]
include-tagged::{sql-specs}/docs.csv-spec[histogramDateTime]
----
Expressions inside the histogram are also supported as long as the
@ -74,5 +74,5 @@ Instead one can rewrite the query to move the expression on the histogram _insid
["source","sql",subs="attributes,callouts,macros"]
----
include-tagged::{sql-specs}/docs.csv-spec[histogramDateExpression]
include-tagged::{sql-specs}/docs.csv-spec[histogramDateTimeExpression]
----


@ -37,7 +37,7 @@ include-tagged::{sql-specs}/docs.csv-spec[conversionIntToStringCast]
["source","sql",subs="attributes,callouts,macros"]
----
include-tagged::{sql-specs}/docs.csv-spec[conversionStringToDateCast]
include-tagged::{sql-specs}/docs.csv-spec[conversionStringToDateTimeCast]
----


@ -28,7 +28,7 @@ public enum EsType implements SQLType {
OBJECT( Types.STRUCT),
NESTED( Types.STRUCT),
BINARY( Types.VARBINARY),
DATE( Types.TIMESTAMP),
DATETIME( Types.TIMESTAMP),
IP( Types.VARCHAR),
INTERVAL_YEAR( ExtraTypes.INTERVAL_YEAR),
INTERVAL_MONTH( ExtraTypes.INTERVAL_MONTH),
@ -64,4 +64,4 @@ public enum EsType implements SQLType {
public Integer getVendorTypeNumber() {
return type;
}
}
}


@ -367,7 +367,7 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement {
|| x instanceof Time
|| x instanceof java.util.Date)
{
if (dataType == EsType.DATE) {
if (dataType == EsType.DATETIME) {
// converting to {@code java.util.Date} because this is the type supported by {@code XContentBuilder} for serialization
java.util.Date dateToSet;
if (x instanceof Timestamp) {
@ -532,4 +532,4 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement {
public long executeLargeUpdate() throws SQLException {
throw new SQLFeatureNotSupportedException("Batching not supported");
}
}
}


@ -245,7 +245,7 @@ class JdbcResultSet implements ResultSet, JdbcWrapper {
// TODO: the B6 appendix of the jdbc spec does mention CHAR, VARCHAR, LONGVARCHAR, DATE, TIMESTAMP as supported
// jdbc types that should be handled by getDate and getTime methods. From all of those we support VARCHAR and
// TIMESTAMP. Should we consider the VARCHAR conversion as a later enhancement?
if (EsType.DATE == type) {
if (EsType.DATETIME == type) {
// the cursor can return an Integer if the date-since-epoch is small enough, XContentParser (Jackson) will
// return the "smallest" data type for numbers when parsing
// TODO: this should probably be handled server side


@ -213,7 +213,7 @@ final class TypeConverter {
return doubleValue(v); // Double might be represented as string for infinity and NaN values
case FLOAT:
return floatValue(v); // Float might be represented as string for infinity and NaN values
case DATE:
case DATETIME:
return JdbcDateUtils.asDateTimeField(v, JdbcDateUtils::asTimestamp, Timestamp::new);
case INTERVAL_YEAR:
case INTERVAL_MONTH:
@ -467,21 +467,21 @@ final class TypeConverter {
}
private static Date asDate(Object val, EsType columnType, String typeString) throws SQLException {
if (columnType == EsType.DATE) {
if (columnType == EsType.DATETIME) {
return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asDate, Date::new);
}
return failConversion(val, columnType, typeString, Date.class);
}
private static Time asTime(Object val, EsType columnType, String typeString) throws SQLException {
if (columnType == EsType.DATE) {
if (columnType == EsType.DATETIME) {
return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asTime, Time::new);
}
return failConversion(val, columnType, typeString, Time.class);
}
private static Timestamp asTimestamp(Object val, EsType columnType, String typeString) throws SQLException {
if (columnType == EsType.DATE) {
if (columnType == EsType.DATETIME) {
return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asTimestamp, Timestamp::new);
}
return failConversion(val, columnType, typeString, Timestamp.class);
@ -538,4 +538,4 @@ final class TypeConverter {
}
return Math.round(x);
}
}
}


@ -37,7 +37,7 @@ final class TypeUtils {
private static final Set<EsType> SIGNED_TYPE = EnumSet.of(EsType.BYTE,
EsType.SHORT, EsType.INTEGER, EsType.LONG,
EsType.FLOAT, EsType.HALF_FLOAT, EsType.SCALED_FLOAT, EsType.DOUBLE, EsType.DATE);
EsType.FLOAT, EsType.HALF_FLOAT, EsType.SCALED_FLOAT, EsType.DOUBLE, EsType.DATETIME);
static {
@ -52,16 +52,16 @@ final class TypeUtils {
aMap.put(String.class, EsType.KEYWORD);
aMap.put(byte[].class, EsType.BINARY);
aMap.put(String.class, EsType.KEYWORD);
aMap.put(Timestamp.class, EsType.DATE);
aMap.put(Timestamp.class, EsType.DATETIME);
// apart from the mappings in {@code DataType} three more Java classes can be mapped to a {@code JDBCType.TIMESTAMP}
// according to B-4 table from the jdbc4.2 spec
aMap.put(Calendar.class, EsType.DATE);
aMap.put(GregorianCalendar.class, EsType.DATE);
aMap.put(java.util.Date.class, EsType.DATE);
aMap.put(java.sql.Date.class, EsType.DATE);
aMap.put(java.sql.Time.class, EsType.DATE);
aMap.put(LocalDateTime.class, EsType.DATE);
aMap.put(Calendar.class, EsType.DATETIME);
aMap.put(GregorianCalendar.class, EsType.DATETIME);
aMap.put(java.util.Date.class, EsType.DATETIME);
aMap.put(java.sql.Date.class, EsType.DATETIME);
aMap.put(java.sql.Time.class, EsType.DATETIME);
aMap.put(LocalDateTime.class, EsType.DATETIME);
CLASS_TO_TYPE = Collections.unmodifiableMap(aMap);
Map<EsType, Class<?>> types = new LinkedHashMap<>();
@ -77,7 +77,7 @@ final class TypeUtils {
types.put(EsType.KEYWORD, String.class);
types.put(EsType.TEXT, String.class);
types.put(EsType.BINARY, byte[].class);
types.put(EsType.DATE, Timestamp.class);
types.put(EsType.DATETIME, Timestamp.class);
types.put(EsType.IP, String.class);
types.put(EsType.INTERVAL_YEAR, Period.class);
types.put(EsType.INTERVAL_MONTH, Period.class);
@ -172,4 +172,4 @@ final class TypeUtils {
}
return dataType;
}
}
}


@ -29,7 +29,7 @@ import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.EsType.BINARY;
import static org.elasticsearch.xpack.sql.jdbc.EsType.BOOLEAN;
import static org.elasticsearch.xpack.sql.jdbc.EsType.BYTE;
import static org.elasticsearch.xpack.sql.jdbc.EsType.DATE;
import static org.elasticsearch.xpack.sql.jdbc.EsType.DATETIME;
import static org.elasticsearch.xpack.sql.jdbc.EsType.DOUBLE;
import static org.elasticsearch.xpack.sql.jdbc.EsType.FLOAT;
import static org.elasticsearch.xpack.sql.jdbc.EsType.HALF_FLOAT;
@ -371,13 +371,13 @@ public class JdbcPreparedStatementTests extends ESTestCase {
Timestamp someTimestamp = new Timestamp(randomLong());
jps.setTimestamp(1, someTimestamp);
assertEquals(someTimestamp.getTime(), ((Date)value(jps)).getTime());
assertEquals(DATE, jdbcType(jps));
assertEquals(DATETIME, jdbcType(jps));
Calendar nonDefaultCal = randomCalendar();
// February 29th, 2016. 01:17:55 GMT = 1456708675000 millis since epoch
jps.setTimestamp(1, new Timestamp(1456708675000L), nonDefaultCal);
assertEquals(1456708675000L, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal));
assertEquals(DATE, jdbcType(jps));
assertEquals(DATETIME, jdbcType(jps));
long beforeEpochTime = randomLongBetween(Long.MIN_VALUE, 0);
jps.setTimestamp(1, new Timestamp(beforeEpochTime), nonDefaultCal);
@ -404,7 +404,7 @@ public class JdbcPreparedStatementTests extends ESTestCase {
Calendar nonDefaultCal = randomCalendar();
jps.setTime(1, time, nonDefaultCal);
assertEquals(4675000, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal));
assertEquals(DATE, jdbcType(jps));
assertEquals(DATETIME, jdbcType(jps));
assertTrue(value(jps) instanceof java.util.Date);
jps.setObject(1, time, Types.VARCHAR);
@ -426,13 +426,13 @@ public class JdbcPreparedStatementTests extends ESTestCase {
java.sql.Date someSqlDate = new java.sql.Date(randomLong());
jps.setDate(1, someSqlDate);
assertEquals(someSqlDate.getTime(), ((Date)value(jps)).getTime());
assertEquals(DATE, jdbcType(jps));
assertEquals(DATETIME, jdbcType(jps));
someSqlDate = new java.sql.Date(randomLong());
Calendar nonDefaultCal = randomCalendar();
jps.setDate(1, someSqlDate, nonDefaultCal);
assertEquals(someSqlDate.getTime(), convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal));
assertEquals(DATE, jdbcType(jps));
assertEquals(DATETIME, jdbcType(jps));
assertTrue(value(jps) instanceof java.util.Date);
jps.setObject(1, someSqlDate, Types.VARCHAR);
@ -456,7 +456,7 @@ public class JdbcPreparedStatementTests extends ESTestCase {
jps.setObject(1, someCalendar);
assertEquals(someCalendar.getTime(), value(jps));
assertEquals(DATE, jdbcType(jps));
assertEquals(DATETIME, jdbcType(jps));
assertTrue(value(jps) instanceof java.util.Date);
jps.setObject(1, someCalendar, Types.VARCHAR);
@ -466,7 +466,7 @@ public class JdbcPreparedStatementTests extends ESTestCase {
Calendar nonDefaultCal = randomCalendar();
jps.setObject(1, nonDefaultCal);
assertEquals(nonDefaultCal.getTime(), value(jps));
assertEquals(DATE, jdbcType(jps));
assertEquals(DATETIME, jdbcType(jps));
}
public void testThrownExceptionsWhenSettingCalendarValues() throws SQLException {
@ -483,7 +483,7 @@ public class JdbcPreparedStatementTests extends ESTestCase {
jps.setObject(1, someDate);
assertEquals(someDate, value(jps));
assertEquals(DATE, jdbcType(jps));
assertEquals(DATETIME, jdbcType(jps));
assertTrue(value(jps) instanceof java.util.Date);
jps.setObject(1, someDate, Types.VARCHAR);
@ -505,7 +505,7 @@ public class JdbcPreparedStatementTests extends ESTestCase {
jps.setObject(1, ldt);
assertEquals(Date.class, value(jps).getClass());
assertEquals(DATE, jdbcType(jps));
assertEquals(DATETIME, jdbcType(jps));
assertTrue(value(jps) instanceof java.util.Date);
jps.setObject(1, ldt, Types.VARCHAR);


@ -41,8 +41,8 @@ public class TypeConverterTests extends ESTestCase {
public void testTimestampAsNative() throws Exception {
DateTime now = DateTime.now();
assertThat(convertAsNative(now, EsType.DATE), instanceOf(Timestamp.class));
assertEquals(now.getMillis(), ((Timestamp) convertAsNative(now, EsType.DATE)).getTime());
assertThat(convertAsNative(now, EsType.DATETIME), instanceOf(Timestamp.class));
assertEquals(now.getMillis(), ((Timestamp) convertAsNative(now, EsType.DATETIME)).getTime());
}
private Object convertAsNative(Object value, EsType type) throws Exception {


@ -64,11 +64,12 @@ public abstract class SqlProtocolTestCase extends ESRestTestCase {
}
public void testDateTimes() throws IOException {
assertQuery("SELECT CAST('2019-01-14T12:29:25.000Z' AS DATE)", "CAST('2019-01-14T12:29:25.000Z' AS DATE)", "date",
"2019-01-14T12:29:25.000Z", 24);
assertQuery("SELECT CAST(-26853765751000 AS DATE)", "CAST(-26853765751000 AS DATE)", "date", "1119-01-15T12:37:29.000Z", 24);
assertQuery("SELECT CAST(CAST('-26853765751000' AS BIGINT) AS DATE)", "CAST(CAST('-26853765751000' AS BIGINT) AS DATE)", "date",
"1119-01-15T12:37:29.000Z", 24);
assertQuery("SELECT CAST('2019-01-14T12:29:25.000Z' AS DATETIME)", "CAST('2019-01-14T12:29:25.000Z' AS DATETIME)",
"datetime", "2019-01-14T12:29:25.000Z", 24);
assertQuery("SELECT CAST(-26853765751000 AS DATETIME)", "CAST(-26853765751000 AS DATETIME)",
"datetime", "1119-01-15T12:37:29.000Z", 24);
assertQuery("SELECT CAST(CAST('-26853765751000' AS BIGINT) AS DATETIME)", "CAST(CAST('-26853765751000' AS BIGINT) AS DATETIME)",
"datetime", "1119-01-15T12:37:29.000Z", 24);
}
public void testIPs() throws IOException {


@ -201,10 +201,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getByte("test_date"));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Byte]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Byte]", of(randomDate)),
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Byte.class));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Byte]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Byte]", of(randomDate)),
sqle.getMessage());
});
}
@ -324,10 +324,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getShort("test_date"));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Short]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Short]", of(randomDate)),
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Short.class));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Short]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Short]", of(randomDate)),
sqle.getMessage());
});
}
@ -439,10 +439,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getInt("test_date"));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Integer]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Integer]", of(randomDate)),
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Integer.class));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Integer]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Integer]", of(randomDate)),
sqle.getMessage());
});
}
@ -541,10 +541,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getLong("test_date"));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Long]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Long]", of(randomDate)),
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Long.class));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Long]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Long]", of(randomDate)),
sqle.getMessage());
});
}
@ -624,10 +624,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getDouble("test_date"));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Double]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Double]", of(randomDate)),
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Double.class));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Double]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Double]", of(randomDate)),
sqle.getMessage());
});
}
@ -707,10 +707,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getFloat("test_date"));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Float]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Float]", of(randomDate)),
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Float.class));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Float]", of(randomDate)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Float]", of(randomDate)),
sqle.getMessage());
});
}
@ -768,7 +768,7 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
assertEquals("Expected: <true> but was: <false> for field " + fld, true, results.getObject(fld, Boolean.class));
}
SQLException sqle = expectThrows(SQLException.class, () -> results.getBoolean("test_date"));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Boolean]", of(randomDate1)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate1)),
sqle.getMessage());
results.next();
@ -778,11 +778,11 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
assertEquals("Expected: <false> but was: <true> for field " + fld, false, results.getObject(fld, Boolean.class));
}
sqle = expectThrows(SQLException.class, () -> results.getBoolean("test_date"));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Boolean]", of(randomDate2)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate2)),
sqle.getMessage());
sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Boolean.class));
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Boolean]", of(randomDate2)),
assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate2)),
sqle.getMessage());
results.next();


@ -223,7 +223,7 @@ SELECT HISTOGRAM(salary, 5000) AS h FROM test_emp GROUP BY h;
70000
;
histogramDate
histogramDateTime
schema::h:ts|c:l
SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h;
@ -247,7 +247,7 @@ null |10
;
histogramDateWithCountAndOrder
histogramDateTimeWithCountAndOrder
schema::h:ts|c:l
SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC;
@ -270,7 +270,7 @@ SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp
null |10
;
histogramDateWithMonthOnTop
histogramDateTimeWithMonthOnTop
schema::h:i|c:l
SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC;
@ -286,7 +286,7 @@ SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP B
null |10
;
histogramDateWithYearOnTop
histogramDateTimeWithYearOnTop
schema::h:i|c:l
SELECT HISTOGRAM(YEAR(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC;
h | c


@ -18,13 +18,13 @@ SELECT gender g FROM "test_emp" WHERE emp_no < 10020 GROUP BY g ORDER BY gender;
groupByOnTextOnAliasOrderDesc
SELECT gender g FROM "test_emp" WHERE emp_no < 10020 GROUP BY g ORDER BY g DESC;
groupByOnDate
groupByOnDateTime
SELECT birth_date b FROM "test_emp" GROUP BY birth_date ORDER BY birth_date DESC;
groupByOnDateWithWhereClause
groupByOnDateTimeWithWhereClause
SELECT birth_date b FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date ORDER BY birth_date DESC;
groupByOnDateWithWhereAndLimit
groupByOnDateTimeWithWhereAndLimit
SELECT birth_date b FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date ORDER BY birth_date DESC LIMIT 1;
groupByOnDateOnAlias
groupByOnDateTimeOnAlias
SELECT birth_date b FROM "test_emp" WHERE emp_no < 10020 GROUP BY b ORDER BY birth_date DESC;
groupByOnNumber
@ -62,13 +62,13 @@ SELECT gender g, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY g, l
groupByMultiOnTextOnAliasOrderDesc
SELECT gender g, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY g, l ORDER BY g, l ASC;
groupByMultiOnDate
groupByMultiOnDateTime
SELECT birth_date b, languages l FROM "test_emp" GROUP BY birth_date, languages ORDER BY birth_date DESC, languages;
groupByMultiOnDateWithWhereClause
groupByMultiOnDateTimeWithWhereClause
SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date, languages ORDER BY birth_date DESC, languages;
groupByMultiOnDateWithWhereAndLimit
groupByMultiOnDateTimeWithWhereAndLimit
SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date, languages ORDER BY birth_date DESC, languages LIMIT 1;
groupByMultiOnDateOnAlias
groupByMultiOnDateTimeOnAlias
SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY b, l ORDER BY birth_date DESC, languages;
groupByMultiAddScalar
@ -248,7 +248,7 @@ aggMinWithCastAndFilter
SELECT gender g, CAST(MIN(emp_no) AS SMALLINT) m, COUNT(1) c FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender ORDER BY gender;
aggMinWithAlias
SELECT gender g, MIN(emp_no) m FROM "test_emp" GROUP BY g ORDER BY gender;
aggMinOnDate
aggMinOnDateTime
SELECT gender, MIN(birth_date) m FROM "test_emp" GROUP BY gender ORDER BY gender;
// Conditional MIN
@ -304,7 +304,7 @@ aggMaxAndCountWithFilterAndLimit
SELECT gender g, MAX(emp_no) m, COUNT(1) c FROM "test_emp" WHERE emp_no > 10000 GROUP BY gender ORDER BY gender LIMIT 1;
aggMaxWithAlias
SELECT gender g, MAX(emp_no) m FROM "test_emp" GROUP BY g ORDER BY gender;
aggMaxOnDate
aggMaxOnDateTime
SELECT gender, MAX(birth_date) m FROM "test_emp" GROUP BY gender ORDER BY gender;
aggAvgAndMaxWithLikeFilter
SELECT CAST(AVG(salary) AS LONG) AS avg, CAST(SUM(salary) AS LONG) AS s FROM "test_emp" WHERE first_name LIKE 'G%';
@ -482,9 +482,9 @@ selectCountWhereIsNull
SELECT COUNT(*) count FROM test_emp WHERE first_name IS NULL;
selectLanguagesCountWithNullsAndGroupByLanguage
SELECT languages l, COUNT(*) c FROM test_emp GROUP BY languages ORDER BY languages;
selectHireDateGroupByHireDate
selectHireDateTimeGroupByHireDateTime
SELECT hire_date HD, COUNT(*) c FROM test_emp GROUP BY hire_date ORDER BY hire_date DESC;
selectHireDateGroupByHireDate
selectHireDateTimeGroupByHireDateTime
SELECT hire_date HD, COUNT(*) c FROM test_emp GROUP BY hire_date ORDER BY hire_date DESC;
selectSalaryGroupBySalary
SELECT salary, COUNT(*) c FROM test_emp GROUP BY salary ORDER BY salary DESC;


@ -28,13 +28,13 @@ DESCRIBE test_alias;
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |date
birth_date |TIMESTAMP |datetime
dep |STRUCT |nested
dep.dep_id |VARCHAR |keyword
dep.dep_name |VARCHAR |text
dep.dep_name.keyword|VARCHAR |keyword
dep.from_date |TIMESTAMP |date
dep.to_date |TIMESTAMP |date
dep.from_date |TIMESTAMP |datetime
dep.to_date |TIMESTAMP |datetime
emp_no |INTEGER |integer
extra |STRUCT |object
extra.info |STRUCT |object
@ -44,7 +44,7 @@ extra_no |INTEGER |integer
first_name |VARCHAR |text
first_name.keyword |VARCHAR |keyword
gender |VARCHAR |keyword
hire_date |TIMESTAMP |date
hire_date |TIMESTAMP |datetime
languages |TINYINT |byte
last_name |VARCHAR |text
last_name.keyword |VARCHAR |keyword
@ -56,13 +56,13 @@ DESCRIBE "test_*";
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |date
birth_date |TIMESTAMP |datetime
dep |STRUCT |nested
dep.dep_id |VARCHAR |keyword
dep.dep_name |VARCHAR |text
dep.dep_name.keyword|VARCHAR |keyword
dep.from_date |TIMESTAMP |date
dep.to_date |TIMESTAMP |date
dep.from_date |TIMESTAMP |datetime
dep.to_date |TIMESTAMP |datetime
emp_no |INTEGER |integer
extra |STRUCT |object
extra.info |STRUCT |object
@ -72,7 +72,7 @@ extra_no |INTEGER |integer
first_name |VARCHAR |text
first_name.keyword |VARCHAR |keyword
gender |VARCHAR |keyword
hire_date |TIMESTAMP |date
hire_date |TIMESTAMP |datetime
languages |TINYINT |byte
last_name |VARCHAR |text
last_name.keyword |VARCHAR |keyword


@ -228,13 +228,13 @@ DESCRIBE LIKE 'test_emp';
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |date
birth_date |TIMESTAMP |datetime
dep |STRUCT |nested
dep.dep_id |VARCHAR |keyword
dep.dep_name |VARCHAR |text
dep.dep_name.keyword|VARCHAR |keyword
dep.from_date |TIMESTAMP |date
dep.to_date |TIMESTAMP |date
dep.from_date |TIMESTAMP |datetime
dep.to_date |TIMESTAMP |datetime
emp_no |INTEGER |integer
extra |STRUCT |object
extra.info |STRUCT |object
@ -244,7 +244,7 @@ extra_no |INTEGER |integer
first_name |VARCHAR |text
first_name.keyword |VARCHAR |keyword
gender |VARCHAR |keyword
hire_date |TIMESTAMP |date
hire_date |TIMESTAMP |datetime
languages |TINYINT |byte
last_name |VARCHAR |text
last_name.keyword |VARCHAR |keyword
@ -256,13 +256,13 @@ DESCRIBE LIKE 'test_emp%';
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |date
birth_date |TIMESTAMP |datetime
dep |STRUCT |nested
dep.dep_id |VARCHAR |keyword
dep.dep_name |VARCHAR |text
dep.dep_name.keyword|VARCHAR |keyword
dep.from_date |TIMESTAMP |date
dep.to_date |TIMESTAMP |date
dep.from_date |TIMESTAMP |datetime
dep.to_date |TIMESTAMP |datetime
emp_no |INTEGER |integer
extra |STRUCT |object
extra.info |STRUCT |object
@ -272,7 +272,7 @@ extra_no |INTEGER |integer
first_name |VARCHAR |text
first_name.keyword |VARCHAR |keyword
gender |VARCHAR |keyword
hire_date |TIMESTAMP |date
hire_date |TIMESTAMP |datetime
languages |TINYINT |byte
last_name |VARCHAR |text
last_name.keyword |VARCHAR |keyword
@ -284,18 +284,18 @@ DESCRIBE "test_emp";
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |date
birth_date |TIMESTAMP |datetime
dep |STRUCT |nested
dep.dep_id |VARCHAR |keyword
dep.dep_name |VARCHAR |text
dep.dep_name.keyword|VARCHAR |keyword
dep.from_date |TIMESTAMP |date
dep.to_date |TIMESTAMP |date
dep.from_date |TIMESTAMP |datetime
dep.to_date |TIMESTAMP |datetime
emp_no |INTEGER |integer
first_name |VARCHAR |text
first_name.keyword |VARCHAR |keyword
gender |VARCHAR |keyword
hire_date |TIMESTAMP |date
hire_date |TIMESTAMP |datetime
languages |TINYINT |byte
last_name |VARCHAR |text
last_name.keyword |VARCHAR |keyword
@ -310,18 +310,18 @@ DESCRIBE "test_*,-test_alias*";
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |date
birth_date |TIMESTAMP |datetime
dep |STRUCT |nested
dep.dep_id |VARCHAR |keyword
dep.dep_name |VARCHAR |text
dep.dep_name.keyword|VARCHAR |keyword
dep.from_date |TIMESTAMP |date
dep.to_date |TIMESTAMP |date
dep.from_date |TIMESTAMP |datetime
dep.to_date |TIMESTAMP |datetime
emp_no |INTEGER |integer
first_name |VARCHAR |text
first_name.keyword |VARCHAR |keyword
gender |VARCHAR |keyword
hire_date |TIMESTAMP |date
hire_date |TIMESTAMP |datetime
languages |TINYINT |byte
last_name |VARCHAR |text
last_name.keyword |VARCHAR |keyword


@ -141,7 +141,7 @@ INTERVAL 1 DAY + INTERVAL 53 MINUTES
;
datePlusIntervalInline
SELECT CAST('1969-05-13T12:34:56' AS DATE) + INTERVAL 49 YEARS AS result;
SELECT CAST('1969-05-13T12:34:56' AS DATETIME) + INTERVAL 49 YEARS AS result;
result
--------------------
@ -183,7 +183,7 @@ SELECT -2 * INTERVAL '1 23:45' DAY TO MINUTES AS result;
;
dateMinusInterval
SELECT CAST('2018-05-13T12:34:56' AS DATE) - INTERVAL '2-8' YEAR TO MONTH AS result;
SELECT CAST('2018-05-13T12:34:56' AS DATETIME) - INTERVAL '2-8' YEAR TO MONTH AS result;
result
--------------------
@ -288,4 +288,4 @@ SELECT birth_date, MAX(hire_date) - INTERVAL 1 YEAR AS f FROM test_emp GROUP BY
1952-05-15T00:00:00Z|1953
1952-06-13T00:00:00Z|1953
1952-07-08T00:00:00Z|1953
;
;


@ -54,7 +54,7 @@ d:i | l:s
;
//
// Date
// DateTime
//
dateTimeIsoDayOfWeek
SELECT ISO_DAY_OF_WEEK(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY ISO_DAY_OF_WEEK(birth_date);
@ -380,4 +380,4 @@ Berni
Bezalel
Bojan
;
;


@ -10,7 +10,7 @@
// This has implications on the results, which could change given specific locales where the rules for determining the start of a year are different.
//
// Date
// DateTime
//
dateTimeDay
@ -25,10 +25,10 @@ SELECT MONTH(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORD
dateTimeYear
SELECT YEAR(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
monthNameFromStringDate
monthNameFromStringDateTime
SELECT MONTHNAME(CAST('2018-09-03' AS TIMESTAMP)) month FROM "test_emp" limit 1;
dayNameFromStringDate
dayNameFromStringDateTime
SELECT DAYNAME(CAST('2018-09-03' AS TIMESTAMP)) day FROM "test_emp" limit 1;
quarterSelect


@ -14,18 +14,18 @@ DESCRIBE emp;
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |date
birth_date |TIMESTAMP |datetime
dep |STRUCT |nested
dep.dep_id |VARCHAR |keyword
dep.dep_name |VARCHAR |text
dep.dep_name.keyword|VARCHAR |keyword
dep.from_date |TIMESTAMP |date
dep.to_date |TIMESTAMP |date
dep.from_date |TIMESTAMP |datetime
dep.to_date |TIMESTAMP |datetime
emp_no |INTEGER |integer
first_name |VARCHAR |text
first_name.keyword |VARCHAR |keyword
gender |VARCHAR |keyword
hire_date |TIMESTAMP |date
hire_date |TIMESTAMP |datetime
languages |TINYINT |byte
last_name |VARCHAR |text
last_name.keyword |VARCHAR |keyword
@ -53,18 +53,18 @@ SHOW COLUMNS IN emp;
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |date
birth_date |TIMESTAMP |datetime
dep |STRUCT |nested
dep.dep_id |VARCHAR |keyword
dep.dep_name |VARCHAR |text
dep.dep_name.keyword|VARCHAR |keyword
dep.from_date |TIMESTAMP |date
dep.to_date |TIMESTAMP |date
dep.from_date |TIMESTAMP |datetime
dep.to_date |TIMESTAMP |datetime
emp_no |INTEGER |integer
first_name |VARCHAR |text
first_name.keyword |VARCHAR |keyword
gender |VARCHAR |keyword
hire_date |TIMESTAMP |date
hire_date |TIMESTAMP |datetime
languages |TINYINT |byte
last_name |VARCHAR |text
last_name.keyword |VARCHAR |keyword
@ -746,9 +746,9 @@ SELECT HISTOGRAM(salary % 100, 10) AS h, COUNT(*) AS c FROM emp GROUP BY h;
// end::histogramNumericExpression
;
histogramDate
histogramDateTime
schema::h:ts|c:l
// tag::histogramDate
// tag::histogramDateTime
SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) AS c FROM emp GROUP BY h;
@ -770,7 +770,7 @@ null |10
1963-02-07T00:00:00Z|7
1964-02-02T00:00:00Z|5
// end::histogramDate
// end::histogramDateTime
;
expressionOnHistogramNotAllowed-Ignore
@ -778,9 +778,9 @@ expressionOnHistogramNotAllowed-Ignore
SELECT MONTH(HISTOGRAM(birth_date), 2)) AS h, COUNT(*) as c FROM emp GROUP BY h ORDER BY h DESC;
// end::expressionOnHistogramNotAllowed
histogramDateExpression
histogramDateTimeExpression
schema::h:i|c:l
// tag::histogramDateExpression
// tag::histogramDateTimeExpression
SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM emp GROUP BY h ORDER BY h DESC;
h | c
@ -794,12 +794,12 @@ SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM emp GROUP BY h O
0 |6
null |10
// end::histogramDateExpression
// end::histogramDateTimeExpression
;
///////////////////////////////
//
// Date/Time
// DateTime/Time
//
///////////////////////////////
@ -816,14 +816,14 @@ SELECT INTERVAL 1 DAY + INTERVAL 53 MINUTES AS result;
;
dtDatePlusInterval
// tag::dtDatePlusInterval
SELECT CAST('1969-05-13T12:34:56' AS DATE) + INTERVAL 49 YEARS AS result;
dtDateTimePlusInterval
// tag::dtDateTimePlusInterval
SELECT CAST('1969-05-13T12:34:56' AS DATETIME) + INTERVAL 49 YEARS AS result;
result
--------------------
2018-05-13T12:34:56Z
// end::dtDatePlusInterval
// end::dtDateTimePlusInterval
;
dtMinusInterval
@ -848,14 +848,14 @@ SELECT INTERVAL '1' DAY - INTERVAL '2' HOURS AS result;
;
dtDateMinusInterval
// tag::dtDateMinusInterval
SELECT CAST('2018-05-13T12:34:56' AS DATE) - INTERVAL '2-8' YEAR TO MONTH AS result;
dtDateTimeMinusInterval
// tag::dtDateTimeMinusInterval
SELECT CAST('2018-05-13T12:34:56' AS DATETIME) - INTERVAL '2-8' YEAR TO MONTH AS result;
result
--------------------
2015-09-13T12:34:56Z
// end::dtDateMinusInterval
// end::dtDateTimeMinusInterval
;
dtIntervalMul
@ -1360,14 +1360,14 @@ SELECT CAST(123 AS VARCHAR) AS string;
// end::conversionIntToStringCast
;
conversionStringToDateCast
// tag::conversionStringToDateCast
conversionStringToDateTimeCast
// tag::conversionStringToDateTimeCast
SELECT YEAR(CAST('2018-05-19T11:23:45Z' AS TIMESTAMP)) AS year;
year
---------------
2018
// end::conversionStringToDateCast
// end::conversionStringToDateTimeCast
;
///////////////////////////////
@ -1918,7 +1918,7 @@ elastic
///////////////////////////////
//
// Date-Time functions
// DateTime-Time functions
//
///////////////////////////////


@ -8,18 +8,18 @@ DESCRIBE test_emp;
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |date
birth_date |TIMESTAMP |datetime
dep |STRUCT |nested
dep.dep_id |VARCHAR |keyword
dep.dep_name |VARCHAR |text
dep.dep_name.keyword|VARCHAR |keyword
dep.from_date |TIMESTAMP |date
dep.to_date |TIMESTAMP |date
dep.from_date |TIMESTAMP |datetime
dep.to_date |TIMESTAMP |datetime
emp_no |INTEGER |integer
first_name |VARCHAR |text
first_name.keyword |VARCHAR |keyword
gender |VARCHAR |keyword
hire_date |TIMESTAMP |date
hire_date |TIMESTAMP |datetime
languages |TINYINT |byte
last_name |VARCHAR |text
last_name.keyword |VARCHAR |keyword


@ -3,7 +3,7 @@
//
dateTimeOverNull
SELECT YEAR(CAST(NULL AS DATE)) d;
SELECT YEAR(CAST(NULL AS DATETIME)) d;
d:i
null


@ -33,7 +33,7 @@ SELECT null, 'test1', 'name.keyword', 12, 'KEYWORD', 0, 2147483647, null, null,
null, null, 12, 0, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO'
FROM DUAL
UNION ALL
SELECT null, 'test2', 'date', 93, 'DATE', 24, 8, null, null,
SELECT null, 'test2', 'date', 93, 'DATETIME', 24, 8, null, null,
1, -- columnNullable
null, null, 9, 3, null, 1, 'YES', null, null, null, null, 'NO', 'NO'
FROM DUAL


@ -384,7 +384,7 @@ public class IndexResolver {
// TODO: to check whether isSearchable/isAggregateable takes into account the presence of the normalizer
boolean normalized = false;
return new KeywordEsField(fieldName, props, isAggregateable, length, normalized);
case DATE:
case DATETIME:
return new DateEsField(fieldName, props, isAggregateable);
case UNSUPPORTED:
return new UnsupportedEsField(fieldName, typeName);
@ -481,4 +481,4 @@ public class IndexResolver {
}
}
}
}
}


@ -128,7 +128,7 @@ public class FieldHitExtractor implements HitExtractor {
if (values instanceof Map) {
throw new SqlIllegalArgumentException("Objects (returned by [{}]) are not supported", fieldName);
}
if (dataType == DataType.DATE) {
if (dataType == DataType.DATETIME) {
if (values instanceof String) {
return DateUtils.of(Long.parseLong(values.toString()));
}


@ -80,7 +80,7 @@ public final class Expressions {
}
public static Nullability nullable(List<? extends Expression> exps) {
return Nullability.and(exps.stream().map(Expression::nullable).toArray(Nullability[]::new));
return Nullability.and(exps.stream().map(Expression::nullable).toArray(Nullability[]::new));
}
public static boolean foldable(List<? extends Expression> exps) {
@ -171,25 +171,25 @@ public final class Expressions {
}
public static TypeResolution typeMustBeDate(Expression e, String operationName, ParamOrdinal paramOrd) {
return typeMustBe(e, dt -> dt == DataType.DATE, operationName, paramOrd, "date");
return typeMustBe(e, dt -> dt == DataType.DATETIME, operationName, paramOrd, "date");
}
public static TypeResolution typeMustBeNumericOrDate(Expression e, String operationName, ParamOrdinal paramOrd) {
return typeMustBe(e, dt -> dt.isNumeric() || dt == DataType.DATE, operationName, paramOrd, "numeric", "date");
return typeMustBe(e, dt -> dt.isNumeric() || dt == DataType.DATETIME, operationName, paramOrd, "numeric", "date");
}
public static TypeResolution typeMustBe(Expression e,
Predicate<DataType> predicate,
String operationName,
ParamOrdinal paramOrd,
String... acceptedTypes) {
Predicate<DataType> predicate,
String operationName,
ParamOrdinal paramOrd,
String... acceptedTypes) {
return predicate.test(e.dataType()) || DataTypes.isNull(e.dataType())?
TypeResolution.TYPE_RESOLVED :
new TypeResolution(format(Locale.ROOT, "[%s]%s argument must be [%s], found value [%s] type [%s]",
operationName,
paramOrd == null || paramOrd == ParamOrdinal.DEFAULT ? "" : " " + paramOrd.name().toLowerCase(Locale.ROOT),
Strings.arrayToDelimitedString(acceptedTypes, " or "),
Expressions.name(e),
e.dataType().esType));
operationName,
paramOrd == null || paramOrd == ParamOrdinal.DEFAULT ? "" : " " + paramOrd.name().toLowerCase(Locale.ROOT),
Strings.arrayToDelimitedString(acceptedTypes, " or "),
Expressions.name(e),
e.dataType().esType));
}
}


@ -42,7 +42,7 @@ public class Histogram extends GroupingFunction {
TypeResolution resolution = Expressions.typeMustBeNumericOrDate(field(), "HISTOGRAM", ParamOrdinal.FIRST);
if (resolution == TypeResolution.TYPE_RESOLVED) {
// interval must be Literal interval
if (field().dataType() == DataType.DATE) {
if (field().dataType() == DataType.DATETIME) {
resolution = Expressions.typeMustBe(interval, DataTypes::isInterval, "(Date) HISTOGRAM", ParamOrdinal.SECOND, "interval");
} else {
resolution = Expressions.typeMustBeNumeric(interval, "(Numeric) HISTOGRAM", ParamOrdinal.SECOND);
@ -81,4 +81,4 @@ public class Histogram extends GroupingFunction {
}
return false;
}
}
}


@ -21,7 +21,7 @@ public class CurrentDateTime extends ConfigurationFunction {
private final ZonedDateTime dateTime;
public CurrentDateTime(Source source, Expression precision, Configuration configuration) {
super(source, configuration, DataType.DATE);
super(source, configuration, DataType.DATETIME);
this.precision = precision;
int p = precision != null ? ((Number) precision.fold()).intValue() : 0;
this.dateTime = nanoPrecision(configuration().now(), p);


@ -79,7 +79,7 @@ public interface ScriptWeaver {
default ScriptTemplate scriptWithAggregate(AggregateFunctionAttribute aggregate) {
String template = "{}";
if (aggregate.dataType() == DataType.DATE) {
if (aggregate.dataType() == DataType.DATETIME) {
template = "{sql}.asDateTime({})";
}
return new ScriptTemplate(processScript(template),
@ -89,7 +89,7 @@ public interface ScriptWeaver {
default ScriptTemplate scriptWithGrouping(GroupingFunctionAttribute grouping) {
String template = "{}";
if (grouping.dataType() == DataType.DATE) {
if (grouping.dataType() == DataType.DATETIME) {
template = "{sql}.asDateTime({})";
}
return new ScriptTemplate(processScript(template),
@ -110,4 +110,4 @@ public interface ScriptWeaver {
default String formatTemplate(String template) {
return Scripts.formatTemplate(template);
}
}
}


@ -411,9 +411,9 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
case "float":
case "double":
return DataType.DOUBLE;
case "date":
case "datetime":
case "timestamp":
return DataType.DATE;
return DataType.DATETIME;
case "char":
case "varchar":
case "string":
@ -793,7 +793,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
} catch(IllegalArgumentException ex) {
throw new ParsingException(source, "Invalid date received; {}", ex.getMessage());
}
return new Literal(source, DateUtils.of(dt), DataType.DATE);
return new Literal(source, DateUtils.of(dt), DataType.DATETIME);
}
@Override
@ -829,7 +829,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
} catch (IllegalArgumentException ex) {
throw new ParsingException(source, "Invalid timestamp received; {}", ex.getMessage());
}
return new Literal(source, DateUtils.of(dt), DataType.DATE);
return new Literal(source, DateUtils.of(dt), DataType.DATETIME);
}
@Override
@ -930,4 +930,4 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
}
return null;
}
}
}


@ -284,7 +284,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
if (matchingGroup != null) {
if (exp instanceof Attribute || exp instanceof ScalarFunction || exp instanceof GroupingFunction) {
Processor action = null;
ZoneId zi = DataType.DATE == exp.dataType() ? DateUtils.UTC : null;
ZoneId zi = DataType.DATETIME == exp.dataType() ? DateUtils.UTC : null;
/*
* special handling of dates since aggs return the typed Date object which needs
* extraction instead of handling this in the scroller, the folder handles this
@ -335,7 +335,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
// check if the field is a date - if so mark it as such to interpret the long as a date
// UTC is used since that's what the server uses and there's no conversion applied
// (like for date histograms)
ZoneId zi = DataType.DATE == child.dataType() ? DateUtils.UTC : null;
ZoneId zi = DataType.DATETIME == child.dataType() ? DateUtils.UTC : null;
queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi));
}
// handle histogram
@ -359,7 +359,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
matchingGroup = groupingContext.groupFor(ne);
Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(ne));
ZoneId zi = DataType.DATE == ne.dataType() ? DateUtils.UTC : null;
ZoneId zi = DataType.DATETIME == ne.dataType() ? DateUtils.UTC : null;
queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi));
}
}


@ -275,7 +275,7 @@ final class QueryTranslator {
Expression field = h.field();
// date histogram
if (h.dataType() == DataType.DATE) {
if (h.dataType() == DataType.DATETIME) {
long intervalAsMillis = Intervals.inMillis(h.interval());
// TODO: set timezone
if (field instanceof FieldAttribute) {


@ -15,7 +15,7 @@ import java.time.ZoneId;
import java.util.Objects;
/**
* GROUP BY key based on histograms on date fields.
* GROUP BY key based on histograms on date/datetime fields.
*/
public class GroupByDateHistogram extends GroupByKey {


@ -39,7 +39,7 @@ public abstract class GroupByKey extends Agg {
builder.valueType(ValueType.DOUBLE);
} else if (script.outputType().isString()) {
builder.valueType(ValueType.STRING);
} else if (script.outputType() == DataType.DATE) {
} else if (script.outputType() == DataType.DATETIME) {
builder.valueType(ValueType.DATE);
} else if (script.outputType() == DataType.BOOLEAN) {
builder.valueType(ValueType.BOOLEAN);
@ -78,4 +78,4 @@ public abstract class GroupByKey extends Agg {
&& Objects.equals(script, ((GroupByKey) obj).script)
&& Objects.equals(direction, ((GroupByKey) obj).direction);
}
}
}


@ -183,7 +183,7 @@ public class QueryContainer {
List<FieldExtraction> nestedRefs = new ArrayList<>();
String name = aliasName(attr);
String format = attr.field().getDataType() == DataType.DATE ? "epoch_millis" : DocValueFieldsContext.USE_DEFAULT_FORMAT;
String format = attr.field().getDataType() == DataType.DATETIME ? "epoch_millis" : DocValueFieldsContext.USE_DEFAULT_FORMAT;
Query q = rewriteToContainNestedField(query, attr.source(),
attr.nestedParent().name(), name, format, attr.field().isAggregatable());
@ -362,4 +362,4 @@ public class QueryContainer {
throw new RuntimeException("error rendering", e);
}
}
}
}


@ -49,7 +49,7 @@ public class SearchHitFieldRef extends FieldReference {
return;
}
if (docValue) {
String format = dataType == DataType.DATE ? "epoch_millis" : null;
String format = dataType == DataType.DATETIME ? "epoch_millis" : null;
sourceBuilder.addDocField(name, format);
} else {
sourceBuilder.addSourceField(name);
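
The two hunks above make the same point from opposite ends: a `DATETIME` column fetched through doc values is requested in the `epoch_millis` format, so the hit extractor receives a plain long and rehydrates it into a UTC datetime. A rough, hypothetical sketch of that flow, mirroring `FieldHitExtractorTests` further down (the import paths are assumptions taken from the test file):

["source","java"]
--------------------------------------------------
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;

import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.sql.execution.search.extractor.FieldHitExtractor;
import org.elasticsearch.xpack.sql.type.DataType;

public class EpochMillisExtractionDemo {
    public static void main(String[] args) {
        long millis = 0L; // 1970-01-01T00:00:00.000Z
        SearchHit hit = new SearchHit(1);
        hit.fields(singletonMap("my_date_field",
                new DocumentField("my_date_field", singletonList(millis))));

        // doc-value extraction of a DATETIME column: the epoch_millis long
        // comes back as a (UTC) datetime value
        FieldHitExtractor extractor = new FieldHitExtractor("my_date_field", DataType.DATETIME, true);
        System.out.println(extractor.extract(hit));
    }
}
--------------------------------------------------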


@ -44,7 +44,7 @@ public enum DataType {
// since ODBC and JDBC interpret precision for Date as display size,
// the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone)
// see https://github.com/elastic/elasticsearch/issues/30386#issuecomment-386807288
DATE( JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true),
DATETIME( JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true),
//
// specialized types
//
@ -102,9 +102,9 @@ public enum DataType {
odbcToEs.put("SQL_LONGVARBINARY", BINARY);
// Date
odbcToEs.put("SQL_DATE", DATE);
odbcToEs.put("SQL_TIME", DATE);
odbcToEs.put("SQL_TIMESTAMP", DATE);
odbcToEs.put("SQL_DATE", DATETIME);
odbcToEs.put("SQL_TIME", DATETIME);
odbcToEs.put("SQL_TIMESTAMP", DATETIME);
// Intervals
odbcToEs.put("SQL_INTERVAL_HOUR_TO_MINUTE", INTERVAL_HOUR_TO_MINUTE);
@ -225,10 +225,14 @@ public enum DataType {
* For any dataType DataType.fromTypeName(dataType.esType) == dataType
*/
public static DataType fromTypeName(String esType) {
String uppercase = esType.toUpperCase(Locale.ROOT);
if (uppercase.equals("DATE")) {
return DataType.DATETIME;
}
try {
return DataType.valueOf(esType.toUpperCase(Locale.ROOT));
return DataType.valueOf(uppercase);
} catch (IllegalArgumentException ex) {
return DataType.UNSUPPORTED;
}
}
}
}
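
Note the `fromTypeName` special case: fields are still mapped as `date` on the {es} side, but that name now resolves to the renamed SQL constant, while anything unrecognized keeps falling through to `UNSUPPORTED`. A minimal sketch of the expected behaviour (the demo class is hypothetical, not part of this commit):

["source","java"]
--------------------------------------------------
import org.elasticsearch.xpack.sql.type.DataType;

public class FromTypeNameDemo {
    public static void main(String[] args) {
        // the ES mapping type is still spelled "date"; SQL-side it is DATETIME
        System.out.println(DataType.fromTypeName("date"));   // DATETIME
        // unknown names keep falling through to UNSUPPORTED
        System.out.println(DataType.fromTypeName("wibble")); // UNSUPPORTED
    }
}
--------------------------------------------------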


@ -17,7 +17,7 @@ import java.util.function.Function;
import java.util.function.LongFunction;
import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN;
import static org.elasticsearch.xpack.sql.type.DataType.DATE;
import static org.elasticsearch.xpack.sql.type.DataType.DATETIME;
import static org.elasticsearch.xpack.sql.type.DataType.LONG;
import static org.elasticsearch.xpack.sql.type.DataType.NULL;
@ -83,7 +83,7 @@ public abstract class DataTypeConversion {
}
if (DataTypes.isInterval(right)) {
if (left == DATE) {
if (left == DATETIME) {
return left;
}
}
@ -145,8 +145,8 @@ public abstract class DataTypeConversion {
return conversionToFloat(from);
case DOUBLE:
return conversionToDouble(from);
case DATE:
return conversionToDate(from);
case DATETIME:
return conversionToDateTime(from);
case BOOLEAN:
return conversionToBoolean(from);
default:
@ -156,7 +156,7 @@ public abstract class DataTypeConversion {
}
private static Conversion conversionToString(DataType from) {
if (from == DATE) {
if (from == DATETIME) {
return Conversion.DATE_TO_STRING;
}
return Conversion.OTHER_TO_STRING;
@ -182,7 +182,7 @@ public abstract class DataTypeConversion {
if (from.isString()) {
return Conversion.STRING_TO_LONG;
}
if (from == DATE) {
if (from == DATETIME) {
return Conversion.DATE_TO_LONG;
}
return null;
@ -201,7 +201,7 @@ public abstract class DataTypeConversion {
if (from.isString()) {
return Conversion.STRING_TO_INT;
}
if (from == DATE) {
if (from == DATETIME) {
return Conversion.DATE_TO_INT;
}
return null;
@ -220,7 +220,7 @@ public abstract class DataTypeConversion {
if (from.isString()) {
return Conversion.STRING_TO_SHORT;
}
if (from == DATE) {
if (from == DATETIME) {
return Conversion.DATE_TO_SHORT;
}
return null;
@ -239,7 +239,7 @@ public abstract class DataTypeConversion {
if (from.isString()) {
return Conversion.STRING_TO_BYTE;
}
if (from == DATE) {
if (from == DATETIME) {
return Conversion.DATE_TO_BYTE;
}
return null;
@ -258,7 +258,7 @@ public abstract class DataTypeConversion {
if (from.isString()) {
return Conversion.STRING_TO_FLOAT;
}
if (from == DATE) {
if (from == DATETIME) {
return Conversion.DATE_TO_FLOAT;
}
return null;
@ -277,13 +277,13 @@ public abstract class DataTypeConversion {
if (from.isString()) {
return Conversion.STRING_TO_DOUBLE;
}
if (from == DATE) {
if (from == DATETIME) {
return Conversion.DATE_TO_DOUBLE;
}
return null;
}
private static Conversion conversionToDate(DataType from) {
private static Conversion conversionToDateTime(DataType from) {
if (from.isRational()) {
return Conversion.RATIONAL_TO_DATE;
}
@ -306,7 +306,7 @@ public abstract class DataTypeConversion {
if (from.isString()) {
return Conversion.STRING_TO_BOOLEAN;
}
if (from == DATE) {
if (from == DATETIME) {
return Conversion.DATE_TO_BOOLEAN;
}
return null;
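
The conversion table is renamed wholesale: the routing now keys off `DATETIME`, while the `Conversion` constants themselves (`DATE_TO_STRING`, `DATE_TO_LONG`, ...) keep their old names for now. A sketch of the string round trip, assuming the same `conversionFor`/`Conversion` API that `DataTypeConversionTests` below exercises:

["source","java"]
--------------------------------------------------
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypeConversion;
import org.elasticsearch.xpack.sql.type.DataTypeConversion.Conversion;

public class DateTimeConversionDemo {
    public static void main(String[] args) {
        ZonedDateTime epoch = ZonedDateTime.ofInstant(Instant.EPOCH, ZoneOffset.UTC);

        // DATETIME -> KEYWORD renders the full ISO 8601 timestamp
        Conversion forward = DataTypeConversion.conversionFor(DataType.DATETIME, DataType.KEYWORD);
        System.out.println(forward.convert(epoch)); // 1970-01-01T00:00:00.000Z

        // KEYWORD -> DATETIME parses it back, round-tripping the value
        Conversion back = DataTypeConversion.conversionFor(DataType.KEYWORD, DataType.DATETIME);
        System.out.println(back.convert("1970-01-01T00:00:00.000Z"));
    }
}
--------------------------------------------------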


@ -12,7 +12,7 @@ import java.time.ZonedDateTime;
import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN;
import static org.elasticsearch.xpack.sql.type.DataType.BYTE;
import static org.elasticsearch.xpack.sql.type.DataType.DATE;
import static org.elasticsearch.xpack.sql.type.DataType.DATETIME;
import static org.elasticsearch.xpack.sql.type.DataType.DOUBLE;
import static org.elasticsearch.xpack.sql.type.DataType.FLOAT;
import static org.elasticsearch.xpack.sql.type.DataType.INTEGER;
@ -68,7 +68,7 @@ public final class DataTypes {
return SHORT;
}
if (value instanceof ZonedDateTime) {
return DATE;
return DATETIME;
}
if (value instanceof String || value instanceof Character) {
return KEYWORD;
@ -166,7 +166,7 @@ public final class DataTypes {
// https://docs.microsoft.com/en-us/sql/relational-databases/native-client-odbc-date-time/metadata-catalog
// https://github.com/elastic/elasticsearch/issues/30386
public static Integer metaSqlDataType(DataType t) {
if (t == DATE) {
if (t == DATETIME) {
// ODBC SQL_DATETIME
return Integer.valueOf(9);
}
@ -177,7 +177,7 @@ public final class DataTypes {
// https://github.com/elastic/elasticsearch/issues/30386
// https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function?view=sql-server-2017
public static Integer metaSqlDateTimeSub(DataType t) {
if (t == DATE) {
if (t == DATETIME) {
// ODBC SQL_CODE_TIMESTAMP
return Integer.valueOf(3);
}
@ -188,7 +188,7 @@ public final class DataTypes {
// https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/decimal-digits?view=sql-server-2017
public static Short metaSqlMinimumScale(DataType t) {
// TODO: return info for HALF/SCALED_FLOATS (should be based on field not type)
if (t == DATE) {
if (t == DATETIME) {
return Short.valueOf((short) 3);
}
if (t.isInteger()) {
@ -203,7 +203,7 @@ public final class DataTypes {
public static Short metaSqlMaximumScale(DataType t) {
// TODO: return info for HALF/SCALED_FLOATS (should be based on field not type)
if (t == DATE) {
if (t == DATETIME) {
return Short.valueOf((short) 3);
}
if (t.isInteger()) {
@ -223,4 +223,4 @@ public final class DataTypes {
// null means radix is not applicable for the given type.
return t.isInteger() ? Integer.valueOf(10) : (t.isRational() ? Integer.valueOf(2) : null);
}
}
}
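
The ODBC metadata helpers carry over unchanged apart from the type constant; the values in the sketch below are the ones asserted in `DataTypesTests` later in this commit:

["source","java"]
--------------------------------------------------
import org.elasticsearch.xpack.sql.type.DataType;

import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlDataType;
import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlDateTimeSub;
import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlMaximumScale;
import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlMinimumScale;
import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlRadix;

public class OdbcMetaDemo {
    public static void main(String[] args) {
        System.out.println(metaSqlDataType(DataType.DATETIME));     // 9: ODBC SQL_DATETIME verbose type
        System.out.println(metaSqlDateTimeSub(DataType.DATETIME));  // 3: ODBC SQL_CODE_TIMESTAMP
        System.out.println(metaSqlMinimumScale(DataType.DATETIME)); // 3: millisecond precision
        System.out.println(metaSqlMaximumScale(DataType.DATETIME)); // 3
        System.out.println(metaSqlRadix(DataType.DATETIME));        // null: radix not applicable
    }
}
--------------------------------------------------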


@ -21,7 +21,7 @@ public class DateEsField extends EsField {
private final List<String> formats;
public DateEsField(String name, Map<String, EsField> properties, boolean hasDocValues, String... formats) {
super(name, DataType.DATE, properties, hasDocValues);
super(name, DataType.DATETIME, properties, hasDocValues);
this.formats = CollectionUtils.isEmpty(formats) ? DEFAULT_FORMAT : Arrays.asList(formats);
}


@ -86,7 +86,7 @@ public abstract class Types {
boolean normalized = Strings.hasText(textSetting(content.get("normalizer"), null));
field = new KeywordEsField(name, properties, docValues, length, normalized);
break;
case DATE:
case DATETIME:
Object fmt = content.get("format");
if (fmt != null) {
field = new DateEsField(name, properties, docValues, Strings.delimitedListToStringArray(fmt.toString(), "||"));
@ -118,4 +118,4 @@ public abstract class Types {
private static int intSetting(Object value, int defaultValue) {
return value == null ? defaultValue : Integer.parseInt(value.toString());
}
}
}
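
Together with the `DateEsField` change above, this is the end-to-end effect on mapping resolution: an {es} `date` field now surfaces as the SQL `DATETIME` type. A hedged sketch (the mapping snippet and demo class are illustrative only):

["source","java"]
--------------------------------------------------
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;

import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.EsField;
import org.elasticsearch.xpack.sql.type.Types;

public class MappingTypeDemo {
    public static void main(String[] args) {
        // illustrative mapping: a plain ES "date" field
        String mapping = "{\"properties\":{\"release_date\":{\"type\":\"date\"}}}";
        Map<String, EsField> fields = Types.fromEs(XContentHelper.convertToMap(
                JsonXContent.jsonXContent,
                new ByteArrayInputStream(mapping.getBytes(StandardCharsets.UTF_8)),
                false));
        // the field is reported with the renamed SQL type
        System.out.println(fields.get("release_date").getDataType() == DataType.DATETIME); // true
    }
}
--------------------------------------------------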


@ -373,7 +373,7 @@ public class VerifierErrorMessagesTests extends ESTestCase {
}
public void testNotSupportedAggregateOnDate() {
assertEquals("1:8: [AVG(date)] argument must be [numeric], found value [date] type [date]",
assertEquals("1:8: [AVG(date)] argument must be [numeric], found value [date] type [datetime]",
error("SELECT AVG(date) FROM test"));
}
@ -510,14 +510,14 @@ public class VerifierErrorMessagesTests extends ESTestCase {
public void testHistogramInFilter() {
assertEquals("1:63: Cannot filter on grouping function [HISTOGRAM(date, INTERVAL 1 MONTH)], use its argument instead",
error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test WHERE "
+ "HISTOGRAM(date, INTERVAL 1 MONTH) > CAST('2000-01-01' AS DATE) GROUP BY h"));
+ "HISTOGRAM(date, INTERVAL 1 MONTH) > CAST('2000-01-01' AS DATETIME) GROUP BY h"));
}
// related https://github.com/elastic/elasticsearch/issues/36853
public void testHistogramInHaving() {
assertEquals("1:75: Cannot filter on grouping function [h], use its argument instead",
error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test GROUP BY h HAVING "
+ "h > CAST('2000-01-01' AS DATE)"));
+ "h > CAST('2000-01-01' AS DATETIME)"));
}
public void testGroupByScalarOnTopOfGrouping() {
@ -548,3 +548,4 @@ public class VerifierErrorMessagesTests extends ESTestCase {
e.getMessage());
}
}


@ -190,7 +190,7 @@ public class IndexResolverTests extends ESTestCase {
}
private static boolean isAggregatable(DataType type) {
return type.isNumeric() || type == DataType.KEYWORD || type == DataType.DATE;
return type.isNumeric() || type == DataType.KEYWORD || type == DataType.DATETIME;
}
private static class UpdateableFieldCapabilities extends FieldCapabilities {


@ -144,7 +144,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
SearchHit hit = new SearchHit(1);
DocumentField field = new DocumentField("my_date_field", documentFieldValues);
hit.fields(singletonMap("my_date_field", field));
FieldHitExtractor extractor = new FieldHitExtractor("my_date_field", DataType.DATE, true);
FieldHitExtractor extractor = new FieldHitExtractor("my_date_field", DataType.DATETIME, true);
assertEquals(DateUtils.of(millis), extractor.extract(hit));
}


@ -28,6 +28,6 @@ public class DayOfYearTests extends ESTestCase {
}
private DayOfYear build(Object value, ZoneId zoneId) {
return new DayOfYear(Source.EMPTY, new Literal(Source.EMPTY, value, DataType.DATE), zoneId);
return new DayOfYear(Source.EMPTY, new Literal(Source.EMPTY, value, DataType.DATETIME), zoneId);
}
}


@ -77,7 +77,7 @@ public class BinaryArithmeticTests extends ESTestCase {
assertEquals(interval(Duration.ofDays(1).plusHours(2), INTERVAL_DAY_TO_HOUR), L(x));
}
public void testAddYearMonthIntervalToDate() {
public void testAddYearMonthIntervalToDateTime() {
ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC);
Literal l = L(now);
TemporalAmount t = Period.ofYears(100).plusMonths(50);
@ -86,7 +86,7 @@ public class BinaryArithmeticTests extends ESTestCase {
assertEquals(L(now.plus(t)), L(x));
}
public void testAddDayTimeIntervalToDate() {
public void testAddDayTimeIntervalToDateTime() {
ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC);
Literal l = L(now);
TemporalAmount t = Duration.ofHours(2);
@ -95,7 +95,7 @@ public class BinaryArithmeticTests extends ESTestCase {
assertEquals(L(now.plus(t)), L(x));
}
public void testAddDayTimeIntervalToDateReverse() {
public void testAddDayTimeIntervalToDateTimeReverse() {
ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC);
Literal l = L(now);
TemporalAmount t = Duration.ofHours(2);
@ -124,7 +124,7 @@ public class BinaryArithmeticTests extends ESTestCase {
assertEquals(interval(Duration.ofDays(1).plusHours(8), INTERVAL_DAY_TO_HOUR), L(x));
}
public void testSubYearMonthIntervalToDate() {
public void testSubYearMonthIntervalToDateTime() {
ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC);
Literal l = L(now);
TemporalAmount t = Period.ofYears(100).plusMonths(50);
@ -133,7 +133,7 @@ public class BinaryArithmeticTests extends ESTestCase {
assertEquals(L(now.minus(t)), L(x));
}
public void testSubYearMonthIntervalToDateIllegal() {
public void testSubYearMonthIntervalToDateTimeIllegal() {
ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC);
Literal l = L(now);
TemporalAmount t = Period.ofYears(100).plusMonths(50);
@ -148,7 +148,7 @@ public class BinaryArithmeticTests extends ESTestCase {
assertEquals("Cannot compute [-] between [IntervalDayTime] [Integer]", expect.getMessage());
}
public void testSubDayTimeIntervalToDate() {
public void testSubDayTimeIntervalToDateTime() {
ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC);
Literal l = L(now);
TemporalAmount t = Duration.ofHours(2);


@ -330,7 +330,7 @@ public class OptimizerTests extends ESTestCase {
}
public void testConstantFoldingDatetime() {
Expression cast = new Cast(EMPTY, Literal.of(EMPTY, "2018-01-19T10:23:27Z"), DataType.DATE);
Expression cast = new Cast(EMPTY, Literal.of(EMPTY, "2018-01-19T10:23:27Z"), DataType.DATETIME);
assertEquals(2018, foldFunction(new Year(EMPTY, cast, UTC)));
assertEquals(1, foldFunction(new MonthOfYear(EMPTY, cast, UTC)));
assertEquals(19, foldFunction(new DayOfMonth(EMPTY, cast, UTC)));


@ -170,7 +170,7 @@ public class EscapedFunctionsTests extends ESTestCase {
public void testDateLiteral() {
Literal l = dateLiteral("2012-01-01");
assertThat(l.dataType(), is(DataType.DATE));
assertThat(l.dataType(), is(DataType.DATETIME));
}
public void testDateLiteralValidation() {
@ -192,7 +192,7 @@ public class EscapedFunctionsTests extends ESTestCase {
public void testTimestampLiteral() {
Literal l = timestampLiteral("2012-01-01 10:01:02.3456");
assertThat(l.dataType(), is(DataType.DATE));
assertThat(l.dataType(), is(DataType.DATETIME));
}
public void testTimestampLiteralValidation() {
@ -236,4 +236,4 @@ public class EscapedFunctionsTests extends ESTestCase {
LikePattern pattern = likeEscape("|%tring", "|");
assertThat(pattern.escape(), is('|'));
}
}
}


@ -61,7 +61,7 @@ public class SysParserTests extends ESTestCase {
Command cmd = sql("SYS TYPES").v1();
List<String> names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE",
"KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE",
"KEYWORD", "TEXT", "IP", "BOOLEAN", "DATETIME",
"INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND",
"INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND",
"INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND",
@ -160,4 +160,4 @@ public class SysParserTests extends ESTestCase {
}, ex -> fail(ex.getMessage())));
}
}
}


@ -44,7 +44,7 @@ public class SysTypesTests extends ESTestCase {
Command cmd = sql("SYS TYPES").v1();
List<String> names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE",
"KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE",
"KEYWORD", "TEXT", "IP", "BOOLEAN", "DATETIME",
"INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND",
"INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND",
"INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND",
@ -108,4 +108,4 @@ public class SysTypesTests extends ESTestCase {
assertEquals("IP", r.column(0));
}, ex -> fail(ex.getMessage())));
}
}
}


@ -292,7 +292,7 @@ public class QueryFolderTests extends ESTestCase {
assertThat(ee.output().get(1).toString(), startsWith("a{s->"));
}
public void testGroupKeyTypes_Date() {
public void testGroupKeyTypes_DateTime() {
PhysicalPlan p = plan("SELECT count(*), date + INTERVAL '1-2' YEAR TO MONTH AS a FROM test GROUP BY a");
assertEquals(EsQueryExec.class, p.getClass());
EsQueryExec ee = (EsQueryExec) p;


@ -170,7 +170,7 @@ public class QueryTranslatorTests extends ESTestCase {
}
public void testDateRangeCast() {
LogicalPlan p = plan("SELECT some.string FROM test WHERE date > CAST('1969-05-13T12:34:56Z' AS DATE)");
LogicalPlan p = plan("SELECT some.string FROM test WHERE date > CAST('1969-05-13T12:34:56Z' AS DATETIME)");
assertTrue(p instanceof Project);
p = ((Project) p).child();
assertTrue(p instanceof Filter);
@ -480,7 +480,7 @@ public class QueryTranslatorTests extends ESTestCase {
assertEquals("+2-0", h.interval().fold().toString());
Expression field = h.field();
assertEquals(FieldAttribute.class, field.getClass());
assertEquals(DataType.DATE, field.dataType());
assertEquals(DataType.DATETIME, field.dataType());
}
public void testCountAndCountDistinctFolding() {


@ -18,7 +18,7 @@ import java.time.ZonedDateTime;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime;
import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN;
import static org.elasticsearch.xpack.sql.type.DataType.BYTE;
import static org.elasticsearch.xpack.sql.type.DataType.DATE;
import static org.elasticsearch.xpack.sql.type.DataType.DATETIME;
import static org.elasticsearch.xpack.sql.type.DataType.DOUBLE;
import static org.elasticsearch.xpack.sql.type.DataType.FLOAT;
import static org.elasticsearch.xpack.sql.type.DataType.INTEGER;
@ -41,7 +41,7 @@ public class DataTypeConversionTests extends ESTestCase {
assertNull(conversion.convert(null));
assertEquals("10.0", conversion.convert(10.0));
conversion = conversionFor(DATE, KEYWORD);
conversion = conversionFor(DATETIME, KEYWORD);
assertNull(conversion.convert(null));
assertEquals("1970-01-01T00:00:00.000Z", conversion.convert(dateTime(0)));
}
@ -80,8 +80,8 @@ public class DataTypeConversionTests extends ESTestCase {
assertEquals("cannot cast [0xff] to [Long]", e.getMessage());
}
public void testConversionToDate() {
DataType to = DATE;
public void testConversionToDateTime() {
DataType to = DATETIME;
{
Conversion conversion = conversionFor(DOUBLE, to);
assertNull(conversion.convert(null));
@ -112,8 +112,8 @@ public class DataTypeConversionTests extends ESTestCase {
// double check back and forth conversion
ZonedDateTime dt = TestUtils.now();
Conversion forward = conversionFor(DATE, KEYWORD);
Conversion back = conversionFor(KEYWORD, DATE);
Conversion forward = conversionFor(DATETIME, KEYWORD);
Conversion back = conversionFor(KEYWORD, DATETIME);
assertEquals(dt, back.convert(forward.convert(dt)));
Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert("0xff"));
assertEquals("cannot cast [0xff] to [Date]:Invalid format: \"0xff\" is malformed at \"xff\"", e.getMessage());


@ -9,7 +9,7 @@ import org.elasticsearch.test.ESTestCase;
import java.util.EnumSet;
import static org.elasticsearch.xpack.sql.type.DataType.DATE;
import static org.elasticsearch.xpack.sql.type.DataType.DATETIME;
import static org.elasticsearch.xpack.sql.type.DataType.FLOAT;
import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY;
import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY_TO_HOUR;
@ -37,32 +37,32 @@ import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlRadix;
public class DataTypesTests extends ESTestCase {
public void testMetaDataType() {
assertEquals(Integer.valueOf(9), metaSqlDataType(DATE));
DataType t = randomDataTypeNoDate();
assertEquals(Integer.valueOf(9), metaSqlDataType(DATETIME));
DataType t = randomDataTypeNoDateTime();
assertEquals(t.sqlType.getVendorTypeNumber(), metaSqlDataType(t));
}
public void testMetaDateTypeSub() {
assertEquals(Integer.valueOf(3), metaSqlDateTimeSub(DATE));
assertEquals(Integer.valueOf(0), metaSqlDateTimeSub(randomDataTypeNoDate()));
assertEquals(Integer.valueOf(3), metaSqlDateTimeSub(DATETIME));
assertEquals(Integer.valueOf(0), metaSqlDateTimeSub(randomDataTypeNoDateTime()));
}
public void testMetaMinimumScale() {
assertEquals(Short.valueOf((short) 3), metaSqlMinimumScale(DATE));
assertEquals(Short.valueOf((short) 3), metaSqlMinimumScale(DATETIME));
assertEquals(Short.valueOf((short) 0), metaSqlMinimumScale(LONG));
assertEquals(Short.valueOf((short) 0), metaSqlMinimumScale(FLOAT));
assertNull(metaSqlMinimumScale(KEYWORD));
}
public void testMetaMaximumScale() {
assertEquals(Short.valueOf((short) 3), metaSqlMaximumScale(DATE));
assertEquals(Short.valueOf((short) 3), metaSqlMaximumScale(DATETIME));
assertEquals(Short.valueOf((short) 0), metaSqlMaximumScale(LONG));
assertEquals(Short.valueOf((short) FLOAT.defaultPrecision), metaSqlMaximumScale(FLOAT));
assertNull(metaSqlMaximumScale(KEYWORD));
}
public void testMetaRadix() {
assertNull(metaSqlRadix(DATE));
assertNull(metaSqlRadix(DATETIME));
assertNull(metaSqlRadix(KEYWORD));
assertEquals(Integer.valueOf(10), metaSqlRadix(LONG));
assertEquals(Integer.valueOf(2), metaSqlRadix(FLOAT));
@ -108,7 +108,7 @@ public class DataTypesTests extends ESTestCase {
assertNull(compatibleInterval(INTERVAL_MINUTE_TO_SECOND, INTERVAL_MONTH));
}
private DataType randomDataTypeNoDate() {
return randomValueOtherThan(DataType.DATE, () -> randomFrom(DataType.values()));
private DataType randomDataTypeNoDateTime() {
return randomValueOtherThan(DataType.DATETIME, () -> randomFrom(DataType.values()));
}
}
}


@ -14,7 +14,7 @@ import java.util.List;
import java.util.Map;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.xpack.sql.type.DataType.DATE;
import static org.elasticsearch.xpack.sql.type.DataType.DATETIME;
import static org.elasticsearch.xpack.sql.type.DataType.INTEGER;
import static org.elasticsearch.xpack.sql.type.DataType.KEYWORD;
import static org.elasticsearch.xpack.sql.type.DataType.NESTED;
@ -81,7 +81,7 @@ public class TypesTests extends ESTestCase {
assertThat(mapping.size(), is(1));
EsField field = mapping.get("date");
assertThat(field.getDataType(), is(DATE));
assertThat(field.getDataType(), is(DATETIME));
assertThat(field.isAggregatable(), is(true));
assertThat(field.getPrecision(), is(24));
@ -95,7 +95,7 @@ public class TypesTests extends ESTestCase {
assertThat(mapping.size(), is(1));
EsField field = mapping.get("date");
assertThat(field.getDataType(), is(DATE));
assertThat(field.getDataType(), is(DATETIME));
assertThat(field.isAggregatable(), is(true));
DateEsField dfield = (DateEsField) field;
// default types
@ -107,7 +107,7 @@ public class TypesTests extends ESTestCase {
assertThat(mapping.size(), is(1));
EsField field = mapping.get("date");
assertThat(field.getDataType(), is(DATE));
assertThat(field.getDataType(), is(DATETIME));
assertThat(field.isAggregatable(), is(true));
DateEsField dfield = (DateEsField) field;
// default types
@ -175,7 +175,7 @@ public class TypesTests extends ESTestCase {
Map<String, EsField> children = field.getProperties();
assertThat(children.size(), is(4));
assertThat(children.get("dep_name").getDataType(), is(TEXT));
assertThat(children.get("start_date").getDataType(), is(DATE));
assertThat(children.get("start_date").getDataType(), is(DATETIME));
}
public void testGeoField() {
@ -208,4 +208,4 @@ public class TypesTests extends ESTestCase {
assertNotNull("Could not find mapping resource:" + name, stream);
return Types.fromEs(XContentHelper.convertToMap(JsonXContent.jsonXContent, stream, ordered));
}
}
}