Add time zone support to the connection

Flows time zones through the `QueryInitRequest` and into the
`ExpressionBuilder`, which attaches the time zones to date/time
expressions. Modifies the code that generates date aggregations and
scripts, and the code that extracts results, to use the time zones.

Original commit: elastic/x-pack-elasticsearch@d6682580d1
This commit is contained in:
Nik Everett 2017-06-30 14:53:17 -04:00
parent 514b7290d7
commit 336980ec28
47 changed files with 517 additions and 245 deletions

View File

@ -5,47 +5,53 @@
*/
package org.elasticsearch.xpack.sql.jdbc.net.protocol;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.Proto.Action;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.Proto.Action;
import static java.lang.String.format;
import java.util.TimeZone;
public class QueryInitRequest extends Request {
public final int fetchSize;
public final String query;
public final TimeZone timeZone;
public final TimeoutInfo timeout;
public QueryInitRequest(int fetchSize, String query, TimeoutInfo timeout) {
public QueryInitRequest(int fetchSize, String query, TimeZone timeZone, TimeoutInfo timeout) {
super(Action.QUERY_INIT);
this.fetchSize = fetchSize;
this.query = query;
this.timeZone = timeZone;
this.timeout = timeout;
}
QueryInitRequest(DataInput in) throws IOException {
super(Action.QUERY_INIT);
fetchSize = in.readInt();
timeout = new TimeoutInfo(in);
query = in.readUTF();
timeZone = TimeZone.getTimeZone(in.readUTF());
timeout = new TimeoutInfo(in);
}
@Override
public void encode(DataOutput out) throws IOException {
out.writeInt(action.value());
out.writeInt(action.value()); // NOCOMMIT this should be written by the caller
out.writeInt(fetchSize);
timeout.encode(out);
out.writeUTF(query);
out.writeUTF(timeZone.getID());
timeout.encode(out);
}
@Override
public String toString() {
return format(Locale.ROOT, "SqlInitReq[%s]", query);
StringBuilder b = new StringBuilder();
b.append("SqlInitReq[").append(query).append(']');
if (false == timeZone.getID().equals("UTC")) {
b.append('[').append(timeZone.getID()).append(']');
}
return b.toString();
}
@Override
@ -56,11 +62,12 @@ public class QueryInitRequest extends Request {
QueryInitRequest other = (QueryInitRequest) obj;
return fetchSize == other.fetchSize
&& Objects.equals(query, other.query)
&& Objects.equals(timeout, other.timeout);
&& Objects.equals(timeout, other.timeout)
&& Objects.equals(timeZone.getID(), other.timeZone.getID());
}
@Override
public int hashCode() {
return Objects.hash(fetchSize, query, timeout);
return Objects.hash(fetchSize, query, timeout, timeZone.getID().hashCode());
}
}

View File

@ -14,7 +14,7 @@ import static org.elasticsearch.xpack.sql.test.RoundTripTestUtils.assertRoundTri
public class QueryInitRequestTests extends ESTestCase {
public static QueryInitRequest randomQueryInitRequest() {
return new QueryInitRequest(between(0, Integer.MAX_VALUE), randomAlphaOfLength(5), randomTimeoutInfo());
return new QueryInitRequest(between(0, Integer.MAX_VALUE), randomAlphaOfLength(5), randomTimeZone(random()), randomTimeoutInfo());
}
public void testRoundTrip() throws IOException {

View File

@ -5,6 +5,10 @@
*/
package org.elasticsearch.xpack.sql.jdbc.jdbc;
import org.elasticsearch.xpack.sql.jdbc.util.Assert;
import org.elasticsearch.xpack.sql.net.client.ConnectionConfiguration;
import org.elasticsearch.xpack.sql.net.client.util.StringUtils;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.DriverPropertyInfo;
@ -12,10 +16,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import org.elasticsearch.xpack.sql.jdbc.util.Assert;
import org.elasticsearch.xpack.sql.net.client.ConnectionConfiguration;
import org.elasticsearch.xpack.sql.net.client.util.StringUtils;
import java.util.TimeZone;
//
// Supports the following syntax
@ -43,7 +44,10 @@ public class JdbcConfiguration extends ConnectionConfiguration {
// can be out/err/url
static final String DEBUG_OUTPUT_DEFAULT = "err";
private static final List<String> KNOWN_OPTIONS = Arrays.asList(DEBUG, DEBUG_OUTPUT);
static final String TIME_ZONE = "time_zone";
static final String TIME_ZONE_DEFAULT = "UTC";
private static final List<String> KNOWN_OPTIONS = Arrays.asList(DEBUG, DEBUG_OUTPUT, TIME_ZONE);
private HostAndPort hostAndPort;
private String originalUrl;
@ -51,6 +55,7 @@ public class JdbcConfiguration extends ConnectionConfiguration {
private boolean debug = false;
private String debugOut = DEBUG_OUTPUT_DEFAULT;
private final TimeZone timeZone;
public JdbcConfiguration(String u, Properties props) {
super(props);
@ -60,6 +65,7 @@ public class JdbcConfiguration extends ConnectionConfiguration {
Properties set = settings();
debug = Boolean.parseBoolean(set.getProperty(DEBUG, DEBUG_DEFAULT));
debugOut = settings().getProperty(DEBUG_OUTPUT, DEBUG_OUTPUT_DEFAULT);
timeZone = TimeZone.getTimeZone(settings().getProperty(TIME_ZONE, TIME_ZONE_DEFAULT));
}
private void parseUrl(String u) {
@ -192,6 +198,10 @@ public class JdbcConfiguration extends ConnectionConfiguration {
return debugOut;
}
public TimeZone timeZone() {
return timeZone;
}
public static boolean canAccept(String url) {
return (StringUtils.hasText(url) && url.trim().startsWith(JdbcConfiguration.URL_PREFIX));
}

View File

@ -61,13 +61,13 @@ public class JdbcConnection implements Connection, JdbcWrapper {
@Override
public Statement createStatement() throws SQLException {
checkOpen();
return new JdbcStatement(this);
return new JdbcStatement(this, info);
}
@Override
public PreparedStatement prepareStatement(String sql) throws SQLException {
checkOpen();
return new JdbcPreparedStatement(this, sql);
return new JdbcPreparedStatement(this, info, sql);
}
@Override

View File

@ -32,8 +32,8 @@ import java.util.Calendar;
class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement {
final PreparedQuery query;
JdbcPreparedStatement(JdbcConnection con, String sql) {
super(con);
JdbcPreparedStatement(JdbcConnection con, JdbcConfiguration info, String sql) {
super(con, info);
this.query = PreparedQuery.prepare(sql);
}

View File

@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.jdbc.net.client.RequestMeta;
class JdbcStatement implements Statement, JdbcWrapper {
final JdbcConnection con;
final JdbcConfiguration info;
private boolean closed = false;
private boolean closeOnCompletion = false;
@ -27,8 +28,9 @@ class JdbcStatement implements Statement, JdbcWrapper {
protected JdbcResultSet rs;
final RequestMeta requestMeta = new RequestMeta();
JdbcStatement(JdbcConnection jdbcConnection) {
JdbcStatement(JdbcConnection jdbcConnection, JdbcConfiguration info) {
this.con = jdbcConnection;
this.info = info;
}
@Override
@ -153,7 +155,7 @@ class JdbcStatement implements Statement, JdbcWrapper {
// close previous result set
closeResultSet();
Cursor cursor = con.client.query(sql, requestMeta);
Cursor cursor = con.client.query(sql, info.timeZone(), requestMeta);
rs = new JdbcResultSet(this, cursor);
}

View File

@ -37,6 +37,7 @@ import java.io.IOException;
import java.sql.SQLException;
import java.time.Instant;
import java.util.List;
import java.util.TimeZone;
public class JdbcHttpClient implements Closeable {
@FunctionalInterface
@ -64,14 +65,14 @@ public class JdbcHttpClient implements Closeable {
}
}
public Cursor query(String sql, RequestMeta meta) throws SQLException {
BytesArray ba = http.put(out -> queryRequest(out, meta, sql));
public Cursor query(String sql, TimeZone timeZone, RequestMeta meta) throws SQLException {
BytesArray ba = http.put(out -> queryRequest(out, meta, sql, timeZone));
return doIO(ba, in -> queryResponse(in, meta));
}
private void queryRequest(DataOutput out, RequestMeta meta, String sql) throws IOException {
private void queryRequest(DataOutput out, RequestMeta meta, String sql, TimeZone timeZone) throws IOException {
int fetch = meta.fetchSize() >= 0 ? meta.fetchSize() : conCfg.pageSize();
ProtoUtils.write(out, new QueryInitRequest(fetch, sql, timeout(meta)));
ProtoUtils.write(out, new QueryInitRequest(fetch, sql, timeZone, timeout(meta)));
}
public String nextPage(String requestId, Page page, RequestMeta meta) throws SQLException {

View File

@ -33,6 +33,7 @@ import java.nio.file.Path;
import java.sql.DriverManager;
import java.util.Arrays;
import java.util.Collection;
import java.util.TimeZone;
import java.util.function.Function;
import static java.util.Collections.emptySet;
@ -117,7 +118,8 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
@Before
public void setupJdbcTemplate() throws Exception {
j = new JdbcTemplate(() -> DriverManager.getConnection("jdbc:es://" + System.getProperty("tests.rest.cluster")));
j = new JdbcTemplate(() -> DriverManager.getConnection(
"jdbc:es://" + System.getProperty("tests.rest.cluster") + "/?time_zone=" + TimeZone.getDefault().getID()));
}
protected void index(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {

View File

@ -7,13 +7,12 @@ package org.elasticsearch.xpack.sql.jdbc.compare;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.sql.jdbc.JdbcIntegrationTestCase;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
@ -21,9 +20,15 @@ import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static java.util.Collections.emptyMap;
@ -34,6 +39,10 @@ import static org.elasticsearch.xpack.sql.jdbc.compare.JdbcAssert.assertResultSe
* Compares Elasticsearch's JDBC driver to H2.
*/
public abstract class CompareToH2BaseTestCase extends JdbcIntegrationTestCase {
static final DateTimeFormatter UTC_FORMATTER = DateTimeFormatter.ISO_DATE_TIME
.withLocale(Locale.ROOT)
.withZone(ZoneId.of("UTC"));
public final String queryName;
public final String query;
public final Integer lineNumber;
@ -102,6 +111,7 @@ public abstract class CompareToH2BaseTestCase extends JdbcIntegrationTestCase {
*/
try (Connection h2 = DriverManager.getConnection(
"jdbc:h2:mem:;DATABASE_TO_UPPER=false;ALIAS_COLUMN_NAME=true;INIT=RUNSCRIPT FROM 'classpath:/h2-setup.sql'")) {
fillH2(h2);
try (PreparedStatement h2Query = h2.prepareStatement(query);
ResultSet expected = h2Query.executeQuery()) {
setupElasticsearchIndex();
@ -113,7 +123,7 @@ public abstract class CompareToH2BaseTestCase extends JdbcIntegrationTestCase {
}
}
private void setupElasticsearchIndex() throws IOException, URISyntaxException {
private void setupElasticsearchIndex() throws Exception {
XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
createIndex.startObject("settings"); {
createIndex.field("number_of_shards", 1);
@ -136,32 +146,80 @@ public abstract class CompareToH2BaseTestCase extends JdbcIntegrationTestCase {
createIndex.endObject().endObject();
client().performRequest("PUT", "/emp", emptyMap(), new StringEntity(createIndex.string(), ContentType.APPLICATION_JSON));
URL dataSet = CompareToH2BaseTestCase.class.getResource("/employees.csv");
if (dataSet == null) {
throw new IllegalArgumentException("Can't find employees.csv");
}
StringBuilder bulk = new StringBuilder();
List<String> lines = Files.readAllLines(PathUtils.get(dataSet.toURI()));
if (lines.isEmpty()) {
throw new IllegalArgumentException("employees.csv must contain at least a title row");
}
String[] titles = lines.get(0).split(",");
for (int t = 0; t < titles.length; t++) {
titles[t] = titles[t].replaceAll("\"", "");
}
for (int l = 1; l < lines.size(); l++) {
csvToLines("employees", (titles, fields) -> {
bulk.append("{\"index\":{}}\n");
bulk.append('{');
String[] columns = lines.get(l).split(",");
for (int c = 0; c < columns.length; c++) {
if (c != 0) {
for (int f = 0; f < fields.size(); f++) {
if (f != 0) {
bulk.append(',');
}
bulk.append('"').append(titles[c]).append("\":\"").append(columns[c]).append('"');
bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"');
}
bulk.append("}\n");
}
});
client().performRequest("POST", "/emp/emp/_bulk", singletonMap("refresh", "true"),
new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
}
/**
* Fill the h2 database. Note that we have to parse the CSV ourselves
* because h2 interprets the CSV using the default locale which is
* randomized by the testing framework. Because some locales (th-TH,
* for example) parse dates in very different ways we parse using the
* root locale.
*/
private void fillH2(Connection h2) throws Exception {
csvToLines("employees", (titles, fields) -> {
StringBuilder insert = new StringBuilder("INSERT INTO \"emp.emp\" (");
for (int t = 0; t < titles.size(); t++) {
if (t != 0) {
insert.append(',');
}
insert.append('"').append(titles.get(t)).append('"');
}
insert.append(") VALUES (");
for (int t = 0; t < titles.size(); t++) {
if (t != 0) {
insert.append(',');
}
insert.append('?');
}
insert.append(')');
PreparedStatement s = h2.prepareStatement(insert.toString());
for (int t = 0; t < titles.size(); t++) {
String field = fields.get(t);
if (titles.get(t).endsWith("date")) {
/* Dates need special handling because H2 uses the default local for
* parsing which doesn't work because Elasticsearch always uses
* the "root" locale. This mismatch would cause the test to fail
* all the time in places like Thailand. Luckily Elasticsearch's
* randomized testing sometimes randomly pretends you are in
* Thailand and caught this.... */
s.setTimestamp(t + 1, new Timestamp(Instant.from(UTC_FORMATTER.parse(field)).toEpochMilli()));
} else {
s.setString(t + 1, field);
}
}
assertEquals(1, s.executeUpdate());
});
}
private void csvToLines(String name,
CheckedBiConsumer<List<String>, List<String>, Exception> consumeLine) throws Exception {
String location = "/" + name + ".csv";
URL dataSet = CompareToH2BaseTestCase.class.getResource(location);
if (dataSet == null) {
throw new IllegalArgumentException("Can't find [" + location + "]");
}
List<String> lines = Files.readAllLines(PathUtils.get(dataSet.toURI()));
if (lines.isEmpty()) {
throw new IllegalArgumentException("[" + location + "] must contain at least a title row");
}
List<String> titles = Arrays.asList(lines.get(0).split(","));
for (int l = 1; l < lines.size(); l++) {
consumeLine.accept(titles, Arrays.asList(lines.get(l).split(",")));
}
}
}

View File

@ -8,16 +8,17 @@ package org.elasticsearch.xpack.sql.jdbc.compare;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.time.Instant;
import java.util.Locale;
import java.util.TimeZone;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.compare.CompareToH2BaseTestCase.UTC_FORMATTER;
import static org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcUtils.nameOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcUtils.nameOf;
public class JdbcAssert {
public static void assertResultSets(ResultSet expected, ResultSet actual) throws SQLException {
assertResultSetMetadata(expected, actual);
@ -35,6 +36,7 @@ public class JdbcAssert {
String actualName = actualMeta.getColumnName(column);
if (!expectedName.equals(actualName)) {
// NOCOMMIT this needs a comment explaining it....
String expectedSet = expectedName;
String actualSet = actualName;
if (column > 1) {
@ -42,14 +44,16 @@ public class JdbcAssert {
actualSet = actualMeta.getColumnName(column - 1) + "," + actualName;
}
assertEquals(f("Different column name %d", column), expectedSet, actualSet);
assertEquals("Different column name [" + column + "]", expectedSet, actualSet);
}
// use the type not the name (timestamp with timezone returns spaces for example)
int expectedType = expectedMeta.getColumnType(column);
int actualType = actualMeta.getColumnType(column);
assertEquals(f("Different column type for column '%s' (%s vs %s), ", expectedName, nameOf(expectedType), nameOf(actualType)), expectedType, actualType);
assertEquals(
"Different column type for column [" + expectedName + "] (" + nameOf(expectedType) + " != " + nameOf(actualType) + ")",
expectedType, actualType);
}
}
@ -59,26 +63,32 @@ public class JdbcAssert {
long count = 0;
while (expected.next()) {
assertTrue(f("Expected more data but no more entries found after %d", count++), actual.next());
assertTrue("Expected more data but no more entries found after [" + count + "]", actual.next());
count++;
for (int column = 1; column <= columns; column++) {
Object expectedObject = expected.getObject(column);
Object actualObject = actual.getObject(column);
int type = metaData.getColumnType(column);
// handle timestamps with care because h2 returns "funny" objects
if (type == Types.TIMESTAMP_WITH_TIMEZONE) {
expectedObject = expected.getTimestamp(column);
actualObject = actual.getTimestamp(column);
} else if (type == Types.TIME) {
expectedObject = expected.getTime(column);
actualObject = actual.getTime(column);
} else if (type == Types.DATE) {
expectedObject = expected.getDate(column);
actualObject = actual.getDate(column);
}
String msg = "Different result for column [" + metaData.getColumnName(column) + "], entry [" + count + "]";
String msg = f("Different result for column %s, entry %d", metaData.getColumnName(column), count);
if (type == Types.TIMESTAMP) {
/*
* Life is just too confusing with timestamps and default
* time zones and default locales. Instead we compare the
* string representations of the dates converted into UTC
* in the ROOT locale. This gives us error messages in UTC
* on failure which is *way* easier to reason about.
*
* Life is confusing because H2 always uses the default
* locale and time zone for date functions.
*/
msg += " locale is [" + Locale.getDefault() + "] and time zone is [" + TimeZone.getDefault() + "]";
expectedObject = UTC_FORMATTER.format(Instant.ofEpochMilli(((Timestamp) expectedObject).getTime()));
actualObject = UTC_FORMATTER.format(Instant.ofEpochMilli(((Timestamp) actualObject).getTime()));
// NOCOMMIT look at ResultSet.getTimestamp(int, Calendar)
}
if (type == Types.DOUBLE) {
// NOCOMMIT 1d/1f seems like a huge difference.
@ -90,10 +100,6 @@ public class JdbcAssert {
}
}
}
assertEquals(f("%s still has data after %d entries", actual, count), expected.next(), actual.next());
}
private static String f(String message, Object... args) {
return format(Locale.ROOT, message, args);
assertEquals("[" + actual + "] still has data after [" + count + "] entries", expected.next(), actual.next());
}
}

View File

@ -194,4 +194,3 @@ aggAvgWithMultipleHavingWithLimit
SELECT gender g, CAST(AVG(emp_no) AS FLOAT) a FROM "emp.emp" GROUP BY g HAVING a > 10 AND a < 10000000 LIMIT 1;
aggAvgWithMultipleHavingOnAliasAndFunction
SELECT gender g, CAST(AVG(emp_no) AS FLOAT) a FROM "emp.emp" GROUP BY g HAVING a > 10 AND AVG(emp_no) > 10000000;

View File

@ -16,7 +16,7 @@
//
// Date
//
//
//dateTimeDay
//SELECT DAY(birth_date) d, last_name l FROM "emp.emp" WHERE emp_no < 10010 ORDER BY emp_no;
@ -56,4 +56,4 @@ SELECT YEAR(birth_date) AS d, CAST(SUM(emp_no) AS INT) s FROM "emp.emp" GROUP BY
//dateTimeAggByMonth
//SELECT MONTH(birth_date) AS d, CAST(SUM(emp_no) AS INT) s FROM "emp.emp" GROUP BY MONTH(birth_date) ORDER BY MONTH(birth_date) LIMIT 5;
//dateTimeAggByDayOfMonth
//SELECT DAY_OF_MONTH(birth_date) AS d, CAST(SUM(emp_no) AS INT) s FROM "emp.emp" GROUP BY DAY_OF_MONTH(birth_date) ORDER BY DAY_OF_MONTH(birth_date) DESC;
//SELECT DAY_OF_MONTH(birth_date) AS d, CAST(SUM(emp_no) AS INT) s FROM "emp.emp" GROUP BY DAY_OF_MONTH(birth_date) ORDER BY DAY_OF_MONTH(birth_date) DESC;

View File

@ -1,4 +1,4 @@
"birth_date","emp_no","first_name","gender","hire_date","last_name"
birth_date,emp_no,first_name,gender,hire_date,last_name
1953-09-02T00:00:00Z,10001,Georgi,M,1986-06-26T00:00:00Z,Facello
1964-06-02T00:00:00Z,10002,Bezalel,F,1985-11-21T00:00:00Z,Simmel
1959-12-03T00:00:00Z,10003,Parto,M,1986-08-28T00:00:00Z,Bamford

1 birth_date emp_no first_name gender hire_date last_name
2 1953-09-02T00:00:00Z 10001 Georgi M 1986-06-26T00:00:00Z Facello
3 1964-06-02T00:00:00Z 10002 Bezalel F 1985-11-21T00:00:00Z Simmel
4 1959-12-03T00:00:00Z 10003 Parto M 1986-08-28T00:00:00Z Bamford

View File

@ -4,5 +4,4 @@ CREATE TABLE "emp.emp" ("birth_date" TIMESTAMP,
"gender" VARCHAR(1),
"hire_date" TIMESTAMP,
"last_name" VARCHAR(50)
)
AS SELECT * FROM CSVREAD('classpath:/employees.csv');
);

View File

@ -281,7 +281,8 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
else if (plan instanceof Aggregate) {
Aggregate a = (Aggregate) plan;
if (hasStar(a.aggregates())) {
return new Aggregate(a.location(), a.child(), a.groupings(), expandProjections(a.aggregates(), a.child()));
return new Aggregate(a.location(), a.child(), a.groupings(),
expandProjections(a.aggregates(), a.child()));
}
// if the grouping is unresolved but the aggs are, use the latter to resolve the former
// solves the case of queries declaring an alias in SELECT and referring to it in GROUP BY
@ -640,7 +641,8 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
// TODO: might be removed
// dedicated count optimization
if (name.toUpperCase(Locale.ROOT).equals("COUNT")) {
uf = new UnresolvedFunction(uf.location(), uf.name(), uf.distinct(), singletonList(Literal.of(uf.arguments().get(0).location(), Integer.valueOf(1))));
uf = new UnresolvedFunction(uf.location(), uf.name(), uf.distinct(), uf.timeZone(),
singletonList(Literal.of(uf.arguments().get(0).location(), Integer.valueOf(1))));
}
}
@ -916,7 +918,8 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
// Replace the resolved expression
if (!resolvedAggExp.isEmpty()) {
// push them down to the agg
Aggregate newAgg = new Aggregate(agg.location(), agg.child(), agg.groupings(), combine(agg.aggregates(), resolvedAggExp));
Aggregate newAgg = new Aggregate(agg.location(), agg.child(), agg.groupings(),
combine(agg.aggregates(), resolvedAggExp));
// wire it up to the filter with the new condition
Filter newFilter = new Filter(f.location(), newAgg, analyzedFilterCondition);
// and finally project the fluff away

View File

@ -5,9 +5,6 @@
*/
package org.elasticsearch.xpack.sql.execution;
import java.io.IOException;
import java.util.function.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
@ -24,7 +21,13 @@ import org.elasticsearch.xpack.sql.session.RowSetCursor;
import org.elasticsearch.xpack.sql.session.SqlSession;
import org.elasticsearch.xpack.sql.session.SqlSettings;
import java.io.IOException;
import java.util.TimeZone;
import java.util.function.Supplier;
public class PlanExecutor extends AbstractLifecycleComponent {
// NOCOMMIT prefer not to use AbstractLifecycleComponent because the reasons for its tradeoffs is lost to the mists of time
private static final SqlSettings DEFAULTS = SqlSettings.EMPTY;
private final Client client;
@ -35,8 +38,6 @@ public class PlanExecutor extends AbstractLifecycleComponent {
private final Optimizer optimizer;
private final Planner planner;
private final SqlSettings DEFAULTS = SqlSettings.EMPTY;
public PlanExecutor(Client client, Supplier<ClusterState> clusterState) {
super(client.settings());
@ -58,9 +59,9 @@ public class PlanExecutor extends AbstractLifecycleComponent {
return new SqlSession(DEFAULTS, client, parser, catalog, functionRegistry, analyzer, optimizer, planner);
}
public void sql(String sql, ActionListener<RowSetCursor> listener) {
public void sql(String sql, TimeZone timeZone, ActionListener<RowSetCursor> listener) {
SqlSession session = newSession();
session.executable(sql).execute(session, listener);
session.executable(sql, timeZone).execute(session, listener);
}
@Override

View File

@ -5,14 +5,6 @@
*/
package org.elasticsearch.xpack.sql.expression.function;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import org.elasticsearch.common.Strings;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Expression;
@ -24,6 +16,15 @@ import org.elasticsearch.xpack.sql.tree.NodeUtils.NodeInfo;
import org.elasticsearch.xpack.sql.util.Assert;
import org.elasticsearch.xpack.sql.util.StringUtils;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import java.util.regex.Pattern;
import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toList;
@ -101,6 +102,7 @@ abstract class AbstractFunctionRegistry implements FunctionRegistry {
boolean distinctAware = true;
boolean noArgument = false;
boolean tzAware = false;
// distinct ctor
if (!Arrays.equals(new Class[] { Location.class, exp, boolean.class }, info.ctr.getParameterTypes())) {
if (ur.distinct()) {
@ -112,6 +114,9 @@ abstract class AbstractFunctionRegistry implements FunctionRegistry {
if (expVal instanceof List && ((List) expVal).isEmpty()) {
noArgument = Arrays.equals(new Class[] { Location.class }, info.ctr.getParameterTypes());
}
else if (Arrays.equals(new Class[] { Location.class, exp, TimeZone.class }, info.ctr.getParameterTypes())) {
tzAware = true;
}
// distinctless
else if (!Arrays.equals(new Class[] { Location.class, exp }, info.ctr.getParameterTypes())) {
throw new SqlIllegalArgumentException("No constructor with signature [%s, %s (,%s)?] found for [%s]",
@ -120,7 +125,13 @@ abstract class AbstractFunctionRegistry implements FunctionRegistry {
}
try {
Object[] args = noArgument ? new Object[] { ur.location() } : (distinctAware ? new Object[] { ur.location(), expVal, ur.distinct() } : new Object[] { ur.location(), expVal });
// NOCOMMIT reflection here feels icky
Object[] args;
if (tzAware) {
args = new Object[] { ur.location(), expVal, ur.timeZone() };
} else {
args = noArgument ? new Object[] { ur.location() } : (distinctAware ? new Object[] { ur.location(), expVal, ur.distinct() } : new Object[] { ur.location(), expVal });
}
return (Function) info.ctr.newInstance(args);
} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
throw new SqlIllegalArgumentException(ex, "Cannot create instance of function %s", ur.name());

View File

@ -5,8 +5,6 @@
*/
package org.elasticsearch.xpack.sql.expression.function;
import java.util.List;
import org.elasticsearch.xpack.sql.capabilities.Unresolvable;
import org.elasticsearch.xpack.sql.capabilities.UnresolvedException;
import org.elasticsearch.xpack.sql.expression.Attribute;
@ -14,15 +12,20 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import java.util.List;
import java.util.TimeZone;
public class UnresolvedFunction extends Function implements Unresolvable {
private final String name;
private final boolean distinct;
private final TimeZone timeZone;
public UnresolvedFunction(Location location, String name, boolean distinct, List<Expression> children) {
public UnresolvedFunction(Location location, String name, boolean distinct, TimeZone timeZone, List<Expression> children) {
super(location, children);
this.name = name;
this.distinct = distinct;
this.timeZone = timeZone;
}
@Override
@ -44,6 +47,10 @@ public class UnresolvedFunction extends Function implements Unresolvable {
return distinct;
}
public TimeZone timeZone() {
return timeZone;
}
@Override
public DataType dataType() {
throw new UnresolvedException("dataType", this);

View File

@ -19,16 +19,19 @@ import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.ReadableDateTime;
import java.util.Locale;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public abstract class DateTimeFunction extends ScalarFunction {
private final TimeZone timeZone;
public DateTimeFunction(Location location, Expression argument) {
// NOCOMMIT I feel like our lives could be made a lot simpler with composition instead of inheritance here
public DateTimeFunction(Location location, Expression argument, TimeZone timeZone) {
super(location, argument);
this.timeZone = timeZone;
}
@Override
@ -45,8 +48,11 @@ public abstract class DateTimeFunction extends ScalarFunction {
@Override
protected ScriptTemplate asScriptFrom(FieldAttribute field) {
return new ScriptTemplate(createTemplate("doc[{}].date"),
paramsBuilder().variable(field.name()).build(),
// NOCOMMIT I think we should investigate registering SQL as a script engine so we don't need to generate painless
return new ScriptTemplate(createTemplate(),
paramsBuilder()
.variable(field.name())
.build(),
dataType());
}
@ -55,8 +61,18 @@ public abstract class DateTimeFunction extends ScalarFunction {
throw new UnsupportedOperationException();
}
private String createTemplate(String template) {
return format(Locale.ROOT, "%s.get%s()", formatTemplate(template), extractFunction());
private String createTemplate() {
if (timeZone.getID().equals("UTC")) {
return formatTemplate("doc[{}].value.get" + extractFunction() + "()");
} else {
// NOCOMMIT ewwww
/* This uses the Java 9 time API because Painless doesn't whitelist creation of new
* Joda classes. */
String asInstant = formatTemplate("Instant.ofEpochMilli(doc[{}].value.millis)");
String zoneId = "ZoneId.of(\"" + timeZone.toZoneId().getId() + "\"";
String asZonedDateTime = "ZonedDateTime.ofInstant(" + asInstant + ", " + zoneId + "))";
return asZonedDateTime + ".get(ChronoField." + chronoField().name() + ")";
}
}
protected String extractFunction() {
@ -75,6 +91,10 @@ public abstract class DateTimeFunction extends ScalarFunction {
else {
dt = (ReadableDateTime) l;
}
if (false == timeZone.getID().equals("UTC")) {
// TODO probably faster to use `null` for UTC like core does
dt = dt.toDateTime().withZone(DateTimeZone.forTimeZone(timeZone));
}
return Integer.valueOf(extract(dt));
};
}
@ -84,6 +104,10 @@ public abstract class DateTimeFunction extends ScalarFunction {
return DataTypes.INTEGER;
}
public TimeZone timeZone() {
return timeZone;
}
protected abstract int extract(ReadableDateTime dt);
// used for aggregration (date histogram)
@ -91,4 +115,7 @@ public abstract class DateTimeFunction extends ScalarFunction {
// used for applying ranges
public abstract String dateTimeFormat();
// used for generating the painless script version of this function when the time zone is not utc
protected abstract ChronoField chronoField();
}

View File

@ -9,10 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.ReadableDateTime;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
public class DayOfMonth extends DateTimeFunction {
public DayOfMonth(Location location, Expression argument) {
super(location, argument);
public DayOfMonth(Location location, Expression argument, TimeZone timeZone) {
super(location, argument, timeZone);
}
@Override
@ -29,4 +32,9 @@ public class DayOfMonth extends DateTimeFunction {
protected int extract(ReadableDateTime dt) {
return dt.getDayOfMonth();
}
@Override
protected ChronoField chronoField() {
return ChronoField.DAY_OF_MONTH;
}
}

View File

@ -9,10 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.ReadableDateTime;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
public class DayOfWeek extends DateTimeFunction {
public DayOfWeek(Location location, Expression argument) {
super(location, argument);
public DayOfWeek(Location location, Expression argument, TimeZone timeZone) {
super(location, argument, timeZone);
}
@Override
@ -29,4 +32,9 @@ public class DayOfWeek extends DateTimeFunction {
protected int extract(ReadableDateTime dt) {
return dt.getDayOfWeek();
}
@Override
protected ChronoField chronoField() {
return ChronoField.DAY_OF_WEEK;
}
}

View File

@ -9,10 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.ReadableDateTime;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
public class DayOfYear extends DateTimeFunction {
public DayOfYear(Location location, Expression argument) {
super(location, argument);
public DayOfYear(Location location, Expression argument, TimeZone timeZone) {
super(location, argument, timeZone);
}
@Override
@ -29,4 +32,9 @@ public class DayOfYear extends DateTimeFunction {
protected int extract(ReadableDateTime dt) {
return dt.getDayOfYear();
}
@Override
protected ChronoField chronoField() {
return ChronoField.DAY_OF_YEAR;
}
}

View File

@ -8,104 +8,106 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import java.util.TimeZone;
public enum Extract {
YEAR {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return new Year(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return new Year(source, argument, timeZone);
}
},
MONTH {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return new MonthOfYear(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return new MonthOfYear(source, argument, timeZone);
}
},
WEEK {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return new WeekOfWeekYear(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return new WeekOfWeekYear(source, argument, timeZone);
}
},
DAY {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return new DayOfMonth(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return new DayOfMonth(source, argument, timeZone);
}
},
DAY_OF_MONTH {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return DAY.toFunction(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return DAY.toFunction(source, argument, timeZone);
}
},
DOM {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return DAY.toFunction(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return DAY.toFunction(source, argument, timeZone);
}
},
DAY_OF_WEEK {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return new DayOfWeek(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return new DayOfWeek(source, argument, timeZone);
}
},
DOW {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return DAY_OF_WEEK.toFunction(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return DAY_OF_WEEK.toFunction(source, argument, timeZone);
}
},
DAY_OF_YEAR {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return new DayOfYear(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return new DayOfYear(source, argument, timeZone);
}
},
DOY {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return DAY_OF_YEAR.toFunction(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return DAY_OF_YEAR.toFunction(source, argument, timeZone);
}
},
HOUR {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return new HourOfDay(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return new HourOfDay(source, argument, timeZone);
}
},
MINUTE {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return new MinuteOfHour(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return new MinuteOfHour(source, argument, timeZone);
}
},
MINUTE_OF_HOUR {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return MINUTE.toFunction(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return MINUTE.toFunction(source, argument, timeZone);
}
},
MINUTE_OF_DAY {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return new MinuteOfDay(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return new MinuteOfDay(source, argument, timeZone);
}
},
SECOND {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return new SecondOfMinute(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return new SecondOfMinute(source, argument, timeZone);
}
},
SECOND_OF_MINUTE {
@Override
public DateTimeFunction toFunction(Location source, Expression argument) {
return SECOND.toFunction(source, argument);
public DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone) {
return SECOND.toFunction(source, argument, timeZone);
}
};
public abstract DateTimeFunction toFunction(Location source, Expression argument);
public abstract DateTimeFunction toFunction(Location source, Expression argument, TimeZone timeZone);
}

View File

@ -9,10 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.ReadableDateTime;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
public class HourOfDay extends DateTimeFunction {
public HourOfDay(Location location, Expression argument) {
super(location, argument);
public HourOfDay(Location location, Expression argument, TimeZone timeZone) {
super(location, argument, timeZone);
}
@Override
@ -29,4 +32,9 @@ public class HourOfDay extends DateTimeFunction {
protected int extract(ReadableDateTime dt) {
return dt.getHourOfDay();
}
@Override
protected ChronoField chronoField() {
return ChronoField.HOUR_OF_DAY;
}
}

View File

@ -9,10 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.ReadableDateTime;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
public class MinuteOfDay extends DateTimeFunction {
public MinuteOfDay(Location location, Expression argument) {
super(location, argument);
public MinuteOfDay(Location location, Expression argument, TimeZone timeZone) {
super(location, argument, timeZone);
}
@Override
@ -29,4 +32,9 @@ public class MinuteOfDay extends DateTimeFunction {
protected int extract(ReadableDateTime dt) {
return dt.getMinuteOfDay();
}
@Override
protected ChronoField chronoField() {
return ChronoField.MINUTE_OF_DAY;
}
}

View File

@ -9,10 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.ReadableDateTime;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
public class MinuteOfHour extends DateTimeFunction {
public MinuteOfHour(Location location, Expression argument) {
super(location, argument);
public MinuteOfHour(Location location, Expression argument, TimeZone timeZone) {
super(location, argument, timeZone);
}
@Override
@ -29,4 +32,9 @@ public class MinuteOfHour extends DateTimeFunction {
protected int extract(ReadableDateTime dt) {
return dt.getMinuteOfHour();
}
@Override
protected ChronoField chronoField() {
return ChronoField.MINUTE_OF_HOUR;
}
}

View File

@ -9,10 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.ReadableDateTime;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
public class MonthOfYear extends DateTimeFunction {
public MonthOfYear(Location location, Expression argument) {
super(location, argument);
public MonthOfYear(Location location, Expression argument, TimeZone timeZone) {
super(location, argument, timeZone);
}
@Override
@ -29,4 +32,9 @@ public class MonthOfYear extends DateTimeFunction {
protected int extract(ReadableDateTime dt) {
return dt.getMonthOfYear();
}
@Override
protected ChronoField chronoField() {
return ChronoField.MONTH_OF_YEAR;
}
}

View File

@ -9,10 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.ReadableDateTime;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
public class SecondOfMinute extends DateTimeFunction {
public SecondOfMinute(Location location, Expression argument) {
super(location, argument);
public SecondOfMinute(Location location, Expression argument, TimeZone timeZone) {
super(location, argument, timeZone);
}
@Override
@ -29,4 +32,9 @@ public class SecondOfMinute extends DateTimeFunction {
protected int extract(ReadableDateTime dt) {
return dt.getSecondOfMinute();
}
@Override
protected ChronoField chronoField() {
return ChronoField.SECOND_OF_MINUTE;
}
}

View File

@ -9,10 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.ReadableDateTime;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
public class WeekOfWeekYear extends DateTimeFunction {
public WeekOfWeekYear(Location location, Expression argument) {
super(location, argument);
public WeekOfWeekYear(Location location, Expression argument, TimeZone timeZone) {
super(location, argument, timeZone);
}
@Override
@ -29,4 +32,9 @@ public class WeekOfWeekYear extends DateTimeFunction {
protected int extract(ReadableDateTime dt) {
return dt.getWeekOfWeekyear();
}
@Override
protected ChronoField chronoField() {
return ChronoField.ALIGNED_WEEK_OF_YEAR; // NOCOMMIT is this right?
}
}

View File

@ -9,10 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.ReadableDateTime;
import java.time.temporal.ChronoField;
import java.util.TimeZone;
public class Year extends DateTimeFunction {
public Year(Location location, Expression argument) {
super(location, argument);
public Year(Location location, Expression argument, TimeZone timeZone) {
super(location, argument, timeZone);
}
@Override
@ -34,4 +37,9 @@ public class Year extends DateTimeFunction {
public Expression orderBy() {
return argument();
}
@Override
protected ChronoField chronoField() {
return ChronoField.YEAR;
}
}

View File

@ -8,7 +8,12 @@ package org.elasticsearch.xpack.sql.parser;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SingleStatementContext;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import java.util.TimeZone;
class AstBuilder extends CommandBuilder {
AstBuilder(TimeZone timeZone) {
super(timeZone);
}
@Override
public LogicalPlan visitSingleStatement(SingleStatementContext ctx) {

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.parser;
import java.util.Locale;
import java.util.TimeZone;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DebugContext;
@ -31,6 +32,9 @@ import org.elasticsearch.xpack.sql.plan.logical.command.ShowTables;
import org.elasticsearch.xpack.sql.tree.Location;
abstract class CommandBuilder extends LogicalPlanBuilder {
CommandBuilder(TimeZone timeZone) {
super(timeZone);
}
@Override
public Command visitDebug(DebugContext ctx) {

View File

@ -5,43 +5,10 @@
*/
package org.elasticsearch.xpack.sql.parser;
import java.math.BigDecimal;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnReferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DereferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExistsContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionCallContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.IntegerLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalBinaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalNotContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.MatchQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.MultiMatchQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NullLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.OrderByContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParenthesizedExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicateContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicatedContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PrimitiveDataTypeContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SelectExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SingleExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StarContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContext;
import org.elasticsearch.xpack.sql.expression.Alias;
import org.elasticsearch.xpack.sql.expression.Exists;
import org.elasticsearch.xpack.sql.expression.Expression;
@ -69,14 +36,64 @@ import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MultiMatchQuery
import org.elasticsearch.xpack.sql.expression.predicate.fulltext.StringQueryPredicate;
import org.elasticsearch.xpack.sql.expression.regex.Like;
import org.elasticsearch.xpack.sql.expression.regex.RLike;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnReferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DereferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExistsContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionCallContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.IntegerLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalBinaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalNotContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.MatchQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.MultiMatchQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NullLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.OrderByContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParenthesizedExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicateContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicatedContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PrimitiveDataTypeContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SelectExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SingleExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StarContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContext;
import org.elasticsearch.xpack.sql.plan.TableIdentifier;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;
import java.math.BigDecimal;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import java.util.stream.Collectors;
import static java.lang.String.format;
abstract class ExpressionBuilder extends IdentifierBuilder {
/**
* Time zone in which to execute the query. Used by date time
* functions and the rounding in date histograms.
*/
private final TimeZone timeZone;
ExpressionBuilder(TimeZone timeZone) {
this.timeZone = timeZone;
}
/**
* Time zone in which to execute the query. Used by date time
* functions and the rounding in date histograms.
*/
protected TimeZone timeZone() {
return timeZone;
}
protected Expression expression(ParseTree ctx) {
return typedParsing(ctx, Expression.class);
@ -287,7 +304,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
if (ctx.setQuantifier() != null) {
isDistinct = (ctx.setQuantifier().DISTINCT() != null);
}
return new UnresolvedFunction(source(ctx), name, isDistinct, expressions(ctx.expression()));
return new UnresolvedFunction(source(ctx), name, isDistinct, timeZone, expressions(ctx.expression()));
}
@Override
@ -300,7 +317,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
} catch (IllegalArgumentException ex) {
throw new ParsingException(source, format(Locale.ROOT, "Invalid EXTRACT field %s", fieldString));
}
return extract.toFunction(source, expression(ctx.valueExpression()));
return extract.toFunction(source, expression(ctx.valueExpression()), timeZone);
}
@Override

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.parser;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.AliasedQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.AliasedRelationContext;
@ -47,6 +48,9 @@ import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toList;
abstract class LogicalPlanBuilder extends ExpressionBuilder {
LogicalPlanBuilder(TimeZone timeZone) {
super(timeZone);
}
@Override
public LogicalPlan visitQuery(QueryContext ctx) {

View File

@ -5,8 +5,6 @@
*/
package org.elasticsearch.xpack.sql.parser;
import java.util.function.Function;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenStream;
@ -18,33 +16,33 @@ import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.misc.Pair;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.sql.parser.SqlBaseBaseListener;
import org.elasticsearch.xpack.sql.parser.SqlBaseLexer;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import java.util.TimeZone;
import java.util.function.Function;
public class SqlParser {
private static final Logger log = Loggers.getLogger(SqlParser.class);
public LogicalPlan createStatement(String sql) {
public LogicalPlan createStatement(String sql, TimeZone timeZone) {
if (log.isDebugEnabled()) {
log.debug("Parsing as statement: {}", sql);
}
return invokeParser("statement", sql, SqlBaseParser::singleStatement);
return invokeParser("statement", sql, timeZone, SqlBaseParser::singleStatement);
}
public Expression createExpression(String expression) {
public Expression createExpression(String expression, TimeZone timeZone) {
if (log.isDebugEnabled()) {
log.debug("Parsing as expression: {}", expression);
}
return invokeParser("expression", expression, SqlBaseParser::singleExpression);
return invokeParser("expression", expression, timeZone, SqlBaseParser::singleExpression);
}
@SuppressWarnings("unchecked")
private <T> T invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction) {
private <T> T invokeParser(String name, String sql, TimeZone timeZone, Function<SqlBaseParser, ParserRuleContext> parseFunction) {
try {
SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(sql));
@ -75,7 +73,7 @@ public class SqlParser {
postProcess(lexer, parser, tree);
return (T) new AstBuilder().visit(tree);
return (T) new AstBuilder(timeZone).visit(tree);
}
catch (StackOverflowError e) {

View File

@ -5,9 +5,6 @@
*/
package org.elasticsearch.xpack.sql.plan.logical;
import java.util.List;
import java.util.Objects;
import org.elasticsearch.xpack.sql.capabilities.Resolvables;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
@ -15,6 +12,9 @@ import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
import org.elasticsearch.xpack.sql.tree.Location;
import java.util.List;
import java.util.Objects;
public class Aggregate extends UnaryPlan {
private final List<Expression> groupings;

View File

@ -5,14 +5,14 @@
*/
package org.elasticsearch.xpack.sql.plan.physical;
import java.util.List;
import java.util.Objects;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
import java.util.List;
import java.util.Objects;
public class AggregateExec extends UnaryExec implements Unexecutable {
private final List<? extends Expression> groupings;

View File

@ -257,7 +257,7 @@ abstract class QueryTranslator {
// dates are handled differently because of date histograms
if (exp instanceof DateTimeFunction) {
DateTimeFunction dtf = (DateTimeFunction) exp;
agg = new GroupByDateAgg(aggId, AggPath.bucketValue(propertyPath), nameOf(exp), dtf.interval());
agg = new GroupByDateAgg(aggId, AggPath.bucketValue(propertyPath), nameOf(exp), dtf.interval(), dtf.timeZone());
}
else {
agg = new GroupByColumnAgg(aggId, AggPath.bucketValue(propertyPath), ne.name());

View File

@ -21,6 +21,7 @@ import org.elasticsearch.xpack.sql.jdbc.net.protocol.QueryPageRequest;
import org.elasticsearch.xpack.sql.plugin.cli.http.CliServerProtoUtils;
import org.elasticsearch.xpack.sql.util.StringUtils;
import java.util.TimeZone;
import java.util.function.Supplier;
import static org.elasticsearch.action.ActionListener.wrap;
@ -60,8 +61,9 @@ public class CliServer {
public void command(CommandRequest req, ActionListener<Response> listener) {
final long start = System.currentTimeMillis();
executor.sql(req.command, wrap(
// TODO support non-utc for cli server
executor.sql(req.command, TimeZone.getTimeZone("UTC"), wrap(
c -> {
long stop = System.currentTimeMillis();
String requestId = EMPTY;

View File

@ -113,7 +113,7 @@ public class JdbcServer {
public void queryInit(QueryInitRequest req, ActionListener<Response> listener) {
final long start = System.currentTimeMillis();
executor.sql(req.query, wrap(c -> {
executor.sql(req.query, req.timeZone, wrap(c -> {
long stop = System.currentTimeMillis();
String requestId = EMPTY;
if (c.hasNextSet() && c instanceof SearchHitRowSetCursor) {

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.plugin.sql.action;
import java.io.IOException;
import java.util.Objects;
import java.util.TimeZone;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
@ -21,13 +22,15 @@ public class SqlRequest extends ActionRequest implements CompositeIndicesRequest
// initialized on the first request
private String query;
private TimeZone timeZone;
// initialized after the plan has been translated
private String sessionId;
public SqlRequest() {}
public SqlRequest(String query, String sessionId) {
public SqlRequest(String query, TimeZone timeZone, String sessionId) {
this.query = query;
this.timeZone = timeZone;
this.sessionId = sessionId;
}
@ -48,6 +51,10 @@ public class SqlRequest extends ActionRequest implements CompositeIndicesRequest
return sessionId;
}
public TimeZone timeZone() {
return timeZone;
}
public SqlRequest query(String query) {
this.query = query;
return this;
@ -62,6 +69,7 @@ public class SqlRequest extends ActionRequest implements CompositeIndicesRequest
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
query = in.readString();
timeZone = TimeZone.getTimeZone(in.readString());
sessionId = in.readOptionalString();
}
@ -69,6 +77,7 @@ public class SqlRequest extends ActionRequest implements CompositeIndicesRequest
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(query);
out.writeString(timeZone.getID());
out.writeOptionalString(sessionId);
}

View File

@ -8,14 +8,16 @@ package org.elasticsearch.xpack.sql.plugin.sql.action;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import java.util.TimeZone;
public class SqlRequestBuilder extends ActionRequestBuilder<SqlRequest, SqlResponse, SqlRequestBuilder> {
public SqlRequestBuilder(ElasticsearchClient client, SqlAction action) {
this(client, action, null, null);
this(client, action, null, null, null);
}
public SqlRequestBuilder(ElasticsearchClient client, SqlAction action, String query, String sessionId) {
super(client, action, new SqlRequest(query, sessionId));
public SqlRequestBuilder(ElasticsearchClient client, SqlAction action, String query, TimeZone timeZone, String sessionId) {
super(client, action, new SqlRequest(query, timeZone, sessionId));
}
public SqlRequestBuilder query(String query) {

View File

@ -24,6 +24,7 @@ import org.elasticsearch.xpack.sql.analysis.catalog.EsCatalog;
import org.elasticsearch.xpack.sql.execution.PlanExecutor;
import org.elasticsearch.xpack.sql.session.RowSetCursor;
import java.util.TimeZone;
import java.util.function.Supplier;
import static org.elasticsearch.xpack.sql.util.ActionUtils.chain;
@ -59,6 +60,7 @@ public class TransportSqlAction extends HandledTransportAction<SqlRequest, SqlRe
protected void doExecute(SqlRequest request, ActionListener<SqlResponse> listener) {
String sessionId = request.sessionId();
String query = request.query();
TimeZone timeZone = request.timeZone();
try {
if (sessionId == null) {
@ -69,7 +71,7 @@ public class TransportSqlAction extends HandledTransportAction<SqlRequest, SqlRe
// NOCOMMIT move session information somewhere - like into scroll or something. We should be able to reuse something.
// generate the plan and once its done, generate the session id, store it and send back the response
planExecutor.sql(query, chain(listener, c -> {
planExecutor.sql(query, timeZone, chain(listener, c -> {
String id = generateId();
SESSIONS.put(id, c);
return new SqlResponse(id, c);

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.plugin.sql.rest;
import java.io.IOException;
import java.util.TimeZone;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.client.node.NodeClient;
@ -35,17 +36,16 @@ public class RestSqlAction extends BaseRestHandler {
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
Payload p = null;
Payload p;
try {
p = Payload.from(request);
} catch (IOException ex) {
return channel -> error(channel, ex);
}
final String query = p.query;
return channel -> client.executeLocally(SqlAction.INSTANCE, new SqlRequest(query, null), new CursorRestResponseListener(channel));
return channel -> client.executeLocally(SqlAction.INSTANCE, new SqlRequest(p.query, p.timeZone, null),
new CursorRestResponseListener(channel));
}
private void error(RestChannel channel, Exception ex) {
@ -69,9 +69,11 @@ public class RestSqlAction extends BaseRestHandler {
static {
PARSER.declareString(Payload::setQuery, new ParseField("query"));
PARSER.declareString(Payload::setTimeZone, new ParseField("time_zone"));
}
String query;
TimeZone timeZone;
static Payload from(RestRequest request) throws IOException {
Payload payload = new Payload();
@ -81,9 +83,14 @@ public class RestSqlAction extends BaseRestHandler {
return payload;
}
public void setQuery(String query) {
this.query = query;
}
public void setTimeZone(String timeZone) {
this.timeZone = TimeZone.getTimeZone(timeZone);
}
}
}

View File

@ -5,33 +5,37 @@
*/
package org.elasticsearch.xpack.sql.querydsl.agg;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.xpack.sql.querydsl.container.Sort;
import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction;
import org.joda.time.DateTimeZone;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TimeZone;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
public class GroupByDateAgg extends GroupingAgg {
private final String interval;
private final TimeZone timeZone;
public GroupByDateAgg(String id, String propertyPath, String fieldName, String interval) {
this(id, propertyPath, fieldName, interval, emptyList(), emptyList(), emptyMap());
public GroupByDateAgg(String id, String propertyPath, String fieldName, String interval, TimeZone timeZone) {
this(id, propertyPath, fieldName, interval, timeZone, emptyList(), emptyList(), emptyMap());
}
public GroupByDateAgg(String id, String propertyPath, String fieldName, String interval, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order) {
public GroupByDateAgg(String id, String propertyPath, String fieldName, String interval, TimeZone timeZone, List<LeafAgg> subAggs,
List<PipelineAgg> subPipelines, Map<String, Direction> order) {
super(id, propertyPath, fieldName, subAggs, subPipelines, order);
this.interval = interval;
this.timeZone = timeZone;
}
public String interval() {
@ -42,6 +46,7 @@ public class GroupByDateAgg extends GroupingAgg {
protected AggregationBuilder toGroupingAgg() {
DateHistogramAggregationBuilder dhab = dateHistogram(id())
.field(fieldName())
.timeZone(DateTimeZone.forTimeZone(timeZone))
.dateHistogramInterval(new DateHistogramInterval(interval));
if (!order().isEmpty()) {
for (Entry<String, Sort.Direction> entry : order().entrySet()) {
@ -65,6 +70,6 @@ public class GroupByDateAgg extends GroupingAgg {
@Override
protected GroupingAgg clone(String id, String propertyPath, String fieldName, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order) {
return new GroupByDateAgg(id, propertyPath, fieldName, interval, subAggs, subPipelines, order);
return new GroupByDateAgg(id, propertyPath, fieldName, interval, timeZone, subAggs, subPipelines, order);
}
}

View File

@ -95,6 +95,7 @@ public abstract class GroupingAgg extends Agg {
return Objects.equals(this.order.get(leafAggId), order) ? this : clone(id(), propertyPath(), fieldName(), subAggs, subPipelines, combine(this.order, singletonMap(leafAggId, order)));
}
// NOCOMMIT clone is a scary name.
protected abstract GroupingAgg clone(String id, String propertyPath, String fieldName, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order);
@Override

View File

@ -5,8 +5,6 @@
*/
package org.elasticsearch.xpack.sql.session;
import java.util.function.Function;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
@ -20,6 +18,9 @@ import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
import org.elasticsearch.xpack.sql.planner.Planner;
import java.util.TimeZone;
import java.util.function.Function;
public class SqlSession {
private final Client client;
@ -78,12 +79,12 @@ public class SqlSession {
return optimizer;
}
public LogicalPlan parse(String sql) {
return parser.createStatement(sql);
public LogicalPlan parse(String sql, TimeZone timeZone) {
return parser.createStatement(sql, timeZone);
}
public Expression expression(String expression) {
return parser.createExpression(expression);
public Expression expression(String expression, TimeZone timeZone) {
return parser.createExpression(expression, timeZone);
}
public LogicalPlan analyzedPlan(LogicalPlan plan, boolean verify) {
@ -98,8 +99,8 @@ public class SqlSession {
return planner.plan(optimizedPlan(optimized), verify);
}
public PhysicalPlan executable(String sql) {
return physicalPlan(parse(sql), true);
public PhysicalPlan executable(String sql, TimeZone timeZone) {
return physicalPlan(parse(sql, timeZone), true);
}
public SqlSettings defaults() {