SQL: DATABASE() and USER() system functions (#35946)
This commit is contained in:
parent
8e86340d1d
commit
aabff7318e
|
@ -13,6 +13,7 @@
|
|||
* <<sql-functions-string, String>>
|
||||
* <<sql-functions-type-conversion,Type Conversion>>
|
||||
* <<sql-functions-conditional, Conditional>>
|
||||
* <<sql-functions-system, System>>
|
||||
|
||||
include::operators.asciidoc[]
|
||||
include::aggs.asciidoc[]
|
||||
|
@ -22,3 +23,4 @@ include::math.asciidoc[]
|
|||
include::string.asciidoc[]
|
||||
include::type-conversion.asciidoc[]
|
||||
include::conditional.asciidoc[]
|
||||
include::system.asciidoc[]
|
||||
|
|
|
@ -0,0 +1,52 @@
|
|||
[role="xpack"]
|
||||
[testenv="basic"]
|
||||
[[sql-functions-system]]
|
||||
=== System Functions
|
||||
|
||||
These functions return metadata-type information about the system being queried.
|
||||
|
||||
[[sql-functions-system-database]]
|
||||
==== `DATABASE`
|
||||
|
||||
.Synopsis:
|
||||
[source, sql]
|
||||
--------------------------------------------------
|
||||
DATABASE()
|
||||
--------------------------------------------------
|
||||
|
||||
*Input*:
|
||||
|
||||
*Output*: string
|
||||
|
||||
.Description:
|
||||
|
||||
Returns the name of the database being queried. In the case of Elasticsearch SQL, this
|
||||
is the name of the Elasticsearch cluster. This function should always return a non-null
|
||||
value.
|
||||
|
||||
["source","sql",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{sql-specs}/docs.csv-spec[database]
|
||||
--------------------------------------------------
|
||||
|
||||
[[sql-functions-system-user]]
|
||||
==== `USER`
|
||||
|
||||
.Synopsis:
|
||||
[source, sql]
|
||||
--------------------------------------------------
|
||||
USER()
|
||||
--------------------------------------------------
|
||||
*Input*:
|
||||
|
||||
*Output*: string
|
||||
|
||||
.Description:
|
||||
|
||||
Returns the username of the authenticated user executing the query. This function can
|
||||
return `null` if Security is disabled.
|
||||
|
||||
["source","sql",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{sql-specs}/docs.csv-spec[user]
|
||||
--------------------------------------------------
|
|
@ -208,8 +208,8 @@ class JdbcDatabaseMetaData implements DatabaseMetaData, JdbcWrapper {
|
|||
|
||||
@Override
|
||||
public String getSystemFunctions() throws SQLException {
|
||||
// TODO: sync this with the grammar
|
||||
return EMPTY;
|
||||
// https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/system-functions?view=sql-server-2017
|
||||
return "DATABASE, IFNULL, USER";
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -0,0 +1,223 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.sql.qa.security;
|
||||
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.elasticsearch.client.Request;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.Response;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.NotEqualMessageBuilder;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.rules.TestName;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.sql.JDBCType;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.columnInfo;
|
||||
|
||||
public class UserFunctionIT extends ESRestTestCase {
|
||||
|
||||
private static final String SQL = "SELECT USER()";
|
||||
// role defined in roles.yml
|
||||
private static final String MINIMAL_ACCESS_ROLE = "rest_minimal";
|
||||
private List<String> users;
|
||||
@Rule
|
||||
public TestName name = new TestName();
|
||||
|
||||
@Override
|
||||
protected Settings restClientSettings() {
|
||||
return RestSqlIT.securitySettings();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getProtocol() {
|
||||
return RestSqlIT.SSL_ENABLED ? "https" : "http";
|
||||
}
|
||||
|
||||
@Before
|
||||
private void setUpUsers() throws IOException {
|
||||
int usersCount = name.getMethodName().startsWith("testSingle") ? 1 : randomIntBetween(5, 15);
|
||||
users = new ArrayList<String>(usersCount);
|
||||
|
||||
for(int i = 0; i < usersCount; i++) {
|
||||
String randomUserName = randomAlphaOfLengthBetween(1, 15);
|
||||
users.add(randomUserName);
|
||||
createUser(randomUserName, MINIMAL_ACCESS_ROLE);
|
||||
}
|
||||
}
|
||||
|
||||
@After
|
||||
private void clearUsers() throws IOException {
|
||||
for (String user : users) {
|
||||
deleteUser(user);
|
||||
}
|
||||
}
|
||||
|
||||
public void testSingleRandomUser() throws IOException {
|
||||
String mode = randomMode().toString();
|
||||
String randomUserName = users.get(0);
|
||||
|
||||
Map<String, Object> expected = new HashMap<>();
|
||||
expected.put("columns", Arrays.asList(
|
||||
columnInfo(mode, "USER", "keyword", JDBCType.VARCHAR, 0)));
|
||||
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName)));
|
||||
Map<String, Object> actual = runSql(randomUserName, mode, SQL);
|
||||
|
||||
assertResponse(expected, actual);
|
||||
}
|
||||
|
||||
public void testSingleRandomUserWithWhereEvaluatingTrue() throws IOException {
|
||||
index("{\"test\":\"doc1\"}",
|
||||
"{\"test\":\"doc2\"}",
|
||||
"{\"test\":\"doc3\"}");
|
||||
String mode = randomMode().toString();
|
||||
String randomUserName = users.get(0);
|
||||
|
||||
Map<String, Object> expected = new HashMap<>();
|
||||
expected.put("columns", Arrays.asList(
|
||||
columnInfo(mode, "USER", "keyword", JDBCType.VARCHAR, 0)));
|
||||
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName),
|
||||
Arrays.asList(randomUserName),
|
||||
Arrays.asList(randomUserName)));
|
||||
Map<String, Object> actual = runSql(randomUserName, mode, SQL + " FROM test WHERE USER()='" + randomUserName + "' LIMIT 3");
|
||||
assertResponse(expected, actual);
|
||||
}
|
||||
|
||||
@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/35980")
|
||||
public void testSingleRandomUserWithWhereEvaluatingFalse() throws IOException {
|
||||
index("{\"test\":\"doc1\"}",
|
||||
"{\"test\":\"doc2\"}",
|
||||
"{\"test\":\"doc3\"}");
|
||||
String mode = randomMode().toString();
|
||||
String randomUserName = users.get(0);
|
||||
|
||||
Map<String, Object> expected = new HashMap<>();
|
||||
expected.put("columns", Arrays.asList(
|
||||
columnInfo(mode, "USER", "keyword", JDBCType.VARCHAR, 0)));
|
||||
expected.put("rows", Collections.<ArrayList<String>>emptyList());
|
||||
String anotherRandomUserName = randomValueOtherThan(randomUserName, () -> randomAlphaOfLengthBetween(1, 15));
|
||||
Map<String, Object> actual = runSql(randomUserName, mode, SQL + " FROM test WHERE USER()='" + anotherRandomUserName + "' LIMIT 3");
|
||||
assertResponse(expected, actual);
|
||||
}
|
||||
|
||||
public void testMultipleRandomUsersAccess() throws IOException {
|
||||
// run 30 queries and pick randomly each time one of the 5-15 users created previously
|
||||
for (int i = 0; i < 30; i++) {
|
||||
String mode = randomMode().toString();
|
||||
String randomlyPickedUsername = randomFrom(users);
|
||||
Map<String, Object> expected = new HashMap<>();
|
||||
|
||||
expected.put("columns", Arrays.asList(
|
||||
columnInfo(mode, "USER", "keyword", JDBCType.VARCHAR, 0)));
|
||||
expected.put("rows", Arrays.asList(Arrays.asList(randomlyPickedUsername)));
|
||||
Map<String, Object> actual = runSql(randomlyPickedUsername, mode, SQL);
|
||||
|
||||
// expect the user that ran the query to be the same as the one returned by the `USER()` function
|
||||
assertResponse(expected, actual);
|
||||
}
|
||||
}
|
||||
|
||||
public void testSingleUserSelectFromIndex() throws IOException {
|
||||
index("{\"test\":\"doc1\"}",
|
||||
"{\"test\":\"doc2\"}",
|
||||
"{\"test\":\"doc3\"}");
|
||||
String mode = randomMode().toString();
|
||||
String randomUserName = users.get(0);
|
||||
|
||||
Map<String, Object> expected = new HashMap<>();
|
||||
expected.put("columns", Arrays.asList(
|
||||
columnInfo(mode, "USER", "keyword", JDBCType.VARCHAR, 0)));
|
||||
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName),
|
||||
Arrays.asList(randomUserName),
|
||||
Arrays.asList(randomUserName)));
|
||||
Map<String, Object> actual = runSql(randomUserName, mode, "SELECT USER() FROM test LIMIT 3");
|
||||
|
||||
assertResponse(expected, actual);
|
||||
}
|
||||
|
||||
private void createUser(String name, String role) throws IOException {
|
||||
Request request = new Request("PUT", "/_xpack/security/user/" + name);
|
||||
XContentBuilder user = JsonXContent.contentBuilder().prettyPrint();
|
||||
user.startObject(); {
|
||||
user.field("password", "testpass");
|
||||
user.field("roles", role);
|
||||
}
|
||||
user.endObject();
|
||||
request.setJsonEntity(Strings.toString(user));
|
||||
client().performRequest(request);
|
||||
}
|
||||
|
||||
private void deleteUser(String name) throws IOException {
|
||||
Request request = new Request("DELETE", "/_xpack/security/user/" + name);
|
||||
client().performRequest(request);
|
||||
}
|
||||
|
||||
private Map<String, Object> runSql(String asUser, String mode, String sql) throws IOException {
|
||||
return runSql(asUser, mode, new StringEntity("{\"query\": \"" + sql + "\"}", ContentType.APPLICATION_JSON));
|
||||
}
|
||||
|
||||
private Map<String, Object> runSql(String asUser, String mode, HttpEntity entity) throws IOException {
|
||||
Request request = new Request("POST", "/_sql");
|
||||
if (false == mode.isEmpty()) {
|
||||
request.addParameter("mode", mode);
|
||||
}
|
||||
if (asUser != null) {
|
||||
RequestOptions.Builder options = request.getOptions().toBuilder();
|
||||
options.addHeader("es-security-runas-user", asUser);
|
||||
request.setOptions(options);
|
||||
}
|
||||
request.setEntity(entity);
|
||||
return toMap(client().performRequest(request));
|
||||
}
|
||||
|
||||
private void assertResponse(Map<String, Object> expected, Map<String, Object> actual) {
|
||||
if (false == expected.equals(actual)) {
|
||||
NotEqualMessageBuilder message = new NotEqualMessageBuilder();
|
||||
message.compareMaps(actual, expected);
|
||||
fail("Response does not match:\n" + message.toString());
|
||||
}
|
||||
}
|
||||
|
||||
private static Map<String, Object> toMap(Response response) throws IOException {
|
||||
try (InputStream content = response.getEntity().getContent()) {
|
||||
return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false);
|
||||
}
|
||||
}
|
||||
|
||||
private String randomMode() {
|
||||
return randomFrom("plain", "jdbc", "");
|
||||
}
|
||||
|
||||
private void index(String... docs) throws IOException {
|
||||
Request request = new Request("POST", "/test/test/_bulk");
|
||||
request.addParameter("refresh", "true");
|
||||
StringBuilder bulk = new StringBuilder();
|
||||
for (String doc : docs) {
|
||||
bulk.append("{\"index\":{}\n");
|
||||
bulk.append(doc + "\n");
|
||||
}
|
||||
request.setJsonEntity(bulk.toString());
|
||||
client().performRequest(request);
|
||||
}
|
||||
}
|
|
@ -108,6 +108,8 @@ SUBSTRING |SCALAR
|
|||
UCASE |SCALAR
|
||||
CAST |SCALAR
|
||||
CONVERT |SCALAR
|
||||
DATABASE |SCALAR
|
||||
USER |SCALAR
|
||||
SCORE |SCORE
|
||||
;
|
||||
|
||||
|
|
|
@ -285,6 +285,8 @@ SUBSTRING |SCALAR
|
|||
UCASE |SCALAR
|
||||
CAST |SCALAR
|
||||
CONVERT |SCALAR
|
||||
DATABASE |SCALAR
|
||||
USER |SCALAR
|
||||
SCORE |SCORE
|
||||
// end::showFunctions
|
||||
;
|
||||
|
@ -1683,3 +1685,27 @@ SELECT null <=> null AS "equals";
|
|||
true
|
||||
// end::nullEqualsCompareTwoNulls
|
||||
;
|
||||
|
||||
// ignored because tests run with a docs-not-worthy cluster name
|
||||
// at the time of this test being ignored, the cluster name was x-pack_plugin_sql_qa_single-node_integTestCluster
|
||||
database-Ignore
|
||||
// tag::database
|
||||
SELECT DATABASE();
|
||||
|
||||
DATABASE
|
||||
---------------
|
||||
elasticsearch
|
||||
// end::database
|
||||
;
|
||||
|
||||
// ignored because tests run with a docs-not-worthy user name
|
||||
// at the time of this test being ignored, there was no user name being used
|
||||
user-Ignore
|
||||
// tag::user
|
||||
SELECT USER();
|
||||
|
||||
USER
|
||||
---------------
|
||||
elastic
|
||||
// end::user
|
||||
;
|
||||
|
|
|
@ -75,6 +75,7 @@ SELECT CAST(emp_no AS BOOL) AS emp_no_cast FROM "test_emp" ORDER BY emp_no LIMIT
|
|||
|
||||
//
|
||||
// SELECT with IS NULL and IS NOT NULL
|
||||
//
|
||||
isNullAndIsNotNull
|
||||
SELECT null IS NULL AS col1, null IS NOT NULL AS col2;
|
||||
isNullAndIsNotNullAndNegation
|
||||
|
@ -84,7 +85,9 @@ SELECT (null = 1) IS NULL AS col1, (null = 1) IS NOT NULL AS col2;
|
|||
isNullAndIsNotNullOverComparisonWithNegation
|
||||
SELECT NOT((null = 1) IS NULL) AS col1, NOT((null = 1) IS NOT NULL) AS col2;
|
||||
|
||||
// with table columns
|
||||
//
|
||||
// SELECT with IS NULL and IS NOT NULL with table columns
|
||||
//
|
||||
isNullAndIsNotNull_onTableColumns
|
||||
SELECT languages IS NULL AS col1, languages IS NOT NULL AS col2 FROM "test_emp" WHERE emp_no IN (10018, 10019, 10020) ORDER BY emp_no;
|
||||
isNullAndIsNotNullAndNegation_onTableColumns
|
||||
|
@ -93,3 +96,21 @@ isNullAndIsNotNullOverComparison_onTableColumns
|
|||
SELECT (languages = 2) IS NULL AS col1, (languages = 2) IS NOT NULL AS col2 FROM test_emp WHERE emp_no IN (10018, 10019, 10020) ORDER BY emp_no;
|
||||
isNullAndIsNotNullOverComparisonWithNegation_onTableColumns
|
||||
SELECT NOT((languages = 2) IS NULL) AS col1, NOT((languages = 2) IS NOT NULL) AS col2 FROM test_emp WHERE emp_no IN (10018, 10019, 10020) ORDER BY emp_no;
|
||||
|
||||
//
|
||||
// SELECT with functions locally evaluated
|
||||
//
|
||||
selectMathPI
|
||||
SELECT PI() AS pi;
|
||||
selectMathPIFromIndex
|
||||
SELECT PI() AS pi FROM test_emp LIMIT 3;
|
||||
selectMathPIFromIndexWithWhereEvaluatingToTrue
|
||||
SELECT PI() AS pi FROM test_emp WHERE ROUND(PI(),2)=3.14;
|
||||
selectMathPIFromIndexWithWhereEvaluatingToTrueAndWithLimit
|
||||
SELECT PI() AS pi FROM test_emp WHERE ROUND(PI(),2)=3.14 LIMIT 3;
|
||||
// AwaitsFix https://github.com/elastic/elasticsearch/issues/35980
|
||||
selectMathPIFromIndexWithWhereEvaluatingToFalse-Ignore
|
||||
SELECT PI() AS pi FROM test_emp WHERE PI()=5;
|
||||
// AwaitsFix https://github.com/elastic/elasticsearch/issues/35980
|
||||
selectMathPIFromIndexWithWhereEvaluatingToFalseAndWithLimit-Ignore
|
||||
SELECT PI() AS pi FROM test_emp WHERE PI()=5 LIMIT 3;
|
||||
|
|
|
@ -45,6 +45,7 @@ import org.elasticsearch.xpack.sql.plan.logical.UnresolvedRelation;
|
|||
import org.elasticsearch.xpack.sql.plan.logical.With;
|
||||
import org.elasticsearch.xpack.sql.rule.Rule;
|
||||
import org.elasticsearch.xpack.sql.rule.RuleExecutor;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.type.DataType;
|
||||
import org.elasticsearch.xpack.sql.type.DataTypeConversion;
|
||||
import org.elasticsearch.xpack.sql.type.DataTypes;
|
||||
|
@ -60,7 +61,6 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import static java.util.Collections.emptyList;
|
||||
import static java.util.Collections.singletonList;
|
||||
|
@ -77,19 +77,19 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
|
|||
*/
|
||||
private final IndexResolution indexResolution;
|
||||
/**
|
||||
* Time zone in which we're executing this SQL. It is attached to functions
|
||||
* that deal with date and time.
|
||||
* Per-request specific settings needed in some of the functions (timezone, username and clustername),
|
||||
* to which they are attached.
|
||||
*/
|
||||
private final TimeZone timeZone;
|
||||
private final Configuration configuration;
|
||||
/**
|
||||
* The verifier has the role of checking the analyzed tree for failures and build a list of failures.
|
||||
*/
|
||||
private final Verifier verifier;
|
||||
|
||||
public Analyzer(FunctionRegistry functionRegistry, IndexResolution results, TimeZone timeZone, Verifier verifier) {
|
||||
public Analyzer(Configuration configuration, FunctionRegistry functionRegistry, IndexResolution results, Verifier verifier) {
|
||||
this.configuration = configuration;
|
||||
this.functionRegistry = functionRegistry;
|
||||
this.indexResolution = results;
|
||||
this.timeZone = timeZone;
|
||||
this.verifier = verifier;
|
||||
}
|
||||
|
||||
|
@ -815,7 +815,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
|
|||
}
|
||||
// TODO: look into Generator for significant terms, etc..
|
||||
FunctionDefinition def = functionRegistry.resolveFunction(functionName);
|
||||
Function f = uf.buildResolved(timeZone, def);
|
||||
Function f = uf.buildResolved(configuration, def);
|
||||
|
||||
list.add(f);
|
||||
return f;
|
||||
|
|
|
@ -1,13 +1,14 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.sql.expression.function;
|
||||
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import static java.lang.String.format;
|
||||
|
||||
|
@ -17,7 +18,7 @@ public class FunctionDefinition {
|
|||
*/
|
||||
@FunctionalInterface
|
||||
public interface Builder {
|
||||
Function build(UnresolvedFunction uf, boolean distinct, TimeZone tz);
|
||||
Function build(UnresolvedFunction uf, boolean distinct, Configuration configuration);
|
||||
}
|
||||
|
||||
private final String name;
|
||||
|
|
|
@ -21,6 +21,8 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.Sum;
|
|||
import org.elasticsearch.xpack.sql.expression.function.aggregate.SumOfSquares;
|
||||
import org.elasticsearch.xpack.sql.expression.function.aggregate.VarPop;
|
||||
import org.elasticsearch.xpack.sql.expression.function.scalar.Cast;
|
||||
import org.elasticsearch.xpack.sql.expression.function.scalar.Database;
|
||||
import org.elasticsearch.xpack.sql.expression.function.scalar.User;
|
||||
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName;
|
||||
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth;
|
||||
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfWeek;
|
||||
|
@ -89,6 +91,7 @@ import org.elasticsearch.xpack.sql.expression.predicate.conditional.Least;
|
|||
import org.elasticsearch.xpack.sql.expression.predicate.conditional.NullIf;
|
||||
import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mod;
|
||||
import org.elasticsearch.xpack.sql.parser.ParsingException;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.tree.Location;
|
||||
import org.elasticsearch.xpack.sql.type.DataType;
|
||||
import org.elasticsearch.xpack.sql.util.StringUtils;
|
||||
|
@ -232,6 +235,9 @@ public class FunctionRegistry {
|
|||
def(UCase.class, UCase::new));
|
||||
// DataType conversion
|
||||
addToMap(def(Cast.class, Cast::new, "CONVERT"));
|
||||
// Scalar "meta" functions
|
||||
addToMap(def(Database.class, Database::new),
|
||||
def(User.class, User::new));
|
||||
// Special
|
||||
addToMap(def(Score.class, Score::new));
|
||||
}
|
||||
|
@ -301,7 +307,7 @@ public class FunctionRegistry {
|
|||
*/
|
||||
static <T extends Function> FunctionDefinition def(Class<T> function,
|
||||
java.util.function.Function<Location, T> ctorRef, String... aliases) {
|
||||
FunctionBuilder builder = (location, children, distinct, tz) -> {
|
||||
FunctionBuilder builder = (location, children, distinct, cfg) -> {
|
||||
if (false == children.isEmpty()) {
|
||||
throw new IllegalArgumentException("expects no arguments");
|
||||
}
|
||||
|
@ -313,6 +319,30 @@ public class FunctionRegistry {
|
|||
return def(function, builder, false, aliases);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a {@linkplain FunctionDefinition} for a no-argument function that
|
||||
* is not aware of time zone, does not support {@code DISTINCT} and needs
|
||||
* the cluster name (DATABASE()) or the user name (USER()).
|
||||
*/
|
||||
@SuppressWarnings("overloads")
|
||||
static <T extends Function> FunctionDefinition def(Class<T> function,
|
||||
ConfigurationAwareFunctionBuilder<T> ctorRef, String... aliases) {
|
||||
FunctionBuilder builder = (location, children, distinct, cfg) -> {
|
||||
if (false == children.isEmpty()) {
|
||||
throw new IllegalArgumentException("expects no arguments");
|
||||
}
|
||||
if (distinct) {
|
||||
throw new IllegalArgumentException("does not support DISTINCT yet it was specified");
|
||||
}
|
||||
return ctorRef.build(location, cfg);
|
||||
};
|
||||
return def(function, builder, false, aliases);
|
||||
}
|
||||
|
||||
interface ConfigurationAwareFunctionBuilder<T> {
|
||||
T build(Location location, Configuration configuration);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a {@linkplain FunctionDefinition} for a unary function that is not
|
||||
* aware of time zone and does not support {@code DISTINCT}.
|
||||
|
@ -320,7 +350,7 @@ public class FunctionRegistry {
|
|||
@SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do
|
||||
static <T extends Function> FunctionDefinition def(Class<T> function,
|
||||
BiFunction<Location, Expression, T> ctorRef, String... aliases) {
|
||||
FunctionBuilder builder = (location, children, distinct, tz) -> {
|
||||
FunctionBuilder builder = (location, children, distinct, cfg) -> {
|
||||
if (children.size() != 1) {
|
||||
throw new IllegalArgumentException("expects exactly one argument");
|
||||
}
|
||||
|
@ -339,7 +369,7 @@ public class FunctionRegistry {
|
|||
@SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do
|
||||
static <T extends Function> FunctionDefinition def(Class<T> function,
|
||||
MultiFunctionBuilder<T> ctorRef, String... aliases) {
|
||||
FunctionBuilder builder = (location, children, distinct, tz) -> {
|
||||
FunctionBuilder builder = (location, children, distinct, cfg) -> {
|
||||
if (distinct) {
|
||||
throw new IllegalArgumentException("does not support DISTINCT yet it was specified");
|
||||
}
|
||||
|
@ -359,7 +389,7 @@ public class FunctionRegistry {
|
|||
@SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do
|
||||
static <T extends Function> FunctionDefinition def(Class<T> function,
|
||||
DistinctAwareUnaryFunctionBuilder<T> ctorRef, String... aliases) {
|
||||
FunctionBuilder builder = (location, children, distinct, tz) -> {
|
||||
FunctionBuilder builder = (location, children, distinct, cfg) -> {
|
||||
if (children.size() != 1) {
|
||||
throw new IllegalArgumentException("expects exactly one argument");
|
||||
}
|
||||
|
@ -379,14 +409,14 @@ public class FunctionRegistry {
|
|||
@SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do
|
||||
static <T extends Function> FunctionDefinition def(Class<T> function,
|
||||
DatetimeUnaryFunctionBuilder<T> ctorRef, String... aliases) {
|
||||
FunctionBuilder builder = (location, children, distinct, tz) -> {
|
||||
FunctionBuilder builder = (location, children, distinct, cfg) -> {
|
||||
if (children.size() != 1) {
|
||||
throw new IllegalArgumentException("expects exactly one argument");
|
||||
}
|
||||
if (distinct) {
|
||||
throw new IllegalArgumentException("does not support DISTINCT yet it was specified");
|
||||
}
|
||||
return ctorRef.build(location, children.get(0), tz);
|
||||
return ctorRef.build(location, children.get(0), cfg.timeZone());
|
||||
};
|
||||
return def(function, builder, true, aliases);
|
||||
}
|
||||
|
@ -402,7 +432,7 @@ public class FunctionRegistry {
|
|||
@SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do
|
||||
static <T extends Function> FunctionDefinition def(Class<T> function,
|
||||
BinaryFunctionBuilder<T> ctorRef, String... aliases) {
|
||||
FunctionBuilder builder = (location, children, distinct, tz) -> {
|
||||
FunctionBuilder builder = (location, children, distinct, cfg) -> {
|
||||
boolean isBinaryOptionalParamFunction = function.isAssignableFrom(Round.class) || function.isAssignableFrom(Truncate.class);
|
||||
if (isBinaryOptionalParamFunction && (children.size() > 2 || children.size() < 1)) {
|
||||
throw new IllegalArgumentException("expects one or two arguments");
|
||||
|
@ -426,9 +456,9 @@ public class FunctionRegistry {
|
|||
private static FunctionDefinition def(Class<? extends Function> function, FunctionBuilder builder,
|
||||
boolean datetime, String... aliases) {
|
||||
String primaryName = normalize(function.getSimpleName());
|
||||
FunctionDefinition.Builder realBuilder = (uf, distinct, tz) -> {
|
||||
FunctionDefinition.Builder realBuilder = (uf, distinct, cfg) -> {
|
||||
try {
|
||||
return builder.build(uf.location(), uf.children(), distinct, tz);
|
||||
return builder.build(uf.location(), uf.children(), distinct, cfg);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new ParsingException("error building [" + primaryName + "]: " + e.getMessage(), e,
|
||||
uf.location().getLineNumber(), uf.location().getColumnNumber());
|
||||
|
@ -438,13 +468,13 @@ public class FunctionRegistry {
|
|||
}
|
||||
|
||||
private interface FunctionBuilder {
|
||||
Function build(Location location, List<Expression> children, boolean distinct, TimeZone tz);
|
||||
Function build(Location location, List<Expression> children, boolean distinct, Configuration cfg);
|
||||
}
|
||||
|
||||
@SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do
|
||||
static <T extends Function> FunctionDefinition def(Class<T> function,
|
||||
ThreeParametersFunctionBuilder<T> ctorRef, String... aliases) {
|
||||
FunctionBuilder builder = (location, children, distinct, tz) -> {
|
||||
FunctionBuilder builder = (location, children, distinct, cfg) -> {
|
||||
boolean isLocateFunction = function.isAssignableFrom(Locate.class);
|
||||
if (isLocateFunction && (children.size() > 3 || children.size() < 2)) {
|
||||
throw new IllegalArgumentException("expects two or three arguments");
|
||||
|
@ -466,7 +496,7 @@ public class FunctionRegistry {
|
|||
@SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do
|
||||
static <T extends Function> FunctionDefinition def(Class<T> function,
|
||||
FourParametersFunctionBuilder<T> ctorRef, String... aliases) {
|
||||
FunctionBuilder builder = (location, children, distinct, tz) -> {
|
||||
FunctionBuilder builder = (location, children, distinct, cfg) -> {
|
||||
if (children.size() != 4) {
|
||||
throw new IllegalArgumentException("expects exactly four arguments");
|
||||
}
|
||||
|
@ -492,7 +522,7 @@ public class FunctionRegistry {
|
|||
private static <T extends Function> FunctionDefinition def(Class<T> function,
|
||||
CastFunctionBuilder<T> ctorRef,
|
||||
String... aliases) {
|
||||
FunctionBuilder builder = (location, children, distinct, tz) ->
|
||||
FunctionBuilder builder = (location, children, distinct, cfg) ->
|
||||
ctorRef.build(location, children.get(0), children.get(0).dataType());
|
||||
return def(function, builder, false, aliases);
|
||||
}
|
||||
|
|
|
@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.expression.Attribute;
|
|||
import org.elasticsearch.xpack.sql.expression.Expression;
|
||||
import org.elasticsearch.xpack.sql.expression.Literal;
|
||||
import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.tree.Location;
|
||||
import org.elasticsearch.xpack.sql.tree.NodeInfo;
|
||||
import org.elasticsearch.xpack.sql.type.DataType;
|
||||
|
@ -21,7 +22,6 @@ import java.util.List;
|
|||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
|
||||
|
@ -80,8 +80,8 @@ public class UnresolvedFunction extends Function implements Unresolvable {
|
|||
/**
|
||||
* Build a function to replace this one after resolving the function.
|
||||
*/
|
||||
public Function buildResolved(TimeZone timeZone, FunctionDefinition def) {
|
||||
return resolutionType.buildResolved(this, timeZone, def);
|
||||
public Function buildResolved(Configuration configuration, FunctionDefinition def) {
|
||||
return resolutionType.buildResolved(this, configuration, def);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -197,8 +197,8 @@ public class UnresolvedFunction extends Function implements Unresolvable {
|
|||
return uf;
|
||||
}
|
||||
@Override
|
||||
public Function buildResolved(UnresolvedFunction uf, TimeZone tz, FunctionDefinition def) {
|
||||
return def.builder().build(uf, false, tz);
|
||||
public Function buildResolved(UnresolvedFunction uf, Configuration cfg, FunctionDefinition def) {
|
||||
return def.builder().build(uf, false, cfg);
|
||||
}
|
||||
@Override
|
||||
protected boolean isValidAlternative(FunctionDefinition def) {
|
||||
|
@ -218,8 +218,8 @@ public class UnresolvedFunction extends Function implements Unresolvable {
|
|||
return uf.withMessage("* is not valid with DISTINCT");
|
||||
}
|
||||
@Override
|
||||
public Function buildResolved(UnresolvedFunction uf, TimeZone tz, FunctionDefinition def) {
|
||||
return def.builder().build(uf, true, tz);
|
||||
public Function buildResolved(UnresolvedFunction uf, Configuration cfg, FunctionDefinition def) {
|
||||
return def.builder().build(uf, true, cfg);
|
||||
}
|
||||
@Override
|
||||
protected boolean isValidAlternative(FunctionDefinition def) {
|
||||
|
@ -239,9 +239,9 @@ public class UnresolvedFunction extends Function implements Unresolvable {
|
|||
return uf.withMessage("Can't extract from *");
|
||||
}
|
||||
@Override
|
||||
public Function buildResolved(UnresolvedFunction uf, TimeZone tz, FunctionDefinition def) {
|
||||
public Function buildResolved(UnresolvedFunction uf, Configuration cfg, FunctionDefinition def) {
|
||||
if (def.datetime()) {
|
||||
return def.builder().build(uf, false, tz);
|
||||
return def.builder().build(uf, false, cfg);
|
||||
}
|
||||
return uf.withMessage("Invalid datetime field [" + uf.name() + "]. Use any datetime function.");
|
||||
}
|
||||
|
@ -266,7 +266,7 @@ public class UnresolvedFunction extends Function implements Unresolvable {
|
|||
/**
|
||||
* Build the real function from this one and resolution metadata.
|
||||
*/
|
||||
protected abstract Function buildResolved(UnresolvedFunction uf, TimeZone tz, FunctionDefinition def);
|
||||
protected abstract Function buildResolved(UnresolvedFunction uf, Configuration cfg, FunctionDefinition def);
|
||||
/**
|
||||
* Is {@code def} a valid alternative for function invocations
|
||||
* of this kind. Used to filter the list of "did you mean"
|
||||
|
|
|
@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.sql.expression.function.scalar;
|
||||
|
||||
import org.elasticsearch.xpack.sql.expression.Expression;
|
||||
import org.elasticsearch.xpack.sql.expression.gen.script.Params;
|
||||
import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.tree.Location;
|
||||
import org.elasticsearch.xpack.sql.tree.NodeInfo;
|
||||
import org.elasticsearch.xpack.sql.type.DataType;
|
||||
import org.elasticsearch.xpack.sql.util.StringUtils;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
abstract class BaseSystemFunction extends ScalarFunction {
|
||||
|
||||
private final Configuration configuration;
|
||||
|
||||
BaseSystemFunction(Location location, Configuration configuration) {
|
||||
super(location);
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Expression replaceChildren(List<Expression> newChildren) {
|
||||
throw new UnsupportedOperationException("this node doesn't have any children");
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType dataType() {
|
||||
return DataType.KEYWORD;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean foldable() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String functionArgs() {
|
||||
return StringUtils.EMPTY;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ScriptTemplate asScript() {
|
||||
return new ScriptTemplate((String) fold(), Params.EMPTY, DataType.KEYWORD);
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract Object fold();
|
||||
|
||||
@Override
|
||||
protected NodeInfo<? extends Expression> info() {
|
||||
return null;
|
||||
}
|
||||
|
||||
protected Configuration configuration() {
|
||||
return configuration;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.sql.expression.function.scalar;
|
||||
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.tree.Location;
|
||||
import org.elasticsearch.xpack.sql.tree.NodeInfo;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class Database extends BaseSystemFunction {
|
||||
|
||||
public Database(Location location, Configuration configuration) {
|
||||
super(location, configuration);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodeInfo<Database> info() {
|
||||
return NodeInfo.create(this, Database::new, configuration());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object fold() {
|
||||
return configuration().clusterName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(super.hashCode(), configuration().clusterName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
return super.equals(obj) && Objects.equals(configuration().clusterName(), ((Database) obj).configuration().clusterName());
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.sql.expression.function.scalar;
|
||||
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.tree.Location;
|
||||
import org.elasticsearch.xpack.sql.tree.NodeInfo;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class User extends BaseSystemFunction {
|
||||
|
||||
public User(Location location, Configuration configuration) {
|
||||
super(location, configuration);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodeInfo<User> info() {
|
||||
return NodeInfo.create(this, User::new, configuration());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean nullable() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object fold() {
|
||||
return configuration().username();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(super.hashCode(), configuration().username());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
return super.equals(obj) && Objects.equals(configuration().username(), ((User) obj).configuration().username());
|
||||
}
|
||||
|
||||
}
|
|
@ -97,7 +97,7 @@ public class SysColumns extends Command {
|
|||
|
||||
@Override
|
||||
public void execute(SqlSession session, ActionListener<SchemaRowSet> listener) {
|
||||
boolean isOdbcClient = session.settings().mode() == Mode.ODBC;
|
||||
boolean isOdbcClient = session.configuration().mode() == Mode.ODBC;
|
||||
List<Attribute> output = output(isOdbcClient);
|
||||
String cluster = session.indexResolver().clusterName();
|
||||
|
||||
|
|
|
@ -56,7 +56,7 @@ public class EsQueryExec extends LeafExec {
|
|||
|
||||
@Override
|
||||
public void execute(SqlSession session, ActionListener<SchemaRowSet> listener) {
|
||||
Querier scroller = new Querier(session.client(), session.settings());
|
||||
Querier scroller = new Querier(session.client(), session.configuration());
|
||||
scroller.query(Rows.schema(output), queryContainer, index, listener);
|
||||
}
|
||||
|
||||
|
|
|
@ -9,11 +9,16 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.HandledTransportAction;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.core.XPackSettings;
|
||||
import org.elasticsearch.xpack.core.security.SecurityContext;
|
||||
import org.elasticsearch.xpack.sql.action.SqlQueryAction;
|
||||
import org.elasticsearch.xpack.sql.action.SqlQueryRequest;
|
||||
import org.elasticsearch.xpack.sql.action.SqlQueryResponse;
|
||||
|
@ -31,16 +36,24 @@ import java.util.ArrayList;
|
|||
import java.util.List;
|
||||
|
||||
import static java.util.Collections.unmodifiableList;
|
||||
import static org.elasticsearch.xpack.sql.plugin.Transports.clusterName;
|
||||
import static org.elasticsearch.xpack.sql.plugin.Transports.username;
|
||||
|
||||
public class TransportSqlQueryAction extends HandledTransportAction<SqlQueryRequest, SqlQueryResponse> {
|
||||
private final SecurityContext securityContext;
|
||||
private final ClusterService clusterService;
|
||||
private final PlanExecutor planExecutor;
|
||||
private final SqlLicenseChecker sqlLicenseChecker;
|
||||
|
||||
@Inject
|
||||
public TransportSqlQueryAction(TransportService transportService, ActionFilters actionFilters,
|
||||
PlanExecutor planExecutor, SqlLicenseChecker sqlLicenseChecker) {
|
||||
public TransportSqlQueryAction(Settings settings, ClusterService clusterService, TransportService transportService,
|
||||
ThreadPool threadPool, ActionFilters actionFilters, PlanExecutor planExecutor,
|
||||
SqlLicenseChecker sqlLicenseChecker) {
|
||||
super(SqlQueryAction.NAME, transportService, actionFilters, (Writeable.Reader<SqlQueryRequest>) SqlQueryRequest::new);
|
||||
|
||||
this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ?
|
||||
new SecurityContext(settings, threadPool.getThreadContext()) : null;
|
||||
this.clusterService = clusterService;
|
||||
this.planExecutor = planExecutor;
|
||||
this.sqlLicenseChecker = sqlLicenseChecker;
|
||||
}
|
||||
|
@ -48,17 +61,18 @@ public class TransportSqlQueryAction extends HandledTransportAction<SqlQueryRequ
|
|||
@Override
|
||||
protected void doExecute(Task task, SqlQueryRequest request, ActionListener<SqlQueryResponse> listener) {
|
||||
sqlLicenseChecker.checkIfSqlAllowed(request.mode());
|
||||
operation(planExecutor, request, listener);
|
||||
operation(planExecutor, request, listener, username(securityContext), clusterName(clusterService));
|
||||
}
|
||||
|
||||
/**
|
||||
* Actual implementation of the action. Statically available to support embedded mode.
|
||||
*/
|
||||
public static void operation(PlanExecutor planExecutor, SqlQueryRequest request, ActionListener<SqlQueryResponse> listener) {
|
||||
public static void operation(PlanExecutor planExecutor, SqlQueryRequest request, ActionListener<SqlQueryResponse> listener,
|
||||
String username, String clusterName) {
|
||||
// The configuration is always created however when dealing with the next page, only the timeouts are relevant
|
||||
// the rest having default values (since the query is already created)
|
||||
Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(), request.requestTimeout(), request.pageTimeout(),
|
||||
request.filter(), request.mode());
|
||||
request.filter(), request.mode(), username, clusterName);
|
||||
|
||||
// mode() shouldn't be null
|
||||
QueryMetric metric = QueryMetric.from(request.mode(), request.clientId());
|
||||
|
|
|
@ -8,28 +8,42 @@ package org.elasticsearch.xpack.sql.plugin;
|
|||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.HandledTransportAction;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.core.XPackSettings;
|
||||
import org.elasticsearch.xpack.core.security.SecurityContext;
|
||||
import org.elasticsearch.xpack.sql.action.SqlTranslateAction;
|
||||
import org.elasticsearch.xpack.sql.action.SqlTranslateRequest;
|
||||
import org.elasticsearch.xpack.sql.action.SqlTranslateResponse;
|
||||
import org.elasticsearch.xpack.sql.execution.PlanExecutor;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
|
||||
import static org.elasticsearch.xpack.sql.plugin.Transports.clusterName;
|
||||
import static org.elasticsearch.xpack.sql.plugin.Transports.username;
|
||||
|
||||
/**
|
||||
* Transport action for translating SQL queries into ES requests
|
||||
*/
|
||||
public class TransportSqlTranslateAction extends HandledTransportAction<SqlTranslateRequest, SqlTranslateResponse> {
|
||||
private final SecurityContext securityContext;
|
||||
private final ClusterService clusterService;
|
||||
private final PlanExecutor planExecutor;
|
||||
private final SqlLicenseChecker sqlLicenseChecker;
|
||||
|
||||
@Inject
|
||||
public TransportSqlTranslateAction(TransportService transportService, ActionFilters actionFilters,
|
||||
PlanExecutor planExecutor, SqlLicenseChecker sqlLicenseChecker) {
|
||||
public TransportSqlTranslateAction(Settings settings, ClusterService clusterService, TransportService transportService,
|
||||
ThreadPool threadPool, ActionFilters actionFilters, PlanExecutor planExecutor,
|
||||
SqlLicenseChecker sqlLicenseChecker) {
|
||||
super(SqlTranslateAction.NAME, transportService, actionFilters, (Writeable.Reader<SqlTranslateRequest>) SqlTranslateRequest::new);
|
||||
|
||||
this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ?
|
||||
new SecurityContext(settings, threadPool.getThreadContext()) : null;
|
||||
this.clusterService = clusterService;
|
||||
this.planExecutor = planExecutor;
|
||||
this.sqlLicenseChecker = sqlLicenseChecker;
|
||||
}
|
||||
|
@ -40,7 +54,8 @@ public class TransportSqlTranslateAction extends HandledTransportAction<SqlTrans
|
|||
|
||||
planExecutor.metrics().translate();
|
||||
Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(),
|
||||
request.requestTimeout(), request.pageTimeout(), request.filter(), request.mode());
|
||||
request.requestTimeout(), request.pageTimeout(), request.filter(), request.mode(),
|
||||
username(securityContext), clusterName(clusterService));
|
||||
|
||||
planExecutor.searchSource(cfg, request.query(), request.params(), ActionListener.wrap(
|
||||
searchSourceBuilder -> listener.onResponse(new SqlTranslateResponse(searchSourceBuilder)), listener::onFailure));
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.sql.plugin;
|
||||
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.xpack.core.security.SecurityContext;
|
||||
|
||||
final class Transports {
|
||||
|
||||
private Transports() {}
|
||||
|
||||
static String username(SecurityContext securityContext) {
|
||||
return securityContext != null && securityContext.getUser() != null ? securityContext.getUser().principal() : null;
|
||||
}
|
||||
|
||||
static String clusterName(ClusterService clusterService) {
|
||||
return clusterService.getClusterName().value();
|
||||
}
|
||||
}
|
|
@ -16,24 +16,29 @@ import java.util.TimeZone;
|
|||
// Typed object holding properties for a given action
|
||||
public class Configuration {
|
||||
public static final Configuration DEFAULT = new Configuration(TimeZone.getTimeZone("UTC"),
|
||||
Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, Mode.PLAIN);
|
||||
Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, Mode.PLAIN, null, null);
|
||||
|
||||
private final TimeZone timeZone;
|
||||
private final int pageSize;
|
||||
private final TimeValue requestTimeout;
|
||||
private final TimeValue pageTimeout;
|
||||
private final Mode mode;
|
||||
private final String username;
|
||||
private final String clusterName;
|
||||
|
||||
@Nullable
|
||||
private QueryBuilder filter;
|
||||
|
||||
public Configuration(TimeZone tz, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter, Mode mode) {
|
||||
public Configuration(TimeZone tz, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter, Mode mode,
|
||||
String username, String clusterName) {
|
||||
this.timeZone = tz;
|
||||
this.pageSize = pageSize;
|
||||
this.requestTimeout = requestTimeout;
|
||||
this.pageTimeout = pageTimeout;
|
||||
this.filter = filter;
|
||||
this.mode = mode == null ? Mode.PLAIN : mode;
|
||||
this.username = username;
|
||||
this.clusterName = clusterName;
|
||||
}
|
||||
|
||||
public TimeZone timeZone() {
|
||||
|
@ -55,8 +60,15 @@ public class Configuration {
|
|||
public QueryBuilder filter() {
|
||||
return filter;
|
||||
}
|
||||
|
||||
public Mode mode() {
|
||||
return mode;
|
||||
}
|
||||
|
||||
public String username() {
|
||||
return username;
|
||||
}
|
||||
|
||||
public String clusterName() {
|
||||
return clusterName;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -41,15 +41,14 @@ public class SqlSession {
|
|||
private final Optimizer optimizer;
|
||||
private final Planner planner;
|
||||
|
||||
// TODO rename to `configuration`
|
||||
private final Configuration settings;
|
||||
private final Configuration configuration;
|
||||
|
||||
public SqlSession(SqlSession other) {
|
||||
this(other.settings, other.client, other.functionRegistry, other.indexResolver,
|
||||
this(other.configuration, other.client, other.functionRegistry, other.indexResolver,
|
||||
other.preAnalyzer, other.verifier, other.optimizer, other.planner);
|
||||
}
|
||||
|
||||
public SqlSession(Configuration settings, Client client, FunctionRegistry functionRegistry,
|
||||
public SqlSession(Configuration configuration, Client client, FunctionRegistry functionRegistry,
|
||||
IndexResolver indexResolver,
|
||||
PreAnalyzer preAnalyzer,
|
||||
Verifier verifier,
|
||||
|
@ -64,7 +63,7 @@ public class SqlSession {
|
|||
this.planner = planner;
|
||||
this.verifier = verifier;
|
||||
|
||||
this.settings = settings;
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
public FunctionRegistry functionRegistry() {
|
||||
|
@ -102,7 +101,7 @@ public class SqlSession {
|
|||
}
|
||||
|
||||
preAnalyze(parsed, c -> {
|
||||
Analyzer analyzer = new Analyzer(functionRegistry, c, settings.timeZone(), verifier);
|
||||
Analyzer analyzer = new Analyzer(configuration, functionRegistry, c, verifier);
|
||||
return analyzer.analyze(parsed, verify);
|
||||
}, listener);
|
||||
}
|
||||
|
@ -114,7 +113,7 @@ public class SqlSession {
|
|||
}
|
||||
|
||||
preAnalyze(parsed, r -> {
|
||||
Analyzer analyzer = new Analyzer(functionRegistry, r, settings.timeZone(), verifier);
|
||||
Analyzer analyzer = new Analyzer(configuration, functionRegistry, r, verifier);
|
||||
return analyzer.debugAnalyze(parsed);
|
||||
}, listener);
|
||||
}
|
||||
|
@ -166,7 +165,7 @@ public class SqlSession {
|
|||
}
|
||||
}
|
||||
|
||||
public Configuration settings() {
|
||||
return settings;
|
||||
public Configuration configuration() {
|
||||
return configuration;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,6 +17,7 @@ import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
|
|||
import org.elasticsearch.xpack.sql.parser.SqlParser;
|
||||
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
|
||||
import org.elasticsearch.xpack.sql.plan.logical.Project;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.stats.Metrics;
|
||||
import org.elasticsearch.xpack.sql.type.DataType;
|
||||
import org.elasticsearch.xpack.sql.type.EsField;
|
||||
|
@ -24,7 +25,6 @@ import org.elasticsearch.xpack.sql.type.TypesTests;
|
|||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN;
|
||||
import static org.elasticsearch.xpack.sql.type.DataType.KEYWORD;
|
||||
|
@ -52,7 +52,7 @@ public class FieldAttributeTests extends ESTestCase {
|
|||
|
||||
EsIndex test = new EsIndex("test", mapping);
|
||||
getIndexResult = IndexResolution.valid(test);
|
||||
analyzer = new Analyzer(functionRegistry, getIndexResult, TimeZone.getTimeZone("UTC"), verifier);
|
||||
analyzer = new Analyzer(Configuration.DEFAULT, functionRegistry, getIndexResult, verifier);
|
||||
}
|
||||
|
||||
private LogicalPlan plan(String sql) {
|
||||
|
@ -169,7 +169,7 @@ public class FieldAttributeTests extends ESTestCase {
|
|||
|
||||
EsIndex index = new EsIndex("test", mapping);
|
||||
getIndexResult = IndexResolution.valid(index);
|
||||
analyzer = new Analyzer(functionRegistry, getIndexResult, TimeZone.getTimeZone("UTC"), verifier);
|
||||
analyzer = new Analyzer(Configuration.DEFAULT, functionRegistry, getIndexResult, verifier);
|
||||
|
||||
VerificationException ex = expectThrows(VerificationException.class, () -> plan("SELECT test.bar FROM test"));
|
||||
assertEquals(
|
||||
|
|
|
@ -18,12 +18,12 @@ import org.elasticsearch.xpack.sql.expression.predicate.conditional.Least;
|
|||
import org.elasticsearch.xpack.sql.expression.predicate.conditional.NullIf;
|
||||
import org.elasticsearch.xpack.sql.parser.SqlParser;
|
||||
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.stats.Metrics;
|
||||
import org.elasticsearch.xpack.sql.type.EsField;
|
||||
import org.elasticsearch.xpack.sql.type.TypesTests;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
|
||||
public class VerifierErrorMessagesTests extends ESTestCase {
|
||||
private SqlParser parser = new SqlParser();
|
||||
|
@ -35,7 +35,7 @@ public class VerifierErrorMessagesTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private String error(IndexResolution getIndexResult, String sql) {
|
||||
Analyzer analyzer = new Analyzer(new FunctionRegistry(), getIndexResult, TimeZone.getTimeZone("UTC"), new Verifier(new Metrics()));
|
||||
Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), getIndexResult, new Verifier(new Metrics()));
|
||||
AnalysisException e = expectThrows(AnalysisException.class, () -> analyzer.analyze(parser.createStatement(sql), true));
|
||||
assertTrue(e.getMessage().startsWith("Found "));
|
||||
String header = "Found 1 problem(s)\nline ";
|
||||
|
@ -49,7 +49,7 @@ public class VerifierErrorMessagesTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private LogicalPlan accept(IndexResolution resolution, String sql) {
|
||||
Analyzer analyzer = new Analyzer(new FunctionRegistry(), resolution, TimeZone.getTimeZone("UTC"), new Verifier(new Metrics()));
|
||||
Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), resolution, new Verifier(new Metrics()));
|
||||
return analyzer.analyze(parser.createStatement(sql), true);
|
||||
}
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.sql.expression.function;
|
||||
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
|
||||
import org.elasticsearch.xpack.sql.expression.Expression;
|
||||
|
@ -12,6 +13,8 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
|
|||
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
|
||||
import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
|
||||
import org.elasticsearch.xpack.sql.parser.ParsingException;
|
||||
import org.elasticsearch.xpack.sql.proto.Mode;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.tree.Location;
|
||||
import org.elasticsearch.xpack.sql.tree.LocationTests;
|
||||
import org.elasticsearch.xpack.sql.tree.NodeInfo;
|
||||
|
@ -35,16 +38,16 @@ public class FunctionRegistryTests extends ESTestCase {
|
|||
UnresolvedFunction ur = uf(STANDARD);
|
||||
FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, DummyFunction::new));
|
||||
FunctionDefinition def = r.resolveFunction(ur.name());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
|
||||
// Distinct isn't supported
|
||||
ParsingException e = expectThrows(ParsingException.class, () ->
|
||||
uf(DISTINCT).buildResolved(randomTimeZone(), def));
|
||||
uf(DISTINCT).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("does not support DISTINCT yet it was specified"));
|
||||
|
||||
// Any children aren't supported
|
||||
e = expectThrows(ParsingException.class, () ->
|
||||
uf(STANDARD, mock(Expression.class)).buildResolved(randomTimeZone(), def));
|
||||
uf(STANDARD, mock(Expression.class)).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("expects no arguments"));
|
||||
}
|
||||
|
||||
|
@ -56,21 +59,21 @@ public class FunctionRegistryTests extends ESTestCase {
|
|||
}));
|
||||
FunctionDefinition def = r.resolveFunction(ur.name());
|
||||
assertFalse(def.datetime());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
|
||||
// Distinct isn't supported
|
||||
ParsingException e = expectThrows(ParsingException.class, () ->
|
||||
uf(DISTINCT, mock(Expression.class)).buildResolved(randomTimeZone(), def));
|
||||
uf(DISTINCT, mock(Expression.class)).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("does not support DISTINCT yet it was specified"));
|
||||
|
||||
// No children aren't supported
|
||||
e = expectThrows(ParsingException.class, () ->
|
||||
uf(STANDARD).buildResolved(randomTimeZone(), def));
|
||||
uf(STANDARD).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("expects exactly one argument"));
|
||||
|
||||
// Multiple children aren't supported
|
||||
e = expectThrows(ParsingException.class, () ->
|
||||
uf(STANDARD, mock(Expression.class), mock(Expression.class)).buildResolved(randomTimeZone(), def));
|
||||
uf(STANDARD, mock(Expression.class), mock(Expression.class)).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("expects exactly one argument"));
|
||||
}
|
||||
|
||||
|
@ -83,17 +86,17 @@ public class FunctionRegistryTests extends ESTestCase {
|
|||
return new DummyFunction(l);
|
||||
}));
|
||||
FunctionDefinition def = r.resolveFunction(ur.name());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
assertFalse(def.datetime());
|
||||
|
||||
// No children aren't supported
|
||||
ParsingException e = expectThrows(ParsingException.class, () ->
|
||||
uf(STANDARD).buildResolved(randomTimeZone(), def));
|
||||
uf(STANDARD).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("expects exactly one argument"));
|
||||
|
||||
// Multiple children aren't supported
|
||||
e = expectThrows(ParsingException.class, () ->
|
||||
uf(STANDARD, mock(Expression.class), mock(Expression.class)).buildResolved(randomTimeZone(), def));
|
||||
uf(STANDARD, mock(Expression.class), mock(Expression.class)).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("expects exactly one argument"));
|
||||
}
|
||||
|
||||
|
@ -101,28 +104,29 @@ public class FunctionRegistryTests extends ESTestCase {
|
|||
boolean urIsExtract = randomBoolean();
|
||||
UnresolvedFunction ur = uf(urIsExtract ? EXTRACT : STANDARD, mock(Expression.class));
|
||||
TimeZone providedTimeZone = randomTimeZone();
|
||||
Configuration providedConfiguration = randomConfiguration(providedTimeZone);
|
||||
FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e, TimeZone tz) -> {
|
||||
assertEquals(providedTimeZone, tz);
|
||||
assertSame(e, ur.children().get(0));
|
||||
return new DummyFunction(l);
|
||||
}));
|
||||
FunctionDefinition def = r.resolveFunction(ur.name());
|
||||
assertEquals(ur.location(), ur.buildResolved(providedTimeZone, def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(providedConfiguration, def).location());
|
||||
assertTrue(def.datetime());
|
||||
|
||||
// Distinct isn't supported
|
||||
ParsingException e = expectThrows(ParsingException.class, () ->
|
||||
uf(DISTINCT, mock(Expression.class)).buildResolved(randomTimeZone(), def));
|
||||
uf(DISTINCT, mock(Expression.class)).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("does not support DISTINCT yet it was specified"));
|
||||
|
||||
// No children aren't supported
|
||||
e = expectThrows(ParsingException.class, () ->
|
||||
uf(STANDARD).buildResolved(randomTimeZone(), def));
|
||||
uf(STANDARD).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("expects exactly one argument"));
|
||||
|
||||
// Multiple children aren't supported
|
||||
e = expectThrows(ParsingException.class, () ->
|
||||
uf(STANDARD, mock(Expression.class), mock(Expression.class)).buildResolved(randomTimeZone(), def));
|
||||
uf(STANDARD, mock(Expression.class), mock(Expression.class)).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("expects exactly one argument"));
|
||||
}
|
||||
|
||||
|
@ -134,28 +138,28 @@ public class FunctionRegistryTests extends ESTestCase {
|
|||
return new DummyFunction(l);
|
||||
}));
|
||||
FunctionDefinition def = r.resolveFunction(ur.name());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
assertFalse(def.datetime());
|
||||
|
||||
// Distinct isn't supported
|
||||
ParsingException e = expectThrows(ParsingException.class, () ->
|
||||
uf(DISTINCT, mock(Expression.class), mock(Expression.class)).buildResolved(randomTimeZone(), def));
|
||||
uf(DISTINCT, mock(Expression.class), mock(Expression.class)).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("does not support DISTINCT yet it was specified"));
|
||||
|
||||
// No children aren't supported
|
||||
e = expectThrows(ParsingException.class, () ->
|
||||
uf(STANDARD).buildResolved(randomTimeZone(), def));
|
||||
uf(STANDARD).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("expects exactly two arguments"));
|
||||
|
||||
// One child isn't supported
|
||||
e = expectThrows(ParsingException.class, () ->
|
||||
uf(STANDARD, mock(Expression.class)).buildResolved(randomTimeZone(), def));
|
||||
uf(STANDARD, mock(Expression.class)).buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("expects exactly two arguments"));
|
||||
|
||||
// Many children aren't supported
|
||||
e = expectThrows(ParsingException.class, () ->
|
||||
uf(STANDARD, mock(Expression.class), mock(Expression.class), mock(Expression.class))
|
||||
.buildResolved(randomTimeZone(), def));
|
||||
.buildResolved(randomConfiguration(), def));
|
||||
assertThat(e.getMessage(), endsWith("expects exactly two arguments"));
|
||||
}
|
||||
|
||||
|
@ -189,26 +193,26 @@ public class FunctionRegistryTests extends ESTestCase {
|
|||
|
||||
// Resolve by primary name
|
||||
FunctionDefinition def = r.resolveFunction(r.resolveAlias("DuMMy_FuncTIon"));
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
|
||||
def = r.resolveFunction(r.resolveAlias("Dummy_Function"));
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
|
||||
def = r.resolveFunction(r.resolveAlias("dummy_function"));
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
|
||||
def = r.resolveFunction(r.resolveAlias("DUMMY_FUNCTION"));
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
|
||||
// Resolve by alias
|
||||
def = r.resolveFunction(r.resolveAlias("DumMy_FunC"));
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
|
||||
def = r.resolveFunction(r.resolveAlias("dummy_func"));
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
|
||||
def = r.resolveFunction(r.resolveAlias("DUMMY_FUNC"));
|
||||
assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
|
||||
assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location());
|
||||
|
||||
// Not resolved
|
||||
SqlIllegalArgumentException e = expectThrows(SqlIllegalArgumentException.class,
|
||||
|
@ -225,6 +229,28 @@ public class FunctionRegistryTests extends ESTestCase {
|
|||
private UnresolvedFunction uf(UnresolvedFunction.ResolutionType resolutionType, Expression... children) {
|
||||
return new UnresolvedFunction(LocationTests.randomLocation(), "DUMMY_FUNCTION", resolutionType, Arrays.asList(children));
|
||||
}
|
||||
|
||||
private Configuration randomConfiguration() {
|
||||
return new Configuration(randomTimeZone(),
|
||||
randomIntBetween(0, 1000),
|
||||
new TimeValue(randomNonNegativeLong()),
|
||||
new TimeValue(randomNonNegativeLong()),
|
||||
null,
|
||||
randomFrom(Mode.values()),
|
||||
randomAlphaOfLength(10),
|
||||
randomAlphaOfLength(10));
|
||||
}
|
||||
|
||||
private Configuration randomConfiguration(TimeZone providedTimeZone) {
|
||||
return new Configuration(providedTimeZone,
|
||||
randomIntBetween(0, 1000),
|
||||
new TimeValue(randomNonNegativeLong()),
|
||||
new TimeValue(randomNonNegativeLong()),
|
||||
null,
|
||||
randomFrom(Mode.values()),
|
||||
randomAlphaOfLength(10),
|
||||
randomAlphaOfLength(10));
|
||||
}
|
||||
|
||||
public static class DummyFunction extends ScalarFunction {
|
||||
public DummyFunction(Location location) {
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.sql.expression.function.scalar;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
|
||||
import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
|
||||
import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
|
||||
import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
|
||||
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
|
||||
import org.elasticsearch.xpack.sql.parser.SqlParser;
|
||||
import org.elasticsearch.xpack.sql.plan.logical.Project;
|
||||
import org.elasticsearch.xpack.sql.proto.Mode;
|
||||
import org.elasticsearch.xpack.sql.proto.Protocol;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.stats.Metrics;
|
||||
import org.elasticsearch.xpack.sql.type.TypesTests;
|
||||
|
||||
import java.util.TimeZone;
|
||||
|
||||
public class DatabaseFunctionTests extends ESTestCase {
|
||||
|
||||
public void testDatabaseFunctionOutput() {
|
||||
String clusterName = randomAlphaOfLengthBetween(1, 15);
|
||||
SqlParser parser = new SqlParser();
|
||||
EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-basic.json", true));
|
||||
Analyzer analyzer = new Analyzer(
|
||||
new Configuration(TimeZone.getTimeZone("UTC"), Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT,
|
||||
Protocol.PAGE_TIMEOUT, null, randomFrom(Mode.values()), null, clusterName),
|
||||
new FunctionRegistry(),
|
||||
IndexResolution.valid(test),
|
||||
new Verifier(new Metrics())
|
||||
);
|
||||
|
||||
Project result = (Project) analyzer.analyze(parser.createStatement("SELECT DATABASE()"), true);
|
||||
assertTrue(result.projections().get(0) instanceof Database);
|
||||
assertEquals(clusterName, ((Database) result.projections().get(0)).fold());
|
||||
}
|
||||
}
|
|
@ -0,0 +1,42 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.sql.expression.function.scalar;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
|
||||
import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
|
||||
import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
|
||||
import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
|
||||
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
|
||||
import org.elasticsearch.xpack.sql.parser.SqlParser;
|
||||
import org.elasticsearch.xpack.sql.plan.logical.Project;
|
||||
import org.elasticsearch.xpack.sql.proto.Mode;
|
||||
import org.elasticsearch.xpack.sql.proto.Protocol;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.stats.Metrics;
|
||||
import org.elasticsearch.xpack.sql.type.TypesTests;
|
||||
|
||||
import java.util.TimeZone;
|
||||
|
||||
public class UserFunctionTests extends ESTestCase {
|
||||
|
||||
public void testNoUsernameFunctionOutput() {
|
||||
SqlParser parser = new SqlParser();
|
||||
EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-basic.json", true));
|
||||
Analyzer analyzer = new Analyzer(
|
||||
new Configuration(TimeZone.getTimeZone("UTC"), Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT,
|
||||
Protocol.PAGE_TIMEOUT, null, randomFrom(Mode.values()), null, randomAlphaOfLengthBetween(1, 15)),
|
||||
new FunctionRegistry(),
|
||||
IndexResolution.valid(test),
|
||||
new Verifier(new Metrics())
|
||||
);
|
||||
|
||||
Project result = (Project) analyzer.analyze(parser.createStatement("SELECT USER()"), true);
|
||||
assertTrue(result.projections().get(0) instanceof User);
|
||||
assertNull(((User) result.projections().get(0)).fold());
|
||||
}
|
||||
}
|
|
@ -13,12 +13,12 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
|
|||
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
|
||||
import org.elasticsearch.xpack.sql.parser.SqlParser;
|
||||
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.stats.Metrics;
|
||||
import org.elasticsearch.xpack.sql.type.EsField;
|
||||
import org.elasticsearch.xpack.sql.type.TypesTests;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
|
||||
public class OptimizerRunTests extends ESTestCase {
|
||||
|
||||
|
@ -36,7 +36,7 @@ public class OptimizerRunTests extends ESTestCase {
|
|||
|
||||
EsIndex test = new EsIndex("test", mapping);
|
||||
getIndexResult = IndexResolution.valid(test);
|
||||
analyzer = new Analyzer(functionRegistry, getIndexResult, TimeZone.getTimeZone("UTC"), new Verifier(new Metrics()));
|
||||
analyzer = new Analyzer(Configuration.DEFAULT, functionRegistry, getIndexResult, new Verifier(new Metrics()));
|
||||
optimizer = new Optimizer();
|
||||
}
|
||||
|
||||
|
|
|
@ -16,12 +16,11 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
|
|||
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
|
||||
import org.elasticsearch.xpack.sql.parser.SqlParser;
|
||||
import org.elasticsearch.xpack.sql.plan.logical.command.Command;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.session.SqlSession;
|
||||
import org.elasticsearch.xpack.sql.stats.Metrics;
|
||||
import org.elasticsearch.xpack.sql.type.TypesTests;
|
||||
|
||||
import java.util.TimeZone;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Mockito.doAnswer;
|
||||
|
@ -35,7 +34,7 @@ public class SysCatalogsTests extends ESTestCase {
|
|||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
private Tuple<Command, SqlSession> sql(String sql) {
|
||||
EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-multi-field-with-nested.json", true));
|
||||
Analyzer analyzer = new Analyzer(new FunctionRegistry(), IndexResolution.valid(test), TimeZone.getTimeZone("UTC"),
|
||||
Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test),
|
||||
new Verifier(new Metrics()));
|
||||
Command cmd = (Command) analyzer.analyze(parser.createStatement(sql), true);
|
||||
|
||||
|
|
|
@ -25,7 +25,6 @@ import org.elasticsearch.xpack.sql.type.TypesTests;
|
|||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import static java.util.Collections.singletonList;
|
||||
|
@ -42,7 +41,7 @@ public class SysParserTests extends ESTestCase {
|
|||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
private Tuple<Command, SqlSession> sql(String sql) {
|
||||
EsIndex test = new EsIndex("test", mapping);
|
||||
Analyzer analyzer = new Analyzer(new FunctionRegistry(), IndexResolution.valid(test), TimeZone.getTimeZone("UTC"),
|
||||
Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test),
|
||||
new Verifier(new Metrics()));
|
||||
Command cmd = (Command) analyzer.analyze(parser.createStatement(sql), true);
|
||||
|
||||
|
|
|
@ -16,12 +16,11 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
|
|||
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
|
||||
import org.elasticsearch.xpack.sql.parser.SqlParser;
|
||||
import org.elasticsearch.xpack.sql.plan.logical.command.Command;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.session.SqlSession;
|
||||
import org.elasticsearch.xpack.sql.stats.Metrics;
|
||||
import org.elasticsearch.xpack.sql.type.TypesTests;
|
||||
|
||||
import java.util.TimeZone;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
|
||||
public class SysTableTypesTests extends ESTestCase {
|
||||
|
@ -30,7 +29,7 @@ public class SysTableTypesTests extends ESTestCase {
|
|||
|
||||
private Tuple<Command, SqlSession> sql(String sql) {
|
||||
EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-multi-field-with-nested.json", true));
|
||||
Analyzer analyzer = new Analyzer(new FunctionRegistry(), IndexResolution.valid(test), TimeZone.getTimeZone("UTC"),
|
||||
Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test),
|
||||
new Verifier(new Metrics()));
|
||||
Command cmd = (Command) analyzer.analyze(parser.createStatement(sql), true);
|
||||
|
||||
|
|
|
@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
|
|||
import org.elasticsearch.xpack.sql.parser.SqlParser;
|
||||
import org.elasticsearch.xpack.sql.plan.logical.command.Command;
|
||||
import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.session.SchemaRowSet;
|
||||
import org.elasticsearch.xpack.sql.session.SqlSession;
|
||||
import org.elasticsearch.xpack.sql.stats.Metrics;
|
||||
|
@ -31,7 +32,6 @@ import java.util.Iterator;
|
|||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
|
@ -236,7 +236,7 @@ public class SysTablesTests extends ESTestCase {
|
|||
|
||||
private Tuple<Command, SqlSession> sql(String sql, List<SqlTypedParamValue> params) {
|
||||
EsIndex test = new EsIndex("test", mapping);
|
||||
Analyzer analyzer = new Analyzer(new FunctionRegistry(), IndexResolution.valid(test), TimeZone.getTimeZone("UTC"),
|
||||
Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test),
|
||||
new Verifier(new Metrics()));
|
||||
Command cmd = (Command) analyzer.analyze(parser.createStatement(sql, params), true);
|
||||
|
||||
|
|
|
@ -14,13 +14,13 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
|
|||
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
|
||||
import org.elasticsearch.xpack.sql.parser.SqlParser;
|
||||
import org.elasticsearch.xpack.sql.plan.logical.command.Command;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.session.SqlSession;
|
||||
import org.elasticsearch.xpack.sql.type.DataType;
|
||||
import org.elasticsearch.xpack.sql.type.TypesTests;
|
||||
|
||||
import java.sql.JDBCType;
|
||||
import java.util.List;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import static org.elasticsearch.action.ActionListener.wrap;
|
||||
|
@ -32,7 +32,7 @@ public class SysTypesTests extends ESTestCase {
|
|||
|
||||
private Tuple<Command, SqlSession> sql(String sql) {
|
||||
EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-multi-field-with-nested.json", true));
|
||||
Analyzer analyzer = new Analyzer(new FunctionRegistry(), IndexResolution.valid(test), TimeZone.getTimeZone("UTC"), null);
|
||||
Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test), null);
|
||||
Command cmd = (Command) analyzer.analyze(parser.createStatement(sql), false);
|
||||
|
||||
IndexResolver resolver = mock(IndexResolver.class);
|
||||
|
|
|
@ -16,6 +16,7 @@ import org.elasticsearch.xpack.sql.parser.SqlParser;
|
|||
import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec;
|
||||
import org.elasticsearch.xpack.sql.plan.physical.LocalExec;
|
||||
import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.session.EmptyExecutable;
|
||||
import org.elasticsearch.xpack.sql.stats.Metrics;
|
||||
import org.elasticsearch.xpack.sql.type.EsField;
|
||||
|
@ -24,7 +25,6 @@ import org.junit.AfterClass;
|
|||
import org.junit.BeforeClass;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import static org.hamcrest.Matchers.startsWith;
|
||||
|
||||
|
@ -42,7 +42,7 @@ public class QueryFolderTests extends ESTestCase {
|
|||
Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");
|
||||
EsIndex test = new EsIndex("test", mapping);
|
||||
IndexResolution getIndexResult = IndexResolution.valid(test);
|
||||
analyzer = new Analyzer(new FunctionRegistry(), getIndexResult, TimeZone.getTimeZone("UTC"), new Verifier(new Metrics()));
|
||||
analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), getIndexResult, new Verifier(new Metrics()));
|
||||
optimizer = new Optimizer();
|
||||
planner = new Planner();
|
||||
}
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.elasticsearch.xpack.sql.querydsl.query.RangeQuery;
|
|||
import org.elasticsearch.xpack.sql.querydsl.query.ScriptQuery;
|
||||
import org.elasticsearch.xpack.sql.querydsl.query.TermQuery;
|
||||
import org.elasticsearch.xpack.sql.querydsl.query.TermsQuery;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.stats.Metrics;
|
||||
import org.elasticsearch.xpack.sql.type.EsField;
|
||||
import org.elasticsearch.xpack.sql.type.TypesTests;
|
||||
|
@ -37,7 +38,6 @@ import org.junit.BeforeClass;
|
|||
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation.E;
|
||||
|
@ -57,7 +57,7 @@ public class QueryTranslatorTests extends ESTestCase {
|
|||
Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");
|
||||
EsIndex test = new EsIndex("test", mapping);
|
||||
IndexResolution getIndexResult = IndexResolution.valid(test);
|
||||
analyzer = new Analyzer(new FunctionRegistry(), getIndexResult, TimeZone.getTimeZone("UTC"), new Verifier(new Metrics()));
|
||||
analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), getIndexResult, new Verifier(new Metrics()));
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
|
|
|
@ -14,11 +14,11 @@ import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
|
|||
import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
|
||||
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
|
||||
import org.elasticsearch.xpack.sql.parser.SqlParser;
|
||||
import org.elasticsearch.xpack.sql.session.Configuration;
|
||||
import org.elasticsearch.xpack.sql.type.EsField;
|
||||
import org.elasticsearch.xpack.sql.type.TypesTests;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import static org.elasticsearch.xpack.sql.stats.FeatureMetric.COMMAND;
|
||||
import static org.elasticsearch.xpack.sql.stats.FeatureMetric.GROUPBY;
|
||||
|
@ -241,7 +241,7 @@ public class VerifierMetricsTests extends ESTestCase {
|
|||
verifier = new Verifier(metrics);
|
||||
}
|
||||
|
||||
Analyzer analyzer = new Analyzer(new FunctionRegistry(), IndexResolution.valid(test), TimeZone.getTimeZone("UTC"), verifier);
|
||||
Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test), verifier);
|
||||
analyzer.analyze(parser.createStatement(sql), true);
|
||||
|
||||
return metrics == null ? null : metrics.stats();
|
||||
|
|
Loading…
Reference in New Issue