Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-02-09 14:35:04 +00:00)
Arithmetic
Add expression arithmetic, expression folding and type coercion.

* Big refactor of Processor: introduce ProcessorDefinition, an immutable tree structure used for resolving multiple inputs across folding (in particular for aggregations), which at runtime gets translated into 'compiled', small Processors.
* Folding for literals, scalars and inside the optimizer.
* Type validation happens per type hierarchy (numeric vs decimal), not per type.
* Ceil/Floor/Round functions return long/int instead of double.
* ScalarFunction preserves the ProcessorDefinition instead of a functionId.

Original commit: elastic/x-pack-elasticsearch@a703f8b455
This commit is contained in: parent b8f5720283, commit 87293272d8
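For orientation, a rough, hypothetical sketch of the ProcessorDefinition/Processor split the message above describes. The names below (Definition, Constant, Input, Add, DefinitionDemo) are simplified stand-ins invented for illustration; they are not the classes added by this commit.

// Illustration only - simplified stand-ins, not the actual x-pack-sql classes.
import java.util.function.Function;

interface Definition {                       // immutable "definition" tree, built while planning/folding
    Function<Object, Object> asProcessor();  // "compiled" into a small runtime processor
}

record Constant(Object value) implements Definition {
    public Function<Object, Object> asProcessor() {
        return input -> value;
    }
}

record Input() implements Definition {       // placeholder for a value extracted at runtime
    public Function<Object, Object> asProcessor() {
        return input -> input;
    }
}

record Add(Definition left, Definition right) implements Definition {
    public Function<Object, Object> asProcessor() {
        Function<Object, Object> l = left.asProcessor();
        Function<Object, Object> r = right.asProcessor();
        return input -> ((Number) l.apply(input)).longValue() + ((Number) r.apply(input)).longValue();
    }
}

class DefinitionDemo {
    public static void main(String[] args) {
        // definition tree for "input + 10000", resolved once and compiled into a reusable processor
        Function<Object, Object> processor = new Add(new Input(), new Constant(10000L)).asProcessor();
        System.out.println(processor.apply(7L)); // 10007
    }
}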
@@ -27,14 +27,14 @@ public class ExplainIT extends CliIntegrationTestCase {
 assertThat(in.readLine(), startsWith("----------"));
 assertThat(in.readLine(), startsWith("Project[[test_field{r}#"));
 assertThat(in.readLine(), startsWith("\\_SubQueryAlias[test]"));
-assertThat(in.readLine(), startsWith(" \\_CatalogTable[test][test_field{r}#"));
+assertThat(in.readLine(), startsWith(" \\EsRelation[test][test_field{r}#"));
 assertEquals("", in.readLine());

 command("EXPLAIN (PLAN OPTIMIZED) SELECT * FROM test");
 assertThat(in.readLine(), containsString("plan"));
 assertThat(in.readLine(), startsWith("----------"));
 assertThat(in.readLine(), startsWith("Project[[test_field{r}#"));
-assertThat(in.readLine(), startsWith("\\_CatalogTable[test][test_field{r}#"));
+assertThat(in.readLine(), startsWith("\\EsRelation[test][test_field{r}#"));
 assertEquals("", in.readLine());

 // TODO in this case we should probably remove the source filtering entirely. Right? It costs but we don't need it.
@@ -71,7 +71,7 @@ public class ExplainIT extends CliIntegrationTestCase {
 assertThat(in.readLine(), startsWith("Project[[i{r}#"));
 assertThat(in.readLine(), startsWith("\\_Filter[i{r}#"));
 assertThat(in.readLine(), startsWith(" \\_SubQueryAlias[test]"));
-assertThat(in.readLine(), startsWith(" \\_CatalogTable[test][i{r}#"));
+assertThat(in.readLine(), startsWith(" \\EsRelation[test][i{r}#"));
 assertEquals("", in.readLine());

 command("EXPLAIN (PLAN OPTIMIZED) SELECT * FROM test WHERE i = 2");
@@ -79,7 +79,7 @@ public class ExplainIT extends CliIntegrationTestCase {
 assertThat(in.readLine(), startsWith("----------"));
 assertThat(in.readLine(), startsWith("Project[[i{r}#"));
 assertThat(in.readLine(), startsWith("\\_Filter[i{r}#"));
-assertThat(in.readLine(), startsWith(" \\_CatalogTable[test][i{r}#"));
+assertThat(in.readLine(), startsWith(" \\EsRelation[test][i{r}#"));
 assertEquals("", in.readLine());

 command("EXPLAIN (PLAN EXECUTABLE) SELECT * FROM test WHERE i = 2");
@@ -124,14 +124,14 @@ public class ExplainIT extends CliIntegrationTestCase {
 assertThat(in.readLine(), startsWith("----------"));
 assertThat(in.readLine(), startsWith("Aggregate[[],[COUNT(1)#"));
 assertThat(in.readLine(), startsWith("\\_SubQueryAlias[test]"));
-assertThat(in.readLine(), startsWith(" \\_CatalogTable[test][i{r}#"));
+assertThat(in.readLine(), startsWith(" \\EsRelation[test][i{r}#"));
 assertEquals("", in.readLine());

 command("EXPLAIN (PLAN OPTIMIZED) SELECT COUNT(*) FROM test");
 assertThat(in.readLine(), containsString("plan"));
 assertThat(in.readLine(), startsWith("----------"));
 assertThat(in.readLine(), startsWith("Aggregate[[],[COUNT(1)#"));
-assertThat(in.readLine(), startsWith("\\_CatalogTable[test][i{r}#"));
+assertThat(in.readLine(), startsWith("\\EsRelation[test][i{r}#"));
 assertEquals("", in.readLine());

 command("EXPLAIN (PLAN EXECUTABLE) SELECT COUNT(*) FROM test");
@@ -9,15 +9,18 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.xpack.sql.jdbc.framework.JdbcTestUtils;
+import org.elasticsearch.xpack.sql.jdbc.framework.LocalH2;

 import java.nio.file.Path;
 import java.util.List;

 @TestLogging(JdbcTestUtils.SQL_TRACE)
-public abstract class DebugSqlSpec extends SqlSpecIT {
+public class DebugSqlSpec extends SqlSpecIT {
+    public static LocalH2 H2 = new LocalH2();

-    @ParametersFactory(shuffle = false, argumentFormatting = SqlSpecIT.PARAM_FORMATTING)
+    @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {

         Parser parser = specParser();
         return readScriptSpec("/debug.sql-spec", parser);
     }
@@ -37,7 +37,9 @@ public class SqlSpecIT extends SpecBaseIntegrationTestCase {
                 readScriptSpec("/filter.sql-spec", parser),
                 readScriptSpec("/datetime.sql-spec", parser),
                 readScriptSpec("/math.sql-spec", parser),
-                readScriptSpec("/agg.sql-spec", parser));
+                readScriptSpec("/agg.sql-spec", parser),
+                readScriptSpec("/arithmetic.sql-spec", parser)
+                );
     }

     // NOCOMMIT: add tests for nested docs when interplug communication is enabled
@@ -13,6 +13,7 @@ import org.junit.AfterClass;
 import org.junit.Before;

 import java.io.IOException;
+import java.net.URL;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.sql.Connection;
@@ -123,7 +124,11 @@ public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCas

     // returns groupName, testName, its line location, its source and the custom object (based on each test parser)
     protected static List<Object[]> readScriptSpec(String url, Parser parser) throws Exception {
-        Path source = PathUtils.get(SpecBaseIntegrationTestCase.class.getResource(url).toURI());
+        URL resource = SpecBaseIntegrationTestCase.class.getResource(url);
+        if (resource == null) {
+            throw new IllegalArgumentException("Cannot find resource " + url);
+        }
+        Path source = PathUtils.get(resource.toURI());
         String fileName = source.getFileName().toString();
         int dot = fileName.indexOf(".");
         String groupName = dot > 0 ? fileName.substring(0, dot) : fileName;
New file (13 lines): sql/jdbc/src/test/resources/arithmetic.csv-spec
@@ -0,0 +1,13 @@
//
// Arithmetic tests outside H2
//

// the standard behavior here is to return the constant for each element
// the weird thing is that an actual query needs to be ran
arithmeticWithFrom
SELECT 5 - 2 x FROM test_emp;

x
3
;
New file (65 lines): sql/jdbc/src/test/resources/arithmetic.sql-spec
@@ -0,0 +1,65 @@
//
// Arithmetic tests
//

unaryMinus
SELECT - 1 AS x;
plus
SELECT 1 + 1 AS x;
minus
SELECT 1 - 1 AS x;
divide
SELECT 6 / 3 AS x;
multiply
SELECT 2 * 3 AS x;
mod
SELECT 5 % 2 AS x;
operatorsPriority
SELECT 1 + 3 * 4 / 2 - 2 AS x;
operatorsPriorityWithParanthesis
SELECT ((1 + 3) * 2 / (3 - 1)) * 2 AS x;
literalAliasing
SELECT 2 + 3 AS x, 'foo' y;

// variable scalar arithmetic
scalarVariablePlus
SELECT emp_no + 10000 AS x FROM test_emp;
scalarVariableMinus
SELECT emp_no - 10000 AS x FROM test_emp;
scalarVariableMul
SELECT emp_no * 10000 AS x FROM test_emp;
scalarVariableDiv
SELECT emp_no / 10000 AS x FROM test_emp;
scalarVariableMod
SELECT emp_no % 10000 AS x FROM test_emp;
scalarVariableMultipleInputs
SELECT (emp_no % 10000) + YEAR(hire_date) AS x FROM test_emp;
scalarVariableTwoInputs
SELECT (emp_no % 10000) + YEAR(hire_date) AS x FROM test_emp;
scalarVariableThreeInputs
SELECT ((emp_no % 10000) + YEAR(hire_date)) / MONTH(birth_date) AS x FROM test_emp;
scalarVariableArithmeticAndEntry
SELECT emp_no, emp_no % 10000 AS x FROM test_emp;
scalarVariableTwoInputsAndEntry
SELECT emp_no, (emp_no % 10000) + YEAR(hire_date) AS x FROM test_emp;
scalarVariableThreeInputsAndEntry
SELECT emp_no, ((emp_no % 10000) + YEAR(hire_date)) / MONTH(birth_date) AS x FROM test_emp;


// variable scalar agg
aggVariablePlus
SELECT COUNT(*) + 10000 AS x FROM test_emp GROUP BY gender;
aggVariableMinus
SELECT COUNT(*) - 10000 AS x FROM test_emp GROUP BY gender;
aggVariableMul
SELECT COUNT(*) * 2 AS x FROM test_emp GROUP BY gender;
aggVariableDiv
SELECT COUNT(*) / 5000 AS x FROM test_emp GROUP BY gender;
aggVariableMod
SELECT COUNT(*) % 10000 AS x FROM test_emp GROUP BY gender;
aggVariableTwoInputs
SELECT MAX(emp_no) - MIN(emp_no) AS x FROM test_emp GROUP BY gender;
aggVariableThreeInputs
SELECT (MAX(emp_no) - MIN(emp_no)) + AVG(emp_no) AS x FROM test_emp GROUP BY gender;
@@ -9,7 +9,7 @@
 columnDetectionOverride
 SELECT gender, FLOOR(PERCENTILE(emp_no, 97.76)) p1 FROM test_emp GROUP BY gender;

-gender:s | p1:double
+gender:s | p1:l
 M | 10095
 F | 10099
 ;
@@ -3,9 +3,11 @@
 //

 debug
-SELECT gender, PERCENTILE(emp_no, 97.76) p1, PERCENTILE(emp_no, 93.3) p2, PERCENTILE_RANK(emp_no, 10025) rank FROM test_emp GROUP BY gender;
+// resolution should happen on the adjiacent nodes as well
+//SELECT 1+2+3 x, x + 3 AS y, y FROM test_emp;
+SELECT 2 + 3 x, 'foo', x + 1 z;

-gender | p1 | p2 | rank
-M | 10095.6112 | 10090.846 | 23.41269841269841
-F | 10099.1936 | 10096.351999999999 | 26.351351351351347
+x | 'foo' | z
+5 | 'foo' | 6
 ;
@@ -3,4 +3,6 @@
 //

 debug
-SELECT * FROM test_emp WHERE emp_no IS NULL ORDER BY emp_no LIMIT 5 ;
+SELECT emp_no, CAST(CEIL(emp_no) AS INT) m, first_name FROM "test_emp" WHERE CEIL(emp_no) < 10010 ORDER BY CEIL(emp_no);
+//SELECT YEAR(birth_date) AS d, CAST(SUM(emp_no) AS INT) s FROM "test_emp" GROUP BY YEAR(birth_date) ORDER BY YEAR(birth_date) LIMIT 5;
+//SELECT emp_no, SIN(emp_no) + emp_no % 10000 + YEAR(hire_date) / 1000 AS s, emp_no AS y FROM test_emp WHERE emp_no = 10010;
@@ -13,7 +13,8 @@ SELECT ATAN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY
 //mathCbrt
 //SELECT CBRT(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
 mathCeil
-SELECT CEIL(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
+// H2 returns CEIL as a double despite the value being an integer; we return a long as the other DBs
+SELECT CAST(CEIL(emp_no) AS INT) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
 mathCos
 SELECT COS(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
 mathCosh
@@ -62,7 +63,7 @@ SELECT emp_no, ASIN(emp_no) m, first_name FROM "test_emp" WHERE ASIN(emp_no) < 1
 //mathATanFilterAndOrder
 //SELECT emp_no, ATAN(emp_no) m, first_name FROM "test_emp" WHERE ATAN(emp_no) < 10010 ORDER BY ATAN(emp_no);
 mathCeilFilterAndOrder
-SELECT emp_no, CEIL(emp_no) m, first_name FROM "test_emp" WHERE CEIL(emp_no) < 10010 ORDER BY CEIL(emp_no);
+SELECT emp_no, CAST(CEIL(emp_no) AS INT) m, first_name FROM "test_emp" WHERE CEIL(emp_no) < 10010 ORDER BY CEIL(emp_no);
 //mathCosFilterAndOrder
 //SELECT emp_no, COS(emp_no) m, first_name FROM "test_emp" WHERE COS(emp_no) < 10010 ORDER BY COS(emp_no);
 //mathCoshFilterAndOrder
@@ -34,11 +34,13 @@ import org.elasticsearch.xpack.sql.expression.function.Functions;
 import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.Count;
 import org.elasticsearch.xpack.sql.expression.function.scalar.Cast;
+import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.ArithmeticFunction;
 import org.elasticsearch.xpack.sql.plan.TableIdentifier;
 import org.elasticsearch.xpack.sql.plan.logical.Aggregate;
-import org.elasticsearch.xpack.sql.plan.logical.CatalogTable;
+import org.elasticsearch.xpack.sql.plan.logical.EsRelation;
 import org.elasticsearch.xpack.sql.plan.logical.Filter;
 import org.elasticsearch.xpack.sql.plan.logical.Join;
+import org.elasticsearch.xpack.sql.plan.logical.LocalRelation;
 import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.sql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.sql.plan.logical.Project;
@@ -49,7 +51,10 @@ import org.elasticsearch.xpack.sql.rule.Rule;
 import org.elasticsearch.xpack.sql.rule.RuleExecutor;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.tree.Node;
+import org.elasticsearch.xpack.sql.tree.NodeUtils;
 import org.elasticsearch.xpack.sql.type.CompoundDataType;
+import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.DataTypeConversion;
 import org.elasticsearch.xpack.sql.util.StringUtils;

 import java.util.ArrayList;
@@ -90,7 +95,8 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
                 new ResolveFunctions(),
                 new ResolveAliases(),
                 new ProjectedAggregations(),
                 new ResolveAggsInHavingAndOrderBy()
+                //new ImplicitCasting()
                 );
         // TODO: this might be removed since the deduplication happens already in ResolveFunctions
         Batch deduplication = new Batch("Deduplication",
@@ -226,6 +232,11 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
             }
             return ur;
         }
+        // inlined queries (SELECT 1 + 2) are already resolved
+        else if (p instanceof LocalRelation) {
+            return p;
+        }
+
         return p.transformExpressionsDown(e -> {
             if (e instanceof SubQueryExpression) {
                 SubQueryExpression sq = (SubQueryExpression) e;
@@ -234,6 +245,11 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
             return e;
         });
     }
+
+    @Override
+    protected boolean skipResolved() {
+        return false;
+    }
 }

 private class ResolveTable extends AnalyzeRule<UnresolvedRelation> {
@@ -250,7 +266,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
         throw new UnknownIndexException(table.index(), plan);
     }

-    LogicalPlan catalogTable = new CatalogTable(plan.location(), found);
+    LogicalPlan catalogTable = new EsRelation(plan.location(), found);
     SubQueryAlias sa = new SubQueryAlias(plan.location(), catalogTable, table.index());

     if (plan.alias() != null) {
@@ -466,7 +482,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
     if (ordinal != null) {
         changed = true;
         if (ordinal > 0 && ordinal <= max) {
-            NamedExpression reference = aggregates.get(ordinal);
+            NamedExpression reference = aggregates.get(ordinal - 1);
             if (containsAggregate(reference)) {
                 throw new AnalysisException(exp, "Group ordinal %d refers to an aggregate function %s which is not compatible/allowed with GROUP BY", ordinal, reference.nodeName());
             }
@@ -724,8 +740,8 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
     }
     if (child instanceof Cast) {
         Cast c = (Cast) child;
-        if (c.argument() instanceof NamedExpression) {
-            return new Alias(c.location(), ((NamedExpression) c.argument()).name(), c);
+        if (c.field() instanceof NamedExpression) {
+            return new Alias(c.location(), ((NamedExpression) c.field()).name(), c);
         }
     }
     //TODO: maybe add something closer to SQL
@@ -966,6 +982,52 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
     }
 }

+private class ImplicitCasting extends AnalyzeRule<LogicalPlan> {
+
+    @Override
+    protected boolean skipResolved() {
+        return false;
+    }
+
+    @Override
+    protected LogicalPlan rule(LogicalPlan plan) {
+        return plan.transformExpressionsDown(this::implicitCast);
+    }
+
+    private Expression implicitCast(Expression e) {
+        if (!e.childrenResolved()) {
+            return e;
+        }
+
+        Expression left = null, right = null;
+
+        // BinaryOperations are ignored as they are pushed down to ES
+        // and casting (and thus Aliasing when folding) gets in the way
+        if (e instanceof ArithmeticFunction) {
+            ArithmeticFunction f = (ArithmeticFunction) e;
+            left = f.left();
+            right = f.right();
+        }
+
+        if (left != null) {
+            DataType l = left.dataType();
+            DataType r = right.dataType();
+            if (!l.same(r)) {
+                DataType common = DataTypeConversion.commonType(l, r);
+                if (common == null) {
+                    return e;
+                }
+                left = l.same(common) ? left : new Cast(left.location(), left, common);
+                right = r.same(common) ? right : new Cast(right.location(), right, common);
+                return NodeUtils.copyTree(e, Arrays.asList(left, right));
+            }
+        }
+
+        return e;
+    }
+}

 abstract static class AnalyzeRule<SubPlan extends LogicalPlan> extends Rule<SubPlan, LogicalPlan> {

     // transformUp (post-order) - that is first children and then the node
@@ -11,11 +11,9 @@ import org.elasticsearch.xpack.sql.expression.Expressions;
 import org.elasticsearch.xpack.sql.expression.NamedExpression;
 import org.elasticsearch.xpack.sql.expression.function.Functions;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
-import org.elasticsearch.xpack.sql.expression.function.scalar.Cast;
 import org.elasticsearch.xpack.sql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.sql.plan.logical.Filter;
 import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.sql.plan.logical.Project;
 import org.elasticsearch.xpack.sql.tree.Node;

 import java.util.ArrayList;
@@ -103,9 +101,6 @@ abstract class Verifier {
     else if (ae instanceof Attribute && !ae.resolved()) {
         localFailures.add(fail(e, "Cannot resolved '%s' from columns %s", Expressions.name(ae), p.intputSet()));
     }
-    else if (ae instanceof Cast && !(p instanceof Project || p instanceof Aggregate)) {
-        localFailures.add(fail(ae, "Cast is (currently) only supported in SELECT and GROUP BY; not in %s", p.nodeName()));
-    }
 }));

 // consider only nodes that are by themselves unresolved (to avoid unresolved dependencies)
New file (126 lines): class AggValues
@@ -0,0 +1,126 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.execution.search;

import java.util.Arrays;
import java.util.List;

/**
 * Aggregations are returned by Elasticsearch in a tree structure where each nested level can have a different size.
 * For example a group by a, b, c results in 3-level nested array where each level contains all the relevant values
 * for its parent entry.
 * Assuming there's a total of 2 A's, 3 B's and 5 C's, the values will be
 * A-agg level = { A1, A2 }
 * B-agg level = { { A1B1, A1B2, A1B3 }, { A2B1, A2B2, A2B3 }
 * C-agg level = { { { A1B1C1, A1B1C2 ..}, { A1B2C1, etc... } } } and so on
 *
 * Further more the columns are added in the order in which they are requested (0, 1, 2) eliminating the need for keys as these are implicit (their position in the list).
 *
 * To help with the iteration, there are two dedicated counters :
 * one that carries (increments) the counter for each level (indicated by the position inside the array) once the children reach their max
 * a flat cursor to indicate the row
 */
class AggValues {
    private int row = 0;

    private final List<Object[]> columns;
    private int[] indexPerLevel;
    private int size;

    AggValues(List<Object[]> columns) {
        this.columns = columns;
    }

    void init(int maxDepth, int limit) {
        int sz = computeSize(columns, maxDepth);
        size = limit > 0 ? Math.min(limit, sz) : sz;
        indexPerLevel = new int[maxDepth + 1];
    }

    private static int computeSize(List<Object[]> columns, int maxDepth) {
        // look only at arrays with the right depth (the others might be
        // counters or other functions)
        // then return the parent array to compute the actual returned results
        Object[] leafArray = null;
        for (int i = 0; i < columns.size() && leafArray == null; i++) {
            Object[] col = columns.get(i);
            Object o = col;
            int level = 0;
            Object[] parent = null;
            // keep unwrapping until the desired level is reached
            while (o instanceof Object[]) {
                col = ((Object[]) o);
                if (col.length > 0) {
                    if (level == maxDepth) {
                        leafArray = parent;
                        break;
                    } else {
                        parent = col;
                        level++;
                        o = col[0];
                    }
                } else {
                    o = null;
                }
            }
        }

        if (leafArray == null) {
            return columns.get(0).length;
        }

        int sz = 0;
        for (Object leaf : leafArray) {
            sz += ((Object[]) leaf).length;
        }
        return sz;
    }

    Object column(int column) {
        Object o = columns.get(column);

        for (int lvl = 0; o instanceof Object[]; lvl++) {
            Object[] arr = (Object[]) o;
            // the current branch is done
            if (indexPerLevel[lvl] == arr.length) {
                // reset the current branch
                indexPerLevel[lvl] = 0;
                // bump the parent - if it's too big it, the loop will restart
                // again from that position
                indexPerLevel[lvl - 1]++;
                // restart the loop
                lvl = -1;
                o = columns.get(column);
            } else {
                o = arr[indexPerLevel[lvl]];
            }
        }
        return o;
    }

    int size() {
        return size;
    }

    void reset() {
        row = 0;
        Arrays.fill(indexPerLevel, 0);
    }

    boolean nextRow() {
        if (row < size - 1) {
            row++;
            // increment leaf counter - the size check is done lazily while retrieving the columns
            indexPerLevel[indexPerLevel.length - 1]++;
            return true;
        }
        return false;
    }

    boolean hasCurrentRow() {
        return row < size;
    }
}
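The javadoc above explains how grouped aggregation results arrive as nested arrays, one level per GROUP BY key. As a hedged illustration (this demo class is not part of the commit; it assumes the 2 x 3 layout from the javadoc and that it sits in the same package, since AggValues is package-private):

// Hypothetical usage sketch only, exercising the new AggValues class from this commit.
package org.elasticsearch.xpack.sql.execution.search;

import java.util.Arrays;

class AggValuesLayoutDemo {
    public static void main(String[] args) {
        Object[] aLevel = { "A1", "A2" };                              // depth 0: one entry per A bucket
        Object[] bLevel = { new Object[] { "A1B1", "A1B2", "A1B3" },
                            new Object[] { "A2B1", "A2B2", "A2B3" } }; // depth 1: B values nested per parent A

        AggValues values = new AggValues(Arrays.asList(aLevel, bLevel));
        values.init(1, -1); // deepest nesting level is 1, no row limit

        // the 2 x 3 leaf entries flatten into 6 result rows
        System.out.println(values.size()); // prints 6
    }
}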
@@ -5,106 +5,28 @@
  */
 package org.elasticsearch.xpack.sql.execution.search;

-import java.util.Arrays;
-import java.util.List;
-
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.xpack.sql.session.AbstractRowSetCursor;
 import org.elasticsearch.xpack.sql.session.Cursor;
 import org.elasticsearch.xpack.sql.type.Schema;

-//
-// Aggregations are returned in a tree structure where each nested level can have a different size.
-// For example a group by a, b, c results in 3-level nested array where each level contains all the relevant values
-// for its parent entry.
-// Assuming there's a total of 2 A's, 3 B's and 5 C's, the values will be
-// A-agg level = { A1, A2 }
-// B-agg level = { { A1B1, A1B2, A1B3 }, { A2B1, A2B2, A2B3 }
-// C-agg level = { { { A1B1C1, A1B1C2 ..}, { A1B2C1, etc... } } } and so on
-//
-// To help with the iteration, there are two dedicated counters :
-// - one that carries (increments) the counter for each level (indicated by the position inside the array) once the children reach their max
-// - a flat cursor to indicate the row
+import java.util.List;
+import java.util.function.Supplier;

 class AggsRowSetCursor extends AbstractRowSetCursor {

     private int row = 0;
+    private final AggValues agg;
+    private final List<Supplier<Object>> columns;

-    private final List<Object[]> columns;
-    private final int[] indexPerLevel;
-    private final int size;
-
-    AggsRowSetCursor(Schema schema, List<Object[]> columns, int maxDepth, int limit) {
+    AggsRowSetCursor(Schema schema, AggValues agg, List<Supplier<Object>> columns) {
         super(schema, null);
+        this.agg = agg;
         this.columns = columns;
-
-        int sz = computeSize(columns, maxDepth);
-        size = limit > 0 ? Math.min(limit, sz) : sz;
-        indexPerLevel = new int[maxDepth + 1];
-    }
-
-    private static int computeSize(List<Object[]> columns, int maxDepth) {
-        // look only at arrays with the right depth (the others might be counters or other functions)
-        // then return the parent array to compute the actual returned results
-        Object[] leafArray = null;
-        for (int i = 0; i < columns.size() && leafArray == null; i++) {
-            Object[] col = columns.get(i);
-            Object o = col;
-            int level = 0;
-            Object[] parent = null;
-            // keep unwrapping until the desired level is reached
-            while (o instanceof Object[]) {
-                col = ((Object[]) o);
-                if (col.length > 0) {
-                    if (level == maxDepth) {
-                        leafArray = parent;
-                        break;
-                    }
-                    else {
-                        parent = col;
-                        level++;
-                        o = col[0];
-                    }
-                }
-                else {
-                    o = null;
-                }
-            }
-        }
-
-        if (leafArray == null) {
-            return columns.get(0).length;
-        }
-
-        int sz = 0;
-        for (Object leaf : leafArray) {
-            sz += ((Object[]) leaf).length;
-        }
-        return sz;
     }

     @Override
     protected Object getColumn(int column) {
-        Object o = columns.get(column);
-
-        for (int lvl = 0; o instanceof Object[]; lvl++) {
-            Object[] arr = (Object[]) o;
-            // the current branch is done
-            if (indexPerLevel[lvl] == arr.length) {
-                // reset the current branch
-                indexPerLevel[lvl] = 0;
-                // bump the parent - if it's too big it, the loop will restart again from that position
-                indexPerLevel[lvl - 1]++;
-                // restart the loop
-                lvl = -1;
-                o = columns.get(column);
-            }
-            else {
-                o = arr[indexPerLevel[lvl]];
-            }
-        }
-        return o;
+        return columns.get(column).get();
     }

     @Override
@@ -114,24 +36,17 @@ class AggsRowSetCursor extends AbstractRowSetCursor {

     @Override
     protected boolean doNext() {
-        if (row < size() - 1) {
-            row++;
-            // increment leaf counter - the size check is done lazily while retrieving the columns
-            indexPerLevel[indexPerLevel.length - 1]++;
-            return true;
-        }
-        return false;
+        return agg.nextRow();
     }

     @Override
     protected void doReset() {
-        row = 0;
-        Arrays.fill(indexPerLevel, 0);
+        agg.reset();
     }

     @Override
     public int size() {
-        return size;
+        return agg.size();
     }

     @Override
Deleted file (79 lines): class ProcessingHitExtractor
@@ -1,79 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.execution.search;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;

import java.io.IOException;
import java.util.Objects;

class ProcessingHitExtractor implements HitExtractor {
    static final String NAME = "p";
    private final HitExtractor delegate;
    private final ColumnProcessor processor;

    ProcessingHitExtractor(HitExtractor delegate, ColumnProcessor processor) {
        this.delegate = delegate;
        this.processor = processor;
    }

    ProcessingHitExtractor(StreamInput in) throws IOException {
        delegate = in.readNamedWriteable(HitExtractor.class);
        processor = in.readNamedWriteable(ColumnProcessor.class);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeNamedWriteable(delegate);
        out.writeNamedWriteable(processor);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    HitExtractor delegate() {
        return delegate;
    }

    ColumnProcessor processor() {
        return processor;
    }

    @Override
    public Object get(SearchHit hit) {
        return processor.apply(delegate.get(hit));
    }

    @Override
    public String innerHitName() {
        return delegate.innerHitName();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        ProcessingHitExtractor other = (ProcessingHitExtractor) obj;
        return delegate.equals(other.delegate)
                && processor.equals(other.processor);
    }

    @Override
    public int hashCode() {
        return Objects.hash(delegate, processor);
    }

    @Override
    public String toString() {
        return processor + "(" + delegate + ")";
    }
}
@@ -16,6 +16,8 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor;
+import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractors;
 import org.elasticsearch.xpack.sql.session.Cursor;
 import org.elasticsearch.xpack.sql.session.RowSetCursor;
 import org.elasticsearch.xpack.sql.type.DataType;
@@ -36,7 +38,7 @@ public class ScrollCursor implements Cursor {
     /**
      * {@link NamedWriteableRegistry} used to resolve the {@link #extractors}.
      */
-    private static final NamedWriteableRegistry REGISTRY = new NamedWriteableRegistry(HitExtractor.getNamedWriteables());
+    private static final NamedWriteableRegistry REGISTRY = new NamedWriteableRegistry(HitExtractors.getNamedWriteables());

     private final String scrollId;
     private final List<HitExtractor> extractors;
@ -23,13 +23,24 @@ import org.elasticsearch.search.aggregations.support.AggregationPath;
|
|||||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||||
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
|
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
|
||||||
import org.elasticsearch.xpack.sql.execution.ExecutionException;
|
import org.elasticsearch.xpack.sql.execution.ExecutionException;
|
||||||
import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
|
import org.elasticsearch.xpack.sql.execution.search.extractor.ComputingHitExtractor;
|
||||||
|
import org.elasticsearch.xpack.sql.execution.search.extractor.ConstantExtractor;
|
||||||
|
import org.elasticsearch.xpack.sql.execution.search.extractor.DocValueExtractor;
|
||||||
|
import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor;
|
||||||
|
import org.elasticsearch.xpack.sql.execution.search.extractor.InnerHitExtractor;
|
||||||
|
import org.elasticsearch.xpack.sql.execution.search.extractor.SourceExtractor;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggPathInput;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggValueInput;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.HitExtractorInput;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ReferenceInput;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
|
||||||
import org.elasticsearch.xpack.sql.querydsl.agg.AggPath;
|
import org.elasticsearch.xpack.sql.querydsl.agg.AggPath;
|
||||||
import org.elasticsearch.xpack.sql.querydsl.container.AggRef;
|
import org.elasticsearch.xpack.sql.querydsl.container.AggRef;
|
||||||
|
import org.elasticsearch.xpack.sql.querydsl.container.ColumnReference;
|
||||||
|
import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef;
|
||||||
import org.elasticsearch.xpack.sql.querydsl.container.NestedFieldRef;
|
import org.elasticsearch.xpack.sql.querydsl.container.NestedFieldRef;
|
||||||
import org.elasticsearch.xpack.sql.querydsl.container.ProcessingRef;
|
|
||||||
import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer;
|
import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer;
|
||||||
import org.elasticsearch.xpack.sql.querydsl.container.Reference;
|
|
||||||
import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef;
|
import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef;
|
||||||
import org.elasticsearch.xpack.sql.querydsl.container.SearchHitFieldRef;
|
import org.elasticsearch.xpack.sql.querydsl.container.SearchHitFieldRef;
|
||||||
import org.elasticsearch.xpack.sql.querydsl.container.TotalCountRef;
|
import org.elasticsearch.xpack.sql.querydsl.container.TotalCountRef;
|
||||||
@ -38,11 +49,12 @@ import org.elasticsearch.xpack.sql.session.Rows;
|
|||||||
import org.elasticsearch.xpack.sql.session.SqlSettings;
|
import org.elasticsearch.xpack.sql.session.SqlSettings;
|
||||||
import org.elasticsearch.xpack.sql.type.Schema;
|
import org.elasticsearch.xpack.sql.type.Schema;
|
||||||
import org.elasticsearch.xpack.sql.util.ObjectUtils;
|
import org.elasticsearch.xpack.sql.util.ObjectUtils;
|
||||||
|
import org.elasticsearch.xpack.sql.util.StringUtils;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.function.Consumer;
|
import java.util.function.Consumer;
|
||||||
|
import java.util.function.Supplier;
|
||||||
// TODO: add retry/back-off
|
// TODO: add retry/back-off
|
||||||
public class Scroller {
|
public class Scroller {
|
||||||
|
|
||||||
@ -68,7 +80,9 @@ public class Scroller {
|
|||||||
// prepare the request
|
// prepare the request
|
||||||
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(query);
|
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(query);
|
||||||
|
|
||||||
log.trace("About to execute query {} on {}", sourceBuilder, index);
|
if (log.isTraceEnabled()) {
|
||||||
|
log.trace("About to execute query {} on {}", StringUtils.toString(sourceBuilder), index);
|
||||||
|
}
|
||||||
|
|
||||||
SearchRequest search = client.prepareSearch(index).setSource(sourceBuilder).request();
|
SearchRequest search = client.prepareSearch(index).setSource(sourceBuilder).request();
|
||||||
search.scroll(keepAlive).source().timeout(timeout);
|
search.scroll(keepAlive).source().timeout(timeout);
|
||||||
@ -79,7 +93,9 @@ public class Scroller {
|
|||||||
search.source().size(sz);
|
search.source().size(sz);
|
||||||
}
|
}
|
||||||
|
|
||||||
ScrollerActionListener l = query.isAggsOnly() ? new AggsScrollActionListener(listener, client, timeout, schema, query) : new HandshakeScrollActionListener(listener, client, timeout, schema, query);
|
boolean isAggsOnly = query.isAggsOnly();
|
||||||
|
|
||||||
|
ScrollerActionListener l = isAggsOnly ? new AggsScrollActionListener(listener, client, timeout, schema, query) : new HandshakeScrollActionListener(listener, client, timeout, schema, query);
|
||||||
client.search(search, l);
|
client.search(search, l);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -91,7 +107,7 @@ public class Scroller {
|
|||||||
|
|
||||||
// dedicated scroll used for aggs-only/group-by results
|
// dedicated scroll used for aggs-only/group-by results
|
||||||
static class AggsScrollActionListener extends ScrollerActionListener {
|
static class AggsScrollActionListener extends ScrollerActionListener {
|
||||||
|
|
||||||
private final QueryContainer query;
|
private final QueryContainer query;
|
||||||
|
|
||||||
AggsScrollActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, QueryContainer query) {
|
AggsScrollActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, QueryContainer query) {
|
||||||
@ -101,72 +117,90 @@ public class Scroller {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected RowSetCursor handleResponse(SearchResponse response) {
|
protected RowSetCursor handleResponse(SearchResponse response) {
|
||||||
Aggregations aggs = response.getAggregations();
|
|
||||||
|
final List<Object[]> extractedAggs = new ArrayList<>();
|
||||||
List<Object[]> columns = new ArrayList<>();
|
AggValues aggValues = new AggValues(extractedAggs);
|
||||||
|
List<Supplier<Object>> aggColumns = new ArrayList<>(query.columns().size());
|
||||||
|
|
||||||
// this method assumes the nested aggregation are all part of the same tree (the SQL group-by)
|
// this method assumes the nested aggregation are all part of the same tree (the SQL group-by)
|
||||||
int maxDepth = -1;
|
int maxDepth = -1;
|
||||||
|
|
||||||
|
List<ColumnReference> cols = query.columns();
|
||||||
|
for (int index = 0; index < cols.size(); index++) {
|
||||||
|
ColumnReference col = cols.get(index);
|
||||||
|
Supplier<Object> supplier = null;
|
||||||
|
|
||||||
for (Reference ref : query.refs()) {
|
if (col instanceof ComputedRef) {
|
||||||
Object[] arr = null;
|
ComputedRef pRef = (ComputedRef) col;
|
||||||
|
|
||||||
ColumnProcessor processor = null;
|
Processor processor = pRef.processor().transformUp(a -> {
|
||||||
|
Object[] value = extractAggValue(new AggRef(a.context()), response);
|
||||||
if (ref instanceof ProcessingRef) {
|
extractedAggs.add(value);
|
||||||
ProcessingRef pRef = (ProcessingRef) ref;
|
final int aggPosition = extractedAggs.size() - 1;
|
||||||
processor = pRef.processor();
|
return new AggValueInput(a.expression(), () -> aggValues.column(aggPosition), a.innerKey());
|
||||||
ref = pRef.ref();
|
}, AggPathInput.class).asProcessor();
|
||||||
|
// the input is provided through the value input above
|
||||||
|
supplier = () -> processor.process(null);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
extractedAggs.add(extractAggValue(col, response));
|
||||||
|
final int aggPosition = extractedAggs.size() - 1;
|
||||||
|
supplier = () -> aggValues.column(aggPosition);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ref == TotalCountRef.INSTANCE) {
|
aggColumns.add(supplier);
|
||||||
arr = new Object[] { processIfNeeded(processor, Long.valueOf(response.getHits().getTotalHits())) };
|
if (col.depth() > maxDepth) {
|
||||||
columns.add(arr);
|
maxDepth = col.depth();
|
||||||
}
|
}
|
||||||
else if (ref instanceof AggRef) {
|
}
|
||||||
|
|
||||||
|
aggValues.init(maxDepth, query.limit());
|
||||||
|
clearScroll(response.getScrollId());
|
||||||
|
|
||||||
|
return new AggsRowSetCursor(schema, aggValues, aggColumns);
|
||||||
|
}
|
||||||
|
|
||||||
|
private Object[] extractAggValue(ColumnReference col, SearchResponse response) {
|
||||||
|
if (col == TotalCountRef.INSTANCE) {
|
||||||
|
return new Object[] { Long.valueOf(response.getHits().getTotalHits()) };
|
||||||
|
}
|
||||||
|
else if (col instanceof AggRef) {
|
||||||
|
Object[] arr;
|
||||||
|
|
||||||
|
String path = ((AggRef) col).path();
|
||||||
|
// yup, this is instance equality to make sure we only check the path used by the code
|
||||||
|
if (path == TotalCountRef.PATH) {
|
||||||
|
arr = new Object[] { Long.valueOf(response.getHits().getTotalHits()) };
|
||||||
|
}
|
||||||
|
else {
|
||||||
// workaround for elastic/elasticsearch/issues/23056
|
// workaround for elastic/elasticsearch/issues/23056
|
||||||
String path = ((AggRef) ref).path();
|
|
||||||
boolean formattedKey = AggPath.isBucketValueFormatted(path);
|
boolean formattedKey = AggPath.isBucketValueFormatted(path);
|
||||||
if (formattedKey) {
|
if (formattedKey) {
|
||||||
path = AggPath.bucketValueWithoutFormat(path);
|
path = AggPath.bucketValueWithoutFormat(path);
|
||||||
}
|
}
|
||||||
Object value = getAggProperty(aggs, path);
|
Object value = getAggProperty(response.getAggregations(), path);
|
||||||
|
|
||||||
// // FIXME: this can be tabular in nature
|
// // FIXME: this can be tabular in nature
|
||||||
// if (ref instanceof MappedAggRef) {
|
// if (ref instanceof MappedAggRef) {
|
||||||
// Map<String, Object> map = (Map<String, Object>) value;
|
// Map<String, Object> map = (Map<String, Object>) value;
|
||||||
// Object extractedValue = map.get(((MappedAggRef) ref).fieldName());
|
// Object extractedValue = map.get(((MappedAggRef)
|
||||||
// }
|
// ref).fieldName());
|
||||||
|
// }
|
||||||
|
|
||||||
if (formattedKey) {
|
if (formattedKey) {
|
||||||
List<? extends Bucket> buckets = ((MultiBucketsAggregation) value).getBuckets();
|
List<? extends Bucket> buckets = ((MultiBucketsAggregation) value).getBuckets();
|
||||||
arr = new Object[buckets.size()];
|
arr = new Object[buckets.size()];
|
||||||
for (int i = 0; i < buckets.size(); i++) {
|
for (int i = 0; i < buckets.size(); i++) {
|
||||||
arr[i] = buckets.get(i).getKeyAsString();
|
arr[i] = buckets.get(i).getKeyAsString();
|
||||||
}
|
}
|
||||||
}
|
} else {
|
||||||
else {
|
|
||||||
arr = value instanceof Object[] ? (Object[]) value : new Object[] { value };
|
arr = value instanceof Object[] ? (Object[]) value : new Object[] { value };
|
||||||
}
|
}
|
||||||
|
|
||||||
// process if needed
|
|
||||||
for (int i = 0; i < arr.length; i++) {
|
|
||||||
arr[i] = processIfNeeded(processor, arr[i]);
|
|
||||||
}
|
|
||||||
columns.add(arr);
|
|
||||||
}
|
|
||||||
// aggs without any grouping
|
|
||||||
else {
|
|
||||||
throw new SqlIllegalArgumentException("Unexpected non-agg/grouped column specified; %s", ref.getClass());
|
|
||||||
}
|
|
||||||
|
|
||||||
if (ref.depth() > maxDepth) {
|
|
||||||
maxDepth = ref.depth();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return arr;
|
||||||
}
|
}
|
||||||
|
throw new SqlIllegalArgumentException("Unexpected non-agg/grouped column specified; %s", col.getClass());
|
||||||
clearScroll(response.getScrollId());
|
|
||||||
return new AggsRowSetCursor(schema, columns, maxDepth, query.limit());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Object getAggProperty(Aggregations aggs, String path) {
|
private static Object getAggProperty(Aggregations aggs, String path) {
|
||||||
@ -178,10 +212,6 @@ public class Scroller {
|
|||||||
}
|
}
|
||||||
return agg.getProperty(list.subList(1, list.size()));
|
return agg.getProperty(list.subList(1, list.size()));
|
||||||
}
|
}
|
||||||
|
|
||||||
private Object processIfNeeded(ColumnProcessor processor, Object value) {
|
|
||||||
return processor != null ? processor.apply(value) : value;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
    // initial scroll used for parsing search hits (handles possible aggs)
@@ -202,17 +232,17 @@ public class Scroller {
        @Override
        protected List<HitExtractor> getExtractors() {
            // create response extractors for the first time
-           List<Reference> refs = query.refs();
+           List<ColumnReference> refs = query.columns();

            List<HitExtractor> exts = new ArrayList<>(refs.size());

-           for (Reference ref : refs) {
+           for (ColumnReference ref : refs) {
                exts.add(createExtractor(ref));
            }
            return exts;
        }

-       private HitExtractor createExtractor(Reference ref) {
+       private HitExtractor createExtractor(ColumnReference ref) {
            if (ref instanceof SearchHitFieldRef) {
                SearchHitFieldRef f = (SearchHitFieldRef) ref;
                return f.useDocValue() ? new DocValueExtractor(f.name()) : new SourceExtractor(f.name());
@@ -228,9 +258,10 @@ public class Scroller {
                return new DocValueExtractor(f.name());
            }

-           if (ref instanceof ProcessingRef) {
-               ProcessingRef pRef = (ProcessingRef) ref;
-               return new ProcessingHitExtractor(createExtractor(pRef.ref()), pRef.processor());
+           if (ref instanceof ComputedRef) {
+               ProcessorDefinition proc = ((ComputedRef) ref).processor();
+               proc = proc.transformDown(l -> new HitExtractorInput(l.expression(), createExtractor(l.context())), ReferenceInput.class);
+               return new ComputingHitExtractor(proc.asProcessor());
            }

            throw new SqlIllegalArgumentException("Unexpected ValueReference %s", ref.getClass());
@@ -303,7 +334,8 @@ public class Scroller {

    private static boolean needsHit(List<HitExtractor> exts) {
        for (HitExtractor ext : exts) {
-           if (ext instanceof DocValueExtractor || ext instanceof ProcessingHitExtractor) {
+           // Anything non-constant requires extraction
+           if (!(ext instanceof ConstantExtractor)) {
                return true;
            }
        }
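The needsHit change above reduces to a single rule: only a constant extractor can produce its value without the fetched document, so a hit is needed as soon as any extractor is non-constant. A minimal stand-alone sketch of that rule, using deliberately simplified stand-ins rather than the actual HitExtractor classes:

import java.util.List;
import java.util.function.Function;

// Simplified, hypothetical extractor hierarchy for illustration only.
interface Extractor { Object get(Object hit); }

final class ConstantExtractorSketch implements Extractor {
    private final Object value;
    ConstantExtractorSketch(Object value) { this.value = value; }
    public Object get(Object hit) { return value; } // never looks at the hit
}

final class ComputingExtractorSketch implements Extractor {
    private final Function<Object, Object> processor;
    ComputingExtractorSketch(Function<Object, Object> processor) { this.processor = processor; }
    public Object get(Object hit) { return processor.apply(hit); }
}

class NeedsHitDemo {
    // any non-constant extractor forces the hit to be fetched
    static boolean needsHit(List<Extractor> exts) {
        return exts.stream().anyMatch(e -> !(e instanceof ConstantExtractorSketch));
    }

    public static void main(String[] args) {
        System.out.println(needsHit(List.of(new ConstantExtractorSketch(1))));                 // false
        System.out.println(needsHit(List.of(new ComputingExtractorSketch(Object::toString)))); // true
    }
}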
@@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
+import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor;
import org.elasticsearch.xpack.sql.session.AbstractRowSetCursor;
import org.elasticsearch.xpack.sql.session.Cursor;
import org.elasticsearch.xpack.sql.session.RowSetCursor;
@@ -5,12 +5,8 @@
 */
package org.elasticsearch.xpack.sql.execution.search;

-import java.util.ArrayList;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Set;
-
import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -18,27 +14,36 @@ import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
+import org.elasticsearch.search.sort.SortBuilder;
+import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ReferenceInput;
import org.elasticsearch.xpack.sql.querydsl.agg.Aggs;
import org.elasticsearch.xpack.sql.querydsl.container.AttributeSort;
-import org.elasticsearch.xpack.sql.querydsl.container.ProcessingRef;
+import org.elasticsearch.xpack.sql.querydsl.container.ColumnReference;
+import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef;
import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer;
-import org.elasticsearch.xpack.sql.querydsl.container.Reference;
import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef;
import org.elasticsearch.xpack.sql.querydsl.container.ScriptSort;
import org.elasticsearch.xpack.sql.querydsl.container.SearchHitFieldRef;
import org.elasticsearch.xpack.sql.querydsl.container.Sort;
import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction;
import org.elasticsearch.xpack.sql.querydsl.query.NestedQuery;
-import org.elasticsearch.search.sort.SortBuilder;
-import org.elasticsearch.search.sort.SortOrder;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;

import static java.util.Collections.singletonList;

import static org.elasticsearch.search.sort.SortBuilders.fieldSort;
import static org.elasticsearch.search.sort.SortBuilders.scriptSort;

@@ -56,29 +61,20 @@ public abstract class SourceGenerator {
        // translate fields to source-fields or script fields
        Set<String> sourceFields = new LinkedHashSet<>();
        Set<String> docFields = new LinkedHashSet<>();
-       for (Reference ref : container.refs()) {
-           if (ref instanceof ProcessingRef) {
-               ref = ((ProcessingRef) ref).ref();
-           }
-
-           if (ref instanceof SearchHitFieldRef) {
-               SearchHitFieldRef sh = (SearchHitFieldRef) ref;
-               Set<String> collection = sh.useDocValue() ? docFields : sourceFields;
-               collection.add(ref.toString());
-           }
-           else if (ref instanceof ScriptFieldRef) {
-               ScriptFieldRef sfr = (ScriptFieldRef) ref;
-               source.scriptField(sfr.name(), sfr.script().toPainless());
-           }
+       Map<String, Script> scriptFields = new LinkedHashMap<>();
+
+       for (ColumnReference ref : container.columns()) {
+           collectFields(ref, sourceFields, docFields, scriptFields);
        }

        if (!sourceFields.isEmpty()) {
            source.fetchSource(sourceFields.toArray(new String[sourceFields.size()]), null);
        }
-       if (!docFields.isEmpty()) {
-           for (String field : docFields) {
-               source.docValueField(field);
-           }
+       for (String field : docFields) {
+           source.docValueField(field);
+       }
+       for (Entry<String, Script> entry : scriptFields.entrySet()) {
+           source.scriptField(entry.getKey(), entry.getValue());
        }

        sorting(container, source);
@@ -99,6 +95,22 @@ public abstract class SourceGenerator {
        return source;
    }

+   private static void collectFields(ColumnReference ref, Set<String> sourceFields, Set<String> docFields, Map<String, Script> scriptFields) {
+       if (ref instanceof ComputedRef) {
+           ProcessorDefinition proc = ((ComputedRef) ref).processor();
+           proc.forEachUp(l -> collectFields(l.context(), sourceFields, docFields, scriptFields), ReferenceInput.class);
+       }
+       else if (ref instanceof SearchHitFieldRef) {
+           SearchHitFieldRef sh = (SearchHitFieldRef) ref;
+           Set<String> collection = sh.useDocValue() ? docFields : sourceFields;
+           collection.add(sh.name());
+       }
+       else if (ref instanceof ScriptFieldRef) {
+           ScriptFieldRef sfr = (ScriptFieldRef) ref;
+           scriptFields.put(sfr.name(), sfr.script().toPainless());
+       }
+   }
+
    private static void sorting(QueryContainer container, SearchSourceBuilder source) {
        if (container.sort() != null) {
@@ -0,0 +1,82 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.execution.search.extractor;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;

import java.io.IOException;
import java.util.Objects;

/**
 * HitExtractor that delegates to a processor. The difference between this class
 * and {@link HitExtractorProcessor} is that the latter is used inside a
 * {@link Processor} tree as a leaf (and thus can effectively parse the
 * {@link SearchHit} while this class is used when scrolling and passing down
 * the results.
 *
 * In the future, the processor might be used across the board for all columns
 * to reduce API complexity (and keep the {@link HitExtractor} only as an
 * internal implementation detail).
 */
public class ComputingHitExtractor implements HitExtractor {
    static final String NAME = "p";
    private final Processor processor;

    public ComputingHitExtractor(Processor processor) {
        this.processor = processor;
    }

    ComputingHitExtractor(StreamInput in) throws IOException {
        processor = in.readNamedWriteable(Processor.class);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeNamedWriteable(processor);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    public Processor processor() {
        return processor;
    }

    @Override
    public Object get(SearchHit hit) {
        return processor.process(hit);
    }

    @Override
    public String innerHitName() {
        return null;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        ComputingHitExtractor other = (ComputingHitExtractor) obj;
        return processor.equals(other.processor);
    }

    @Override
    public int hashCode() {
        return Objects.hash(processor);
    }

    @Override
    public String toString() {
        return processor.toString();
    }
}
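ComputingHitExtractor is the runtime end of the new ProcessorDefinition pipeline: an immutable definition tree is resolved against the search response and "compiled" into a single Processor that is applied per hit. The following stand-alone sketch mimics that shape with hypothetical, simplified types (Definition, FieldRef, Unary and SearchHitLike are illustrative names, not the x-pack classes):

import java.util.function.Function;

// A hit exposes field values; a Definition is an immutable tree that compiles to one runtime step.
interface SearchHitLike { Object value(String field); }

interface Definition {
    Function<SearchHitLike, Object> compile();
}

// leaf: plays the role of a reference input resolved against the hit
final class FieldRef implements Definition {
    private final String field;
    FieldRef(String field) { this.field = field; }
    public Function<SearchHitLike, Object> compile() { return hit -> hit.value(field); }
}

// node: a scalar step layered on top of its child, composed at "compile" time
final class Unary implements Definition {
    private final Definition child;
    private final Function<Object, Object> step;
    Unary(Definition child, Function<Object, Object> step) { this.child = child; this.step = step; }
    public Function<SearchHitLike, Object> compile() { return child.compile().andThen(step); }
}

class DefinitionDemo {
    public static void main(String[] args) {
        // ROUND(value) + 10, built as a definition tree and folded once into a runtime function
        Definition def = new Unary(new Unary(new FieldRef("value"),
                v -> Math.round(((Number) v).doubleValue())), v -> ((Long) v) + 10);
        SearchHitLike hit = field -> 2.6d;            // single-field fake hit
        System.out.println(def.compile().apply(hit)); // 3 + 10 -> 13
    }
}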
@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
-package org.elasticsearch.xpack.sql.execution.search;
+package org.elasticsearch.xpack.sql.execution.search.extractor;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -15,11 +15,11 @@ import java.util.Objects;
/**
 * Returns the a constant for every search hit against which it is run.
 */
-class ConstantExtractor implements HitExtractor {
+public class ConstantExtractor implements HitExtractor {
    static final String NAME = "c";
    private final Object constant;

-   ConstantExtractor(Object constant) {
+   public ConstantExtractor(Object constant) {
        this.constant = constant;
    }
@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
-package org.elasticsearch.xpack.sql.execution.search;
+package org.elasticsearch.xpack.sql.execution.search.extractor;

import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -15,11 +15,11 @@ import java.io.IOException;
/**
 * Extracts field values from {@link SearchHit#field(String)}.
 */
-class DocValueExtractor implements HitExtractor {
+public class DocValueExtractor implements HitExtractor {
    static final String NAME = "f";
    private final String fieldName;

-   DocValueExtractor(String name) {
+   public DocValueExtractor(String name) {
        this.fieldName = name;
    }
@@ -0,0 +1,26 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.execution.search.extractor;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.search.SearchHit;

/**
 * Extracts a column value from a {@link SearchHit}.
 */
public interface HitExtractor extends NamedWriteable {
    /**
     * Extract the value from a hit.
     */
    Object get(SearchHit hit);

    /**
     * Name of the inner hit needed by this extractor if it needs one, {@code null} otherwise.
     */
    @Nullable
    String innerHitName();
}
@@ -3,45 +3,29 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
-package org.elasticsearch.xpack.sql.execution.search;
+package org.elasticsearch.xpack.sql.execution.search.extractor;

-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
-import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
+import org.elasticsearch.xpack.sql.expression.function.scalar.Processors;

import java.util.ArrayList;
import java.util.List;

-/**
- * Extracts a columns value from a {@link SearchHit}.
- */
-public interface HitExtractor extends NamedWriteable {
+public abstract class HitExtractors {

    /**
-    * All of the named writeables needed to deserialize the instances
-    * of {@linkplain HitExtractor}.
+    * All of the named writeables needed to deserialize the instances of
+    * {@linkplain HitExtractor}.
     */
-   static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
+   public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
        entries.add(new Entry(HitExtractor.class, ConstantExtractor.NAME, ConstantExtractor::new));
        entries.add(new Entry(HitExtractor.class, DocValueExtractor.NAME, DocValueExtractor::new));
        entries.add(new Entry(HitExtractor.class, InnerHitExtractor.NAME, InnerHitExtractor::new));
        entries.add(new Entry(HitExtractor.class, SourceExtractor.NAME, SourceExtractor::new));
-       entries.add(new Entry(HitExtractor.class, ProcessingHitExtractor.NAME, ProcessingHitExtractor::new));
-       entries.addAll(ColumnProcessor.getNamedWriteables());
+       entries.add(new Entry(HitExtractor.class, ComputingHitExtractor.NAME, ComputingHitExtractor::new));
+       entries.addAll(Processors.getNamedWriteables());
        return entries;
    }
+}

-   /**
-    * Extract the value from a hit.
-    */
-   Object get(SearchHit hit);
-
-   /**
-    * Name of the inner hit needed by this extractor if it needs one, {@code null} otherwise.
-    */
-   @Nullable
-   String innerHitName();
-}
@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
-package org.elasticsearch.xpack.sql.execution.search;
+package org.elasticsearch.xpack.sql.execution.search.extractor;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.document.DocumentField;
@@ -16,13 +16,13 @@ import java.io.IOException;
import java.util.Map;
import java.util.Objects;

-class InnerHitExtractor implements HitExtractor {
+public class InnerHitExtractor implements HitExtractor {
    static final String NAME = "i";
    private final String hitName, fieldName;
    private final boolean useDocValue;
    private final String[] tree;

-   InnerHitExtractor(String hitName, String name, boolean useDocValue) {
+   public InnerHitExtractor(String hitName, String name, boolean useDocValue) {
        this.hitName = hitName;
        this.fieldName = name;
        this.useDocValue = useDocValue;
@@ -3,20 +3,20 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
-package org.elasticsearch.xpack.sql.execution.search;
+package org.elasticsearch.xpack.sql.execution.search.extractor;

-import java.io.IOException;
-import java.util.Map;
-
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.SearchHit;

+import java.io.IOException;
+import java.util.Map;
+
-class SourceExtractor implements HitExtractor {
+public class SourceExtractor implements HitExtractor {
    public static final String NAME = "s";
    private final String fieldName;

-   SourceExtractor(String name) {
+   public SourceExtractor(String name) {
        this.fieldName = name;
    }
@@ -5,21 +5,15 @@
 */
package org.elasticsearch.xpack.sql.expression;

+import org.elasticsearch.xpack.sql.tree.Location;
+
import java.util.Arrays;
import java.util.Objects;

-import org.elasticsearch.xpack.sql.tree.Location;
-import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.DataTypes;
-
public abstract class BinaryExpression extends Expression {

    private final Expression left, right;

-   public interface Negateable {
-       BinaryExpression negate();
-   }
-
    protected BinaryExpression(Location location, Expression left, Expression right) {
        super(location, Arrays.asList(left, right));
        this.left = left;
@@ -44,13 +38,6 @@ public abstract class BinaryExpression extends Expression {
        return left.nullable() || left.nullable();
    }

-   public abstract BinaryExpression swapLeftAndRight();
-
-   @Override
-   public DataType dataType() {
-       return DataTypes.BOOLEAN;
-   }
-
    @Override
    public int hashCode() {
        return Objects.hash(left, right);
@@ -79,6 +66,7 @@ public abstract class BinaryExpression extends Expression {
        return sb.toString();
    }

    // simplify toString
    public abstract String symbol();
+
+   public abstract BinaryExpression swapLeftAndRight();
}
@@ -0,0 +1,28 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression;

import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;

public abstract class BinaryLogic extends BinaryOperator {

    protected BinaryLogic(Location location, Expression left, Expression right) {
        super(location, left, right);
    }

    @Override
    public DataType dataType() {
        return DataTypes.BOOLEAN;
    }

    @Override
    protected TypeResolution resolveInputType(DataType inputType) {
        return DataTypes.BOOLEAN.equals(inputType) ? TypeResolution.TYPE_RESOLVED : new TypeResolution(
                "'%s' requires type %s not %s", symbol(), DataTypes.BOOLEAN.sqlName(), inputType.sqlName());
    }
}
@@ -7,30 +7,33 @@ package org.elasticsearch.xpack.sql.expression;

import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.DataTypeConversion;

+//Binary expression that requires both input expressions to have the same type
+//Compatible types should be handled by the analyzer (by using the narrowest type)
public abstract class BinaryOperator extends BinaryExpression {

+   public interface Negateable {
+       BinaryExpression negate();
+   }
+
    protected BinaryOperator(Location location, Expression left, Expression right) {
        super(location, left, right);
    }

-   protected abstract DataType acceptedType();
+   protected abstract TypeResolution resolveInputType(DataType inputType);

    @Override
    protected TypeResolution resolveType() {
-       DataType accepted = acceptedType();
+       if (!childrenResolved()) {
+           return new TypeResolution("Unresolved children");
+       }
        DataType l = left().dataType();
        DataType r = right().dataType();

-       if (!l.same(r)) {
-           return new TypeResolution("Different types (%s and %s) used in '%s'", l.sqlName(), r.sqlName(), symbol());
-       }
-       if (!DataTypeConversion.canConvert(accepted, left().dataType())) {
-           return new TypeResolution("'%s' requires type %s not %s", symbol(), accepted.sqlName(), l.sqlName());
-       }
-       else {
-           return TypeResolution.TYPE_RESOLVED;
-       }
+       TypeResolution resolution = resolveInputType(l);
+       if (resolution == TypeResolution.TYPE_RESOLVED) {
+           return resolveInputType(r);
+       }
+       return resolution;
    }
}
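The resolveType rewrite above switches from "both operands must share one type" to "each operand must satisfy the operator's own input rule", which is what lets arithmetic accept mixed types within the same numeric hierarchy. A stand-alone sketch of that control flow, using hypothetical simplified types rather than the x-pack classes:

import java.util.function.Predicate;

// Illustrative only: a binary operator validates each operand against its own input rule.
final class TypeCheck {
    static final String OK = "resolved";

    static String resolveType(String leftType, String rightType, Predicate<String> acceptsInput, String symbol) {
        String left = resolveInput(leftType, acceptsInput, symbol);
        // only check the right side once the left side passed, mirroring the new resolveType()
        return OK.equals(left) ? resolveInput(rightType, acceptsInput, symbol) : left;
    }

    private static String resolveInput(String type, Predicate<String> acceptsInput, String symbol) {
        return acceptsInput.test(type) ? OK : "'" + symbol + "' requires a numeric type, not " + type;
    }

    public static void main(String[] args) {
        Predicate<String> numeric = t -> t.equals("int") || t.equals("long") || t.equals("double");
        System.out.println(resolveType("int", "double", numeric, "+"));  // resolved (mixed numerics are fine)
        System.out.println(resolveType("int", "keyword", numeric, "+")); // error reported for the right operand
    }
}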
@@ -5,6 +5,7 @@
 */
package org.elasticsearch.xpack.sql.expression;

+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.capabilities.Resolvable;
import org.elasticsearch.xpack.sql.capabilities.Resolvables;
import org.elasticsearch.xpack.sql.tree.Location;
@@ -62,7 +63,7 @@ public abstract class Expression extends Node<Expression> implements Resolvable
    }

    public Object fold() {
-       return null;
+       throw new SqlIllegalArgumentException("Should not fold expression");
    }

    public abstract boolean nullable();
@@ -5,6 +5,8 @@
 */
package org.elasticsearch.xpack.sql.expression;

+import org.elasticsearch.xpack.sql.expression.Expression.TypeResolution;
+
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
@@ -68,4 +70,14 @@ public abstract class Expressions {
    public static Attribute attribute(Expression e) {
        return e instanceof NamedExpression ? ((NamedExpression) e).toAttribute() : null;
    }
+
+   public static TypeResolution typeMustBe(Expression e, Predicate<Expression> predicate, String message) {
+       return predicate.test(e) ? TypeResolution.TYPE_RESOLVED : new TypeResolution(message);
+   }
+
+   public static TypeResolution typeMustBeNumeric(Expression e) {
+       return e.dataType().isNumeric()? TypeResolution.TYPE_RESOLVED : new TypeResolution(
+               "Argument required to be numeric ('%s' of type '%s')", Expressions.name(e), e.dataType().esName());
+   }
}
@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression;

import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.DataTypeConversion;
import org.elasticsearch.xpack.sql.type.DataTypes;

import java.util.Objects;
@@ -21,8 +22,8 @@ public class Literal extends LeafExpression {

    public Literal(Location location, Object value, DataType dataType) {
        super(location);
-       this.value = value;
        this.dataType = dataType;
+       this.value = DataTypeConversion.convert(value, dataType);
    }

    public Object value() {
@@ -30,11 +30,6 @@ public abstract class UnaryExpression extends Expression {
        return child.nullable();
    }

-   @Override
-   public boolean foldable() {
-       return child.foldable();
-   }
-
    @Override
    public boolean resolved() {
        return child.resolved();
@@ -40,11 +40,6 @@ public abstract class Function extends NamedExpression {
        return name;
    }

-   @Override
-   public boolean foldable() {
-       return false;
-   }
-
    @Override
    public boolean nullable() {
        return false;
@@ -9,15 +9,8 @@ import org.elasticsearch.xpack.sql.expression.Alias;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
-import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
-import org.elasticsearch.xpack.sql.expression.function.scalar.ComposeProcessor;
-import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static java.util.Collections.emptyList;
-import static java.util.Collections.singletonList;
+import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
+import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;

public abstract class Functions {

@@ -25,8 +18,11 @@ public abstract class Functions {
        return e instanceof AggregateFunction;
    }

-   public static boolean isScalarFunction(Expression e) {
-       return e instanceof ScalarFunction;
+   public static boolean isUnaryScalarFunction(Expression e) {
+       if (e instanceof BinaryScalarFunction) {
+           throw new UnsupportedOperationException("not handled currently");
+       }
+       return e instanceof UnaryScalarFunction;
    }

    public static AggregateFunction extractAggregate(NamedExpression ne) {
@@ -35,8 +31,11 @@ public abstract class Functions {
        if (e instanceof Alias) {
            e = ((Alias) ne).child();
        }
-       else if (e instanceof ScalarFunction) {
-           e = ((ScalarFunction) e).argument();
+       else if (e instanceof UnaryScalarFunction) {
+           e = ((UnaryScalarFunction) e).field();
+       }
+       else if (e instanceof BinaryScalarFunction) {
+           throw new UnsupportedOperationException();
        }
        else if (e instanceof AggregateFunction) {
            return (AggregateFunction) e;
@@ -47,52 +46,4 @@ public abstract class Functions {
        }
        return null;
    }
-
-   public static List<Expression> unwrapScalarFunctionWithTail(Expression e) {
-       if (!(e instanceof ScalarFunction)) {
-           return emptyList();
-       }
-       List<Expression> exps = new ArrayList<>();
-       while (isScalarFunction(e)) {
-           ScalarFunction scalar = (ScalarFunction) e;
-           exps.add(scalar);
-           e = scalar.argument();
-       }
-       exps.add(e);
-       return exps;
-   }
-
-   public static List<ScalarFunction> unwrapScalarProcessor(Expression e) {
-       if (!(e instanceof ScalarFunction)) {
-           return emptyList();
-       }
-
-       // common-case (single function wrapper)
-       if (e instanceof ScalarFunction && !(((ScalarFunction) e).argument() instanceof ScalarFunction)) {
-           return singletonList((ScalarFunction) e);
-       }
-
-       List<ScalarFunction> exps = new ArrayList<>();
-       while (e instanceof ScalarFunction) {
-           ScalarFunction scalar = (ScalarFunction) e;
-           exps.add(scalar);
-           e = scalar.argument();
-       }
-       return exps;
-   }
-
-   public static ColumnProcessor chainProcessors(List<Expression> unwrappedScalar) {
-       ColumnProcessor proc = null;
-       for (Expression e : unwrappedScalar) {
-           if (e instanceof ScalarFunction) {
-               ScalarFunction sf = (ScalarFunction) e;
-               // A(B(C)) is applied backwards first C then B then A, the last function first
-               proc = proc == null ? sf.asProcessor() : new ComposeProcessor(sf.asProcessor(), proc);
-           }
-           else {
-               return proc;
-           }
-       }
-       return proc;
-   }
}
@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.aggregate;

import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.type.DataType;

public class Avg extends NumericAggregate implements EnclosedAgg {

@@ -18,4 +19,9 @@ public class Avg extends NumericAggregate implements EnclosedAgg {
    public String innerName() {
        return "avg";
    }
+
+   @Override
+   public DataType dataType() {
+       return field().dataType();
+   }
}
@@ -25,10 +25,7 @@ class NumericAggregate extends AggregateFunction {

    @Override
    protected TypeResolution resolveType() {
-       return field().dataType().isNumeric() ? TypeResolution.TYPE_RESOLVED : new TypeResolution(
-               "Function '%s' cannot be applied on a non-numeric expression ('%s' of type '%s')", functionName(),
-               Expressions.name(field()), field().dataType().esName());
+       return Expressions.typeMustBeNumeric(field());
    }

    @Override
@@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.expression.function.aggregate;

import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.Foldables;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
@@ -27,9 +28,9 @@ public class Percentile extends NumericAggregate implements EnclosedAgg {
        TypeResolution resolution = super.resolveType();

        if (TypeResolution.TYPE_RESOLVED.equals(resolution)) {
-           resolution = percent().dataType().isNumeric() ? TypeResolution.TYPE_RESOLVED :
-               new TypeResolution("Percentile#percent argument cannot be non-numeric (type is'%s')", percent().dataType().esName());
+           resolution = Expressions.typeMustBeNumeric(percent());
        }

        return resolution;
    }
@@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.expression.function.aggregate;

import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.Foldables;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
@@ -27,9 +28,9 @@ public class PercentileRank extends AggregateFunction implements EnclosedAgg {
        TypeResolution resolution = super.resolveType();

        if (TypeResolution.TYPE_RESOLVED.equals(resolution)) {
-           resolution = value.dataType().isNumeric() ? TypeResolution.TYPE_RESOLVED :
-               new TypeResolution("PercentileRank#value argument cannot be non-numeric (type is'%s')", value.dataType().esName());
+           resolution = Expressions.typeMustBeNumeric(value);
        }

        return resolution;
    }

@@ -9,7 +9,6 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;

import java.util.List;
-import java.util.Objects;

public class PercentileRanks extends CompoundNumericAggregate {

@@ -23,19 +22,4 @@ public class PercentileRanks extends CompoundNumericAggregate {
    public List<Expression> values() {
        return values;
    }
-
-   @Override
-   public boolean equals(Object obj) {
-       if (this == obj) {
-           return true;
-       }
-
-       if (obj == null || getClass() != obj.getClass()) {
-           return false;
-       }
-
-       PercentileRanks other = (PercentileRanks) obj;
-       return Objects.equals(field(), other.field())
-               && Objects.equals(values, other.values);
-   }
}

@@ -9,7 +9,6 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;

import java.util.List;
-import java.util.Objects;

public class Percentiles extends CompoundNumericAggregate {

@@ -23,19 +22,4 @@ public class Percentiles extends CompoundNumericAggregate {
    public List<Expression> percents() {
        return percents;
    }
-
-   @Override
-   public boolean equals(Object obj) {
-       if (this == obj) {
-           return true;
-       }
-
-       if (obj == null || getClass() != obj.getClass()) {
-           return false;
-       }
-
-       Percentiles other = (Percentiles) obj;
-       return Objects.equals(field(), other.field())
-               && Objects.equals(percents, other.percents);
-   }
}
@@ -0,0 +1,98 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar;

import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;

import java.util.Arrays;

import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;

public abstract class BinaryScalarFunction extends ScalarFunction {

    private final Expression left, right;

    protected BinaryScalarFunction(Location location, Expression left, Expression right) {
        super(location, Arrays.asList(left, right));
        this.left = left;
        this.right = right;
    }

    public Expression left() {
        return left;
    }

    public Expression right() {
        return right;
    }

    public boolean foldable() {
        return left.foldable() && right.foldable();
    }

    @Override
    public ScalarFunctionAttribute toAttribute() {
        return new ScalarFunctionAttribute(location(), name(), dataType(), id(), asScript(), orderBy(), asProcessor());
    }

    protected ScriptTemplate asScript() {
        ScriptTemplate leftScript = asScript(left());
        ScriptTemplate rightScript = asScript(right());

        return asScriptFrom(leftScript, rightScript);
    }

    protected abstract ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript);

    protected ScriptTemplate asScript(Expression exp) {
        if (exp.foldable()) {
            return asScriptFromFoldable(exp);
        }

        Attribute attr = Expressions.attribute(exp);
        if (attr != null) {
            if (attr instanceof ScalarFunctionAttribute) {
                return asScriptFrom((ScalarFunctionAttribute) attr);
            }
            if (attr instanceof AggregateFunctionAttribute) {
                return asScriptFrom((AggregateFunctionAttribute) attr);
            }
            // fall-back to
            return asScriptFrom((FieldAttribute) attr);
        }
        throw new SqlIllegalArgumentException("Cannot evaluate script for field %s", exp);
    }

    protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
        return scalar.script();
    }

    protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
        return new ScriptTemplate(formatTemplate("{}"),
                paramsBuilder().agg(aggregate.functionId(), aggregate.propertyPath()).build(),
                aggregate.dataType());
    }

    protected ScriptTemplate asScriptFrom(FieldAttribute field) {
        return new ScriptTemplate(formatTemplate("doc[{}].value"),
                paramsBuilder().variable(field.name()).build(),
                field.dataType());
    }

    protected ScriptTemplate asScriptFromFoldable(Expression foldable) {
        return new ScriptTemplate(formatTemplate("{}"),
                paramsBuilder().variable(foldable.fold()).build(),
                foldable.dataType());
    }
}
@@ -8,6 +8,9 @@ package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
@@ -19,17 +22,17 @@ import java.util.Objects;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;

-public class Cast extends ScalarFunction {
+public class Cast extends UnaryScalarFunction {

    private final DataType dataType;

-   public Cast(Location location, Expression argument, DataType dataType) {
-       super(location, argument);
+   public Cast(Location location, Expression field, DataType dataType) {
+       super(location, field);
        this.dataType = dataType;
    }

    public DataType from() {
-       return argument().dataType();
+       return field().dataType();
    }

    public DataType to() {
@@ -41,9 +44,19 @@ public class Cast extends ScalarFunction {
        return dataType;
    }

+   @Override
+   public boolean foldable() {
+       return field().foldable();
+   }
+
+   @Override
+   public Object fold() {
+       return DataTypeConversion.convert(field().fold(), dataType);
+   }
+
    @Override
    public boolean nullable() {
-       return argument().nullable() || DataTypeConversion.nullable(from(), to());
+       return field().nullable() || DataTypeConversion.nullable(from());
    }

    @Override
@@ -77,8 +90,8 @@ public class Cast extends ScalarFunction {
    }

    @Override
-   public ColumnProcessor asProcessor() {
-       return new CastProcessor(DataTypeConversion.conversionFor(from(), to()));
+   protected ProcessorDefinition makeProcessor() {
+       return new UnaryProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(field()), new CastProcessor(DataTypeConversion.conversionFor(from(), to())));
    }

    @Override
@@ -88,6 +101,6 @@ public class Cast extends ScalarFunction {

    @Override
    public String toString() {
-       return functionName() + "(" + argument().toString() + " AS " + to().sqlName() + ")#" + id();
+       return functionName() + "(" + field().toString() + " AS " + to().sqlName() + ")#" + id();
    }
}
@@ -7,35 +7,39 @@ package org.elasticsearch.xpack.sql.expression.function.scalar;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
import org.elasticsearch.xpack.sql.type.DataTypeConversion.Conversion;

import java.io.IOException;
+import java.util.Objects;

-public class CastProcessor implements ColumnProcessor {
-   public static final String NAME = "c";
+public class CastProcessor implements Processor {
+
+   public static final String NAME = "ca";

    private final Conversion conversion;

-   CastProcessor(Conversion conversion) {
+   public CastProcessor(Conversion conversion) {
        this.conversion = conversion;
    }

-   CastProcessor(StreamInput in) throws IOException {
+   public CastProcessor(StreamInput in) throws IOException {
        conversion = in.readEnum(Conversion.class);
    }

-   @Override
-   public void writeTo(StreamOutput out) throws IOException {
-       out.writeEnum(conversion);
-   }
-
    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
-   public Object apply(Object r) {
-       return conversion.convert(r);
+   public void writeTo(StreamOutput out) throws IOException {
+       out.writeEnum(conversion);
+   }
+
+   @Override
+   public Object process(Object input) {
+       return conversion.convert(input);
    }

    Conversion converter() {
@@ -44,20 +48,25 @@ public class CastProcessor implements ColumnProcessor {

    @Override
    public boolean equals(Object obj) {
-       if (obj == null || obj.getClass() != getClass()) {
+       if (this == obj) {
+           return true;
+       }
+
+       if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        CastProcessor other = (CastProcessor) obj;
-       return conversion.equals(other.conversion);
+       return Objects.equals(conversion, other.conversion);
    }

    @Override
    public int hashCode() {
-       return conversion.hashCode();
+       return Objects.hash(conversion);
    }

    @Override
    public String toString() {
-       return conversion.toString();
+       return conversion.name();
    }
}
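CastProcessor is the runtime half of Cast: when the operand folds to a constant, the conversion can happen at planning time (the new Cast.fold() above), otherwise the conversion is carried along as a processor and applied per value. A small stand-alone sketch of that split, with illustrative types only (not the x-pack classes):

import java.util.function.Function;

// Illustrative only: a cast folds eagerly over a constant, otherwise it stays a deferred conversion step.
final class CastSketch {
    enum Conversion {
        TO_LONG(v -> ((Number) v).longValue()),
        TO_STRING(String::valueOf);

        private final Function<Object, Object> fn;
        Conversion(Function<Object, Object> fn) { this.fn = fn; }
        Object convert(Object v) { return fn.apply(v); }
    }

    // plan-time folding when the operand is a known constant, a runtime step otherwise
    static Object foldOrDefer(Object constantOrNull, Conversion conversion) {
        return constantOrNull != null ? conversion.convert(constantOrNull) : conversion;
    }

    public static void main(String[] args) {
        System.out.println(foldOrDefer(3.7d, Conversion.TO_LONG));   // folded now -> 3
        Object deferred = foldOrDefer(null, Conversion.TO_LONG);     // kept for runtime
        System.out.println(((Conversion) deferred).convert(42.9d));  // applied later -> 42
    }
}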
@@ -1,32 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar;

import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;

import java.util.ArrayList;
import java.util.List;

public interface ColumnProcessor extends NamedWriteable {
    /**
     * All of the named writeables needed to deserialize the instances
     * of {@linkplain ColumnProcessor}.
     */
    static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
        entries.add(new NamedWriteableRegistry.Entry(ColumnProcessor.class, CastProcessor.NAME, CastProcessor::new));
        entries.add(new NamedWriteableRegistry.Entry(ColumnProcessor.class, ComposeProcessor.NAME, ComposeProcessor::new));
        entries.add(new NamedWriteableRegistry.Entry(ColumnProcessor.class, DateTimeProcessor.NAME, DateTimeProcessor::new));
        entries.add(new NamedWriteableRegistry.Entry(ColumnProcessor.class,
                MathFunctionProcessor.NAME, MathFunctionProcessor::new));
        entries.add(new NamedWriteableRegistry.Entry(ColumnProcessor.class,
                MatrixFieldProcessor.NAME, MatrixFieldProcessor::new));
        return entries;
    }

    Object apply(Object r);
}
@ -1,77 +0,0 @@
|
|||||||
/*
|
|
||||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
|
||||||
* or more contributor license agreements. Licensed under the Elastic License;
|
|
||||||
* you may not use this file except in compliance with the Elastic License.
|
|
||||||
*/
|
|
||||||
package org.elasticsearch.xpack.sql.expression.function.scalar;
|
|
||||||
|
|
||||||
import org.elasticsearch.common.io.stream.StreamInput;
|
|
||||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.util.Objects;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A {@linkplain ColumnProcessor} that composes the results of two
|
|
||||||
* {@linkplain ColumnProcessor}s.
|
|
||||||
*/
|
|
||||||
public class ComposeProcessor implements ColumnProcessor {
|
|
||||||
static final String NAME = ".";
|
|
||||||
private final ColumnProcessor first;
|
|
||||||
private final ColumnProcessor second;
|
|
||||||
|
|
||||||
public ComposeProcessor(ColumnProcessor first, ColumnProcessor second) {
|
|
||||||
this.first = first;
|
|
||||||
this.second = second;
|
|
||||||
}
|
|
||||||
|
|
||||||
public ComposeProcessor(StreamInput in) throws IOException {
|
|
||||||
first = in.readNamedWriteable(ColumnProcessor.class);
|
|
||||||
second = in.readNamedWriteable(ColumnProcessor.class);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void writeTo(StreamOutput out) throws IOException {
|
|
||||||
out.writeNamedWriteable(first);
|
|
||||||
out.writeNamedWriteable(second);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String getWriteableName() {
|
|
||||||
return NAME;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Object apply(Object r) {
|
|
||||||
return second.apply(first.apply(r));
|
|
||||||
}
|
|
||||||
|
|
||||||
ColumnProcessor first() {
|
|
||||||
return first;
|
|
||||||
}
|
|
||||||
|
|
||||||
ColumnProcessor second() {
|
|
||||||
return second;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean equals(Object obj) {
|
|
||||||
if (obj == null || obj.getClass() != getClass()) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
ComposeProcessor other = (ComposeProcessor) obj;
|
|
||||||
return first.equals(other.first)
|
|
||||||
&& second.equals(other.second);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int hashCode() {
|
|
||||||
return Objects.hash(first, second);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String toString() {
|
|
||||||
// borrow Haskell's notation for function comosition
|
|
||||||
return "(" + second + " . " + first + ")";
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,64 +0,0 @@
|
|||||||
/*
|
|
||||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
|
||||||
* or more contributor license agreements. Licensed under the Elastic License;
|
|
||||||
* you may not use this file except in compliance with the Elastic License.
|
|
||||||
*/
|
|
||||||
package org.elasticsearch.xpack.sql.expression.function.scalar;
|
|
||||||
|
|
||||||
import org.elasticsearch.common.io.stream.StreamInput;
|
|
||||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
|
||||||
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
|
|
||||||
public class MathFunctionProcessor implements ColumnProcessor {
|
|
||||||
public static final String NAME = "m";
|
|
||||||
|
|
||||||
private final MathProcessor processor;
|
|
||||||
|
|
||||||
public MathFunctionProcessor(MathProcessor processor) {
|
|
||||||
this.processor = processor;
|
|
||||||
}
|
|
||||||
|
|
||||||
MathFunctionProcessor(StreamInput in) throws IOException {
|
|
||||||
processor = in.readEnum(MathProcessor.class);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void writeTo(StreamOutput out) throws IOException {
|
|
||||||
out.writeEnum(processor);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String getWriteableName() {
|
|
||||||
return NAME;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Object apply(Object r) {
|
|
||||||
return processor.apply(r);
|
|
||||||
}
|
|
||||||
|
|
||||||
MathProcessor processor() {
|
|
||||||
return processor;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean equals(Object obj) {
|
|
||||||
if (obj == null || obj.getClass() != getClass()) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
MathFunctionProcessor other = (MathFunctionProcessor) obj;
|
|
||||||
return processor == other.processor;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int hashCode() {
|
|
||||||
return processor.hashCode();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String toString() {
|
|
||||||
return processor.toString();
|
|
||||||
}
|
|
||||||
}
|
|
@@ -0,0 +1,47 @@ new file: Processors.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar;

import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.UnaryArithmeticProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ChainingProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.MatrixFieldProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;

import java.util.ArrayList;
import java.util.List;

public abstract class Processors {

    /**
     * All of the named writeables needed to deserialize the instances of
     * {@linkplain Processors}.
     */
    public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
        // base
        entries.add(new Entry(Processor.class, ConstantProcessor.NAME, ConstantProcessor::new));
        entries.add(new Entry(Processor.class, HitExtractorProcessor.NAME, HitExtractorProcessor::new));
        entries.add(new Entry(Processor.class, CastProcessor.NAME, CastProcessor::new));
        entries.add(new Entry(Processor.class, ChainingProcessor.NAME, ChainingProcessor::new));
        entries.add(new Entry(Processor.class, MatrixFieldProcessor.NAME, MatrixFieldProcessor::new));

        // arithmetic
        entries.add(new Entry(Processor.class, BinaryArithmeticProcessor.NAME, BinaryArithmeticProcessor::new));
        entries.add(new Entry(Processor.class, UnaryArithmeticProcessor.NAME, UnaryArithmeticProcessor::new));
        // datetime
        entries.add(new Entry(Processor.class, DateTimeProcessor.NAME, DateTimeProcessor::new));
        // math
        entries.add(new Entry(Processor.class, MathProcessor.NAME, MathProcessor::new));
        return entries;
    }
}
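
Every Processor is a NamedWriteable keyed by its short NAME, so this registry is what lets a node rebuild the 'compiled' processors it receives over the wire. A test-style round-trip sketch, assuming the usual Elasticsearch stream utilities (BytesStreamOutput, NamedWriteableAwareStreamInput) are available on the classpath:

    // serialize a processor, then read it back through the registry built above
    NamedWriteableRegistry registry = new NamedWriteableRegistry(Processors.getNamedWriteables());
    BytesStreamOutput out = new BytesStreamOutput();
    out.writeNamedWriteable(new UnaryArithmeticProcessor(UnaryArithmeticProcessor.UnaryArithmeticOperation.NEGATE));
    StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
    Processor back = in.readNamedWriteable(Processor.class);   // resolved via its NAME ("au")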
@ -5,83 +5,41 @@
|
|||||||
*/
|
*/
|
||||||
package org.elasticsearch.xpack.sql.expression.function.scalar;
|
package org.elasticsearch.xpack.sql.expression.function.scalar;
|
||||||
|
|
||||||
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
|
|
||||||
import org.elasticsearch.xpack.sql.expression.Attribute;
|
|
||||||
import org.elasticsearch.xpack.sql.expression.Expression;
|
import org.elasticsearch.xpack.sql.expression.Expression;
|
||||||
import org.elasticsearch.xpack.sql.expression.Expressions;
|
|
||||||
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
|
|
||||||
import org.elasticsearch.xpack.sql.expression.function.Function;
|
import org.elasticsearch.xpack.sql.expression.function.Function;
|
||||||
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
|
||||||
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
|
|
||||||
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
|
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
|
||||||
import org.elasticsearch.xpack.sql.tree.Location;
|
import org.elasticsearch.xpack.sql.tree.Location;
|
||||||
|
|
||||||
import static java.util.Collections.emptyList;
|
import java.util.List;
|
||||||
import static java.util.Collections.singletonList;
|
|
||||||
|
|
||||||
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
|
import static java.util.Collections.emptyList;
|
||||||
|
|
||||||
public abstract class ScalarFunction extends Function {
|
public abstract class ScalarFunction extends Function {
|
||||||
|
|
||||||
private final Expression argument;
|
private ProcessorDefinition lazyProcessor = null;
|
||||||
|
|
||||||
protected ScalarFunction(Location location) {
|
protected ScalarFunction(Location location) {
|
||||||
super(location, emptyList());
|
super(location, emptyList());
|
||||||
this.argument = null;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
protected ScalarFunction(Location location, Expression child) {
|
protected ScalarFunction(Location location, List<Expression> fields) {
|
||||||
super(location, singletonList(child));
|
super(location, fields);
|
||||||
this.argument = child;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Expression argument() {
|
|
||||||
return argument;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ScalarFunctionAttribute toAttribute() {
|
public abstract ScalarFunctionAttribute toAttribute();
|
||||||
String functionId = null;
|
|
||||||
Attribute attr = Expressions.attribute(argument());
|
|
||||||
|
|
||||||
if (attr instanceof AggregateFunctionAttribute) {
|
protected abstract ScriptTemplate asScript();
|
||||||
AggregateFunctionAttribute afa = (AggregateFunctionAttribute) attr;
|
|
||||||
functionId = afa.functionId();
|
public ProcessorDefinition asProcessor() {
|
||||||
|
if (lazyProcessor == null) {
|
||||||
|
lazyProcessor = makeProcessor();
|
||||||
}
|
}
|
||||||
|
return lazyProcessor;
|
||||||
return new ScalarFunctionAttribute(location(), name(), dataType(), id(), asScript(), orderBy(), functionId);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
protected ScriptTemplate asScript() {
|
protected abstract ProcessorDefinition makeProcessor();
|
||||||
Attribute attr = Expressions.attribute(argument());
|
|
||||||
if (attr != null) {
|
|
||||||
if (attr instanceof ScalarFunctionAttribute) {
|
|
||||||
return asScriptFrom((ScalarFunctionAttribute) attr);
|
|
||||||
}
|
|
||||||
if (attr instanceof AggregateFunctionAttribute) {
|
|
||||||
return asScriptFrom((AggregateFunctionAttribute) attr);
|
|
||||||
}
|
|
||||||
|
|
||||||
// fall-back to
|
|
||||||
return asScriptFrom((FieldAttribute) attr);
|
|
||||||
}
|
|
||||||
throw new SqlIllegalArgumentException("Cannot evaluate script for field %s", argument());
|
|
||||||
}
|
|
||||||
|
|
||||||
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
|
|
||||||
ScriptTemplate nested = scalar.script();
|
|
||||||
Params p = paramsBuilder().script(nested.params()).build();
|
|
||||||
return new ScriptTemplate(chainScalarTemplate(nested.template()), p, dataType());
|
|
||||||
}
|
|
||||||
|
|
||||||
protected abstract ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate);
|
|
||||||
|
|
||||||
protected abstract ScriptTemplate asScriptFrom(FieldAttribute field);
|
|
||||||
|
|
||||||
protected abstract String chainScalarTemplate(String template);
|
|
||||||
|
|
||||||
|
|
||||||
public abstract ColumnProcessor asProcessor();
|
|
||||||
|
|
||||||
// used if the function is monotonic and thus does not have to be computed for ordering purposes
|
// used if the function is monotonic and thus does not have to be computed for ordering purposes
|
||||||
public Expression orderBy() {
|
public Expression orderBy() {
|
||||||
|
@ -9,6 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Attribute;
|
|||||||
import org.elasticsearch.xpack.sql.expression.Expression;
|
import org.elasticsearch.xpack.sql.expression.Expression;
|
||||||
import org.elasticsearch.xpack.sql.expression.ExpressionId;
|
import org.elasticsearch.xpack.sql.expression.ExpressionId;
|
||||||
import org.elasticsearch.xpack.sql.expression.TypedAttribute;
|
import org.elasticsearch.xpack.sql.expression.TypedAttribute;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
|
||||||
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
|
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
|
||||||
import org.elasticsearch.xpack.sql.tree.Location;
|
import org.elasticsearch.xpack.sql.tree.Location;
|
||||||
import org.elasticsearch.xpack.sql.type.DataType;
|
import org.elasticsearch.xpack.sql.type.DataType;
|
||||||
@ -17,17 +18,17 @@ public class ScalarFunctionAttribute extends TypedAttribute {
|
|||||||
|
|
||||||
private final ScriptTemplate script;
|
private final ScriptTemplate script;
|
||||||
private final Expression orderBy;
|
private final Expression orderBy;
|
||||||
private final String functionId;
|
private final ProcessorDefinition processorDef;
|
||||||
|
|
||||||
ScalarFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, ScriptTemplate script, Expression orderBy, String functionId) {
|
ScalarFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, ScriptTemplate script, Expression orderBy, ProcessorDefinition processorDef) {
|
||||||
this(location, name, dataType, null, true, id, false, script, orderBy, functionId);
|
this(location, name, dataType, null, true, id, false, script, orderBy, processorDef);
|
||||||
}
|
}
|
||||||
|
|
||||||
ScalarFunctionAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic, ScriptTemplate script, Expression orderBy, String functionId) {
|
ScalarFunctionAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic, ScriptTemplate script, Expression orderBy, ProcessorDefinition processorDef) {
|
||||||
super(location, name, dataType, qualifier, nullable, id, synthetic);
|
super(location, name, dataType, qualifier, nullable, id, synthetic);
|
||||||
this.script = script;
|
this.script = script;
|
||||||
this.orderBy = orderBy;
|
this.orderBy = orderBy;
|
||||||
this.functionId = functionId;
|
this.processorDef = processorDef;
|
||||||
}
|
}
|
||||||
|
|
||||||
public ScriptTemplate script() {
|
public ScriptTemplate script() {
|
||||||
@ -38,18 +39,18 @@ public class ScalarFunctionAttribute extends TypedAttribute {
|
|||||||
return orderBy;
|
return orderBy;
|
||||||
}
|
}
|
||||||
|
|
||||||
public String functionId() {
|
public ProcessorDefinition processorDef() {
|
||||||
return functionId;
|
return processorDef;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected Expression canonicalize() {
|
protected Expression canonicalize() {
|
||||||
return new ScalarFunctionAttribute(location(), "<none>", dataType(), null, true, id(), false, script, orderBy, functionId);
|
return new ScalarFunctionAttribute(location(), "<none>", dataType(), null, true, id(), false, script, orderBy, processorDef);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
|
protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
|
||||||
return new ScalarFunctionAttribute(location, name, dataType, qualifier, nullable, id, synthetic, script, orderBy, functionId);
|
return new ScalarFunctionAttribute(location, name, dataType, qualifier, nullable, id, synthetic, script, orderBy, processorDef);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -0,0 +1,95 @@
|
|||||||
|
/*
|
||||||
|
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||||
|
* or more contributor license agreements. Licensed under the Elastic License;
|
||||||
|
* you may not use this file except in compliance with the Elastic License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.xpack.sql.expression.function.scalar;
|
||||||
|
|
||||||
|
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.Attribute;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.Expression;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.Expressions;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
|
||||||
|
import org.elasticsearch.xpack.sql.tree.Location;
|
||||||
|
|
||||||
|
import static java.util.Collections.singletonList;
|
||||||
|
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
|
||||||
|
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
|
||||||
|
|
||||||
|
public abstract class UnaryScalarFunction extends ScalarFunction {
|
||||||
|
|
||||||
|
private final Expression field;
|
||||||
|
|
||||||
|
protected UnaryScalarFunction(Location location) {
|
||||||
|
super(location);
|
||||||
|
this.field = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected UnaryScalarFunction(Location location, Expression field) {
|
||||||
|
super(location, singletonList(field));
|
||||||
|
this.field = field;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Expression field() {
|
||||||
|
return field;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean foldable() {
|
||||||
|
return field.foldable();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ScalarFunctionAttribute toAttribute() {
|
||||||
|
String functionId = null;
|
||||||
|
Attribute attr = Expressions.attribute(field());
|
||||||
|
|
||||||
|
if (attr instanceof AggregateFunctionAttribute) {
|
||||||
|
AggregateFunctionAttribute afa = (AggregateFunctionAttribute) attr;
|
||||||
|
functionId = afa.functionId();
|
||||||
|
}
|
||||||
|
|
||||||
|
return new ScalarFunctionAttribute(location(), name(), dataType(), id(), asScript(), orderBy(), asProcessor());
|
||||||
|
}
|
||||||
|
|
||||||
|
protected ScriptTemplate asScript() {
|
||||||
|
if (field.foldable()) {
|
||||||
|
return asScriptFromFoldable(field);
|
||||||
|
}
|
||||||
|
|
||||||
|
Attribute attr = Expressions.attribute(field());
|
||||||
|
if (attr != null) {
|
||||||
|
if (attr instanceof ScalarFunctionAttribute) {
|
||||||
|
return asScriptFrom((ScalarFunctionAttribute) attr);
|
||||||
|
}
|
||||||
|
if (attr instanceof AggregateFunctionAttribute) {
|
||||||
|
return asScriptFrom((AggregateFunctionAttribute) attr);
|
||||||
|
}
|
||||||
|
|
||||||
|
// fall-back to
|
||||||
|
return asScriptFrom((FieldAttribute) attr);
|
||||||
|
}
|
||||||
|
throw new SqlIllegalArgumentException("Cannot evaluate script for field %s", field());
|
||||||
|
}
|
||||||
|
|
||||||
|
protected ScriptTemplate asScriptFromFoldable(Expression foldable) {
|
||||||
|
return new ScriptTemplate(formatTemplate("{}"),
|
||||||
|
paramsBuilder().variable(foldable.fold()).build(),
|
||||||
|
foldable.dataType());
|
||||||
|
}
|
||||||
|
|
||||||
|
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
|
||||||
|
ScriptTemplate nested = scalar.script();
|
||||||
|
Params p = paramsBuilder().script(nested.params()).build();
|
||||||
|
return new ScriptTemplate(chainScalarTemplate(nested.template()), p, dataType());
|
||||||
|
}
|
||||||
|
|
||||||
|
protected abstract ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate);
|
||||||
|
|
||||||
|
protected abstract ScriptTemplate asScriptFrom(FieldAttribute field);
|
||||||
|
|
||||||
|
protected abstract String chainScalarTemplate(String template);
|
||||||
|
}
|
@@ -0,0 +1,22 @@ new file: Add.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;

import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Location;

public class Add extends ArithmeticFunction {

    public Add(Location location, Expression left, Expression right) {
        super(location, left, right, BinaryArithmeticOperation.ADD);
    }

    @Override
    public Number fold() {
        return Arithmetics.add((Number) left().fold(), (Number) right().fold());
    }
}
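
fold() is what expression folding calls when both operands are literals, so a constant sum never has to reach a script; it evaluates through the package-private Arithmetics helper (see the new Arithmetics class further down). A same-package sketch of what that means in practice:

    Number three = Arithmetics.add(1, 2);    // Integer 3
    Number wide  = Arithmetics.add(1, 2L);   // Long 3, the wider operand decides the result type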
@ -0,0 +1,101 @@
|
|||||||
|
/*
|
||||||
|
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||||
|
* or more contributor license agreements. Licensed under the Elastic License;
|
||||||
|
* you may not use this file except in compliance with the Elastic License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
|
||||||
|
|
||||||
|
import org.elasticsearch.xpack.sql.expression.Expression;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.Literal;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
|
||||||
|
import org.elasticsearch.xpack.sql.tree.Location;
|
||||||
|
import org.elasticsearch.xpack.sql.type.DataType;
|
||||||
|
|
||||||
|
import java.util.Locale;
|
||||||
|
|
||||||
|
import static java.lang.String.format;
|
||||||
|
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
|
||||||
|
|
||||||
|
public abstract class ArithmeticFunction extends BinaryScalarFunction {
|
||||||
|
|
||||||
|
private BinaryArithmeticOperation operation;
|
||||||
|
|
||||||
|
ArithmeticFunction(Location location, Expression left, Expression right, BinaryArithmeticOperation operation) {
|
||||||
|
super(location, left, right);
|
||||||
|
this.operation = operation;
|
||||||
|
}
|
||||||
|
|
||||||
|
public BinaryArithmeticOperation operation() {
|
||||||
|
return operation;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public DataType dataType() {
|
||||||
|
// left or right have to be compatible so either one works
|
||||||
|
return left().dataType();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected TypeResolution resolveType() {
|
||||||
|
if (!childrenResolved()) {
|
||||||
|
return new TypeResolution("Unresolved children");
|
||||||
|
}
|
||||||
|
DataType l = left().dataType();
|
||||||
|
DataType r = right().dataType();
|
||||||
|
|
||||||
|
TypeResolution resolution = resolveInputType(l);
|
||||||
|
|
||||||
|
if (resolution == TypeResolution.TYPE_RESOLVED) {
|
||||||
|
return resolveInputType(r);
|
||||||
|
}
|
||||||
|
return resolution;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected TypeResolution resolveInputType(DataType inputType) {
|
||||||
|
return inputType.isNumeric() ? TypeResolution.TYPE_RESOLVED
|
||||||
|
: new TypeResolution("'%s' requires a numeric type, not %s", operation, inputType.sqlName());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) {
|
||||||
|
return new ScriptTemplate(format(Locale.ROOT, "(%s) %s (%s)", leftScript.template(), operation.symbol(), rightScript.template()),
|
||||||
|
paramsBuilder().script(leftScript.params()).script(rightScript.params()).build(),
|
||||||
|
dataType());
|
||||||
|
}
|
||||||
|
|
||||||
|
protected final BinaryArithmeticProcessorDefinition makeProcessor() {
|
||||||
|
return new BinaryArithmeticProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(left()), ProcessorDefinitions.toProcessorDefinition(right()), operation);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String name() {
|
||||||
|
return toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
sb.append(left());
|
||||||
|
if (!(left() instanceof Literal)) {
|
||||||
|
sb.insert(0, "(");
|
||||||
|
sb.append(")");
|
||||||
|
}
|
||||||
|
sb.append(" ");
|
||||||
|
sb.append(operation);
|
||||||
|
sb.append(" ");
|
||||||
|
int pos = sb.length();
|
||||||
|
sb.append(right());
|
||||||
|
if (!(right() instanceof Literal)) {
|
||||||
|
sb.insert(pos, "(");
|
||||||
|
sb.append(")");
|
||||||
|
}
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
protected boolean useParanthesis() {
|
||||||
|
return !(left() instanceof Literal) || !(right() instanceof Literal);
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,105 @@ new file: Arithmetics.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;

/**
 * Arithmetic operation using the type widening rules of the JLS 5.6.2 namely
 * widen to double or float or long or int in this order.
 */
abstract class Arithmetics {

    static Number add(Number l, Number r) {
        if (l instanceof Double || r instanceof Double) {
            return Double.valueOf(l.doubleValue() + r.doubleValue());
        }
        if (l instanceof Float || r instanceof Float) {
            return Float.valueOf(l.floatValue() + r.floatValue());
        }
        if (l instanceof Long || r instanceof Long) {
            return Long.valueOf(Math.addExact(l.longValue(), r.longValue()));
        }

        return Integer.valueOf(Math.addExact(l.intValue(), r.intValue()));
    }

    static Number sub(Number l, Number r) {
        if (l instanceof Double || r instanceof Double) {
            return Double.valueOf(l.doubleValue() - r.doubleValue());
        }
        if (l instanceof Float || r instanceof Float) {
            return Float.valueOf(l.floatValue() - r.floatValue());
        }
        if (l instanceof Long || r instanceof Long) {
            return Long.valueOf(Math.subtractExact(l.longValue(), r.longValue()));
        }

        return Integer.valueOf(Math.subtractExact(l.intValue(), r.intValue()));
    }

    static Number mul(Number l, Number r) {
        if (l instanceof Double || r instanceof Double) {
            return Double.valueOf(l.doubleValue() * r.doubleValue());
        }
        if (l instanceof Float || r instanceof Float) {
            return Float.valueOf(l.floatValue() * r.floatValue());
        }
        if (l instanceof Long || r instanceof Long) {
            return Long.valueOf(Math.multiplyExact(l.longValue(), r.longValue()));
        }

        return Integer.valueOf(Math.multiplyExact(l.intValue(), r.intValue()));
    }

    static Number div(Number l, Number r) {
        if (l instanceof Double || r instanceof Double) {
            return l.doubleValue() / r.doubleValue();
        }
        if (l instanceof Float || r instanceof Float) {
            return l.floatValue() / r.floatValue();
        }
        if (l instanceof Long || r instanceof Long) {
            return l.longValue() / r.longValue();
        }

        return l.intValue() / r.intValue();
    }

    static Number mod(Number l, Number r) {
        if (l instanceof Long || r instanceof Long) {
            return Long.valueOf(Math.floorMod(l.longValue(), r.longValue()));
        }
        if (l instanceof Double || r instanceof Double) {
            return Double.valueOf(l.doubleValue() % r.doubleValue());
        }
        if (l instanceof Float || r instanceof Float) {
            return Float.valueOf(l.floatValue() % r.floatValue());
        }

        return Math.floorMod(l.intValue(), r.intValue());
    }

    static Number negate(Number n) {
        if (n instanceof Double) {
            double d = n.doubleValue();
            if (d == Double.MIN_VALUE) {
                throw new ArithmeticException("double overflow");
            }
            return Double.valueOf(-n.doubleValue());
        }
        if (n instanceof Float) {
            float f = n.floatValue();
            if (f == Float.MIN_VALUE) {
                throw new ArithmeticException("float overflow");
            }
            return Float.valueOf(-n.floatValue());
        }
        if (n instanceof Long) {
            return Long.valueOf(Math.negateExact(n.longValue()));
        }

        return Integer.valueOf(Math.negateExact(n.intValue()));
    }
}
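
The widening ladder above mirrors what a Java numeric expression would do, with the integral branches made overflow-safe through the Math.*Exact methods. A self-contained illustration of the same rule using only the JDK, independent of this class:

    Number a = 1 + 2L;    // the long operand widens the int: a is a java.lang.Long (3)
    Number b = 1L + 0.5;  // the double operand wins: b is a java.lang.Double (1.5)
    // Math.addExact(Long.MAX_VALUE, 1L) throws ArithmeticException instead of silently wrapping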
@@ -0,0 +1,106 @@ new file: BinaryArithmeticProcessor.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.BinaryProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;

import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import java.util.function.BiFunction;

import static java.lang.String.format;

public class BinaryArithmeticProcessor extends BinaryProcessor {

    public enum BinaryArithmeticOperation {

        ADD(Arithmetics::add, "+"),
        SUB(Arithmetics::sub, "-"),
        MUL(Arithmetics::mul, "*"),
        DIV(Arithmetics::div, "/"),
        MOD(Arithmetics::mod, "%");

        private final BiFunction<Number, Number, Number> process;
        private final String symbol;

        BinaryArithmeticOperation(BiFunction<Number, Number, Number> process, String symbol) {
            this.process = process;
            this.symbol = symbol;
        }

        public String symbol() {
            return symbol;
        }

        public final Number apply(Number left, Number right) {
            return process.apply(left, right);
        }

        @Override
        public String toString() {
            return symbol;
        }
    }

    public static final String NAME = "ab";

    private final BinaryArithmeticOperation operation;

    public BinaryArithmeticProcessor(Processor left, Processor right, BinaryArithmeticOperation operation) {
        super(left, right);
        this.operation = operation;
    }

    public BinaryArithmeticProcessor(StreamInput in) throws IOException {
        super(in);
        operation = in.readEnum(BinaryArithmeticOperation.class);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    protected void doWrite(StreamOutput out) throws IOException {
        out.writeEnum(operation);
    }

    @Override
    protected Object doProcess(Object left, Object right) {
        return operation.apply((Number) left, (Number) right);
    }

    @Override
    public int hashCode() {
        return operation.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        BinaryArithmeticProcessor other = (BinaryArithmeticProcessor) obj;
        return Objects.equals(operation, other.operation)
                && Objects.equals(left(), other.left())
                && Objects.equals(right(), other.right());
    }

    @Override
    public String toString() {
        return format(Locale.ROOT, "(%s %s %s)", left(), operation, right());
    }
}
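
Each operation is just a display symbol plus a method reference into Arithmetics, so evaluating a binary processor bottoms out in plain Number math. A same-package sketch of the enum on its own:

    Number sum = BinaryArithmeticOperation.ADD.apply(40, 2);   // Integer 42, via Arithmetics::add
    String sym = BinaryArithmeticOperation.MOD.symbol();       // "%"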
@@ -0,0 +1,53 @@ new file: BinaryArithmeticProcessorDefinition.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;

import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;

import java.util.Objects;

public class BinaryArithmeticProcessorDefinition extends BinaryProcessorDefinition {

    private final BinaryArithmeticOperation operation;

    public BinaryArithmeticProcessorDefinition(Expression expression, ProcessorDefinition left, ProcessorDefinition right, BinaryArithmeticOperation operation) {
        super(expression, left, right);
        this.operation = operation;
    }

    public BinaryArithmeticOperation operation() {
        return operation;
    }

    @Override
    public BinaryArithmeticProcessor asProcessor() {
        return new BinaryArithmeticProcessor(left().asProcessor(), right().asProcessor(), operation);
    }

    @Override
    public int hashCode() {
        return Objects.hash(left(), right(), operation);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        BinaryArithmeticProcessorDefinition other = (BinaryArithmeticProcessorDefinition) obj;
        return Objects.equals(operation, other.operation)
                && Objects.equals(left(), other.left())
                && Objects.equals(right(), other.right());
    }
}
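
This is the folding-time half of the pair: the definition sits in the immutable ProcessorDefinition tree and only becomes the runtime BinaryArithmeticProcessor when asProcessor() is called. A hedged sketch of that hand-off; `expr`, `leftDef` and `rightDef` are placeholders for an expression and the definitions of its two inputs:

    BinaryArithmeticProcessorDefinition mul =
            new BinaryArithmeticProcessorDefinition(expr, leftDef, rightDef, BinaryArithmeticOperation.MUL);
    Processor runtime = mul.asProcessor();   // wraps leftDef.asProcessor() and rightDef.asProcessor() with MUL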
@@ -0,0 +1,29 @@ new file: Div.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;

import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypeConversion;

public class Div extends ArithmeticFunction {

    public Div(Location location, Expression left, Expression right) {
        super(location, left, right, BinaryArithmeticOperation.DIV);
    }

    @Override
    public Object fold() {
        return Arithmetics.div((Number) left().fold(), (Number) right().fold());
    }

    @Override
    public DataType dataType() {
        return DataTypeConversion.commonType(left().dataType(), right().dataType());
    }
}
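
Division keeps the widening semantics of Arithmetics.div, so a purely integral division truncates, and Div overrides dataType() to the common type of both sides instead of blindly taking the left one. For instance (same-package sketch):

    Arithmetics.div(5, 2);     // Integer 2, integral division truncates
    Arithmetics.div(5, 2d);    // Double 2.5, one double operand widens the result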
@@ -0,0 +1,22 @@ new file: Mod.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;

import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Location;

public class Mod extends ArithmeticFunction {

    public Mod(Location location, Expression left, Expression right) {
        super(location, left, right, BinaryArithmeticOperation.MOD);
    }

    @Override
    public Object fold() {
        return Arithmetics.mod((Number) left().fold(), (Number) right().fold());
    }
}
@@ -0,0 +1,22 @@ new file: Mul.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;

import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Location;

public class Mul extends ArithmeticFunction {

    public Mul(Location location, Expression left, Expression right) {
        super(location, left, right, BinaryArithmeticOperation.MUL);
    }

    @Override
    public Object fold() {
        return Arithmetics.mul((Number) left().fold(), (Number) right().fold());
    }
}
@ -0,0 +1,68 @@
|
|||||||
|
/*
|
||||||
|
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||||
|
* or more contributor license agreements. Licensed under the Elastic License;
|
||||||
|
* you may not use this file except in compliance with the Elastic License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
|
||||||
|
|
||||||
|
import org.elasticsearch.xpack.sql.expression.Expression;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.Expressions;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.UnaryArithmeticProcessor.UnaryArithmeticOperation;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
|
||||||
|
import org.elasticsearch.xpack.sql.tree.Location;
|
||||||
|
import org.elasticsearch.xpack.sql.type.DataType;
|
||||||
|
|
||||||
|
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
|
||||||
|
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
|
||||||
|
|
||||||
|
public class Neg extends UnaryScalarFunction {
|
||||||
|
|
||||||
|
public Neg(Location location, Expression field) {
|
||||||
|
super(location, field);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected TypeResolution resolveType() {
|
||||||
|
return Expressions.typeMustBeNumeric(field());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Object fold() {
|
||||||
|
return Arithmetics.negate((Number) field().fold());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public DataType dataType() {
|
||||||
|
return field().dataType();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
|
||||||
|
return new ScriptTemplate(formatTemplate("{}"),
|
||||||
|
paramsBuilder().agg(aggregate.functionId(), aggregate.propertyPath()).build(),
|
||||||
|
dataType());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected ScriptTemplate asScriptFrom(FieldAttribute field) {
|
||||||
|
return new ScriptTemplate(formatTemplate("doc[{}].value"),
|
||||||
|
paramsBuilder().variable(field.name()).build(),
|
||||||
|
dataType());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String chainScalarTemplate(String template) {
|
||||||
|
return template;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected ProcessorDefinition makeProcessor() {
|
||||||
|
return new UnaryProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(field()), new UnaryArithmeticProcessor(UnaryArithmeticOperation.NEGATE));
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,22 @@ new file: Sub.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;

import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Location;

public class Sub extends ArithmeticFunction {

    public Sub(Location location, Expression left, Expression right) {
        super(location, left, right, BinaryArithmeticOperation.SUB);
    }

    @Override
    public Object fold() {
        return Arithmetics.sub((Number) left().fold(), (Number) right().fold());
    }
}
@@ -0,0 +1,72 @@ new file: UnaryArithmeticProcessor.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;

import java.io.IOException;
import java.util.function.Function;

public class UnaryArithmeticProcessor implements Processor {

    public enum UnaryArithmeticOperation {

        NEGATE(Arithmetics::negate);

        private final Function<Number, Number> process;

        UnaryArithmeticOperation(Function<Number, Number> process) {
            this.process = process;
        }

        public final Number apply(Number number) {
            return process.apply(number);
        }

        public String symbol() {
            return "-";
        }
    }

    public static final String NAME = "au";

    private final UnaryArithmeticOperation operation;

    public UnaryArithmeticProcessor(UnaryArithmeticOperation operation) {
        this.operation = operation;
    }

    public UnaryArithmeticProcessor(StreamInput in) throws IOException {
        operation = in.readEnum(UnaryArithmeticOperation.class);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeEnum(operation);
    }

    @Override
    public Object process(Object input) {
        if (input instanceof Number) {
            return operation.apply((Number) input);
        }
        throw new SqlIllegalArgumentException("A number is required; received %s", input);
    }

    @Override
    public String toString() {
        return operation.symbol() + super.toString();
    }
}
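
NEGATE is the only unary operation so far, and anything that is not a Number is rejected up front. A brief sketch of the runtime behaviour:

    Processor neg = new UnaryArithmeticProcessor(UnaryArithmeticProcessor.UnaryArithmeticOperation.NEGATE);
    neg.process(5);        // Integer -5, via Arithmetics.negate and Math.negateExact
    neg.process("five");   // throws SqlIllegalArgumentException: a number is required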
@@ -1,35 +0,0 @@ deleted file: DateTimeExtractor.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

import org.joda.time.DateTimeFieldType;
import org.joda.time.ReadableDateTime;

/**
 * Extracts portions of {@link ReadableDateTime}s. Note that the position in the enum is used for serialization.
 */
public enum DateTimeExtractor {
    DAY_OF_MONTH(DateTimeFieldType.dayOfMonth()),
    DAY_OF_WEEK(DateTimeFieldType.dayOfWeek()),
    DAY_OF_YEAR(DateTimeFieldType.dayOfYear()),
    HOUR_OF_DAY(DateTimeFieldType.hourOfDay()),
    MINUTE_OF_DAY(DateTimeFieldType.minuteOfDay()),
    MINUTE_OF_HOUR(DateTimeFieldType.minuteOfHour()),
    MONTH_OF_YEAR(DateTimeFieldType.monthOfYear()),
    SECOND_OF_MINUTE(DateTimeFieldType.secondOfMinute()),
    WEEK_OF_YEAR(DateTimeFieldType.weekOfWeekyear()),
    YEAR(DateTimeFieldType.year());

    private final DateTimeFieldType field;

    DateTimeExtractor(DateTimeFieldType field) {
        this.field = field;
    }

    public int extract(ReadableDateTime dt) {
        return dt.get(field);
    }
}
@ -10,9 +10,11 @@ import org.elasticsearch.xpack.sql.expression.Expressions;
|
|||||||
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
|
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
|
||||||
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
|
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
|
||||||
import org.elasticsearch.xpack.sql.expression.function.aware.TimeZoneAware;
|
import org.elasticsearch.xpack.sql.expression.function.aware.TimeZoneAware;
|
||||||
import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
|
import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
|
||||||
import org.elasticsearch.xpack.sql.expression.function.scalar.DateTimeProcessor;
|
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
|
||||||
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
|
||||||
|
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
|
||||||
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
|
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
|
||||||
import org.elasticsearch.xpack.sql.tree.Location;
|
import org.elasticsearch.xpack.sql.tree.Location;
|
||||||
import org.elasticsearch.xpack.sql.type.DataType;
|
import org.elasticsearch.xpack.sql.type.DataType;
|
||||||
@ -26,12 +28,12 @@ import static java.lang.String.format;
|
|||||||
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
|
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
|
||||||
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
|
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
|
||||||
|
|
||||||
public abstract class DateTimeFunction extends ScalarFunction implements TimeZoneAware {
|
public abstract class DateTimeFunction extends UnaryScalarFunction implements TimeZoneAware {
|
||||||
|
|
||||||
private final DateTimeZone timeZone;
|
private final DateTimeZone timeZone;
|
||||||
|
|
||||||
public DateTimeFunction(Location location, Expression argument, DateTimeZone timeZone) {
|
public DateTimeFunction(Location location, Expression field, DateTimeZone timeZone) {
|
||||||
super(location, argument);
|
super(location, field);
|
||||||
this.timeZone = timeZone;
|
this.timeZone = timeZone;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -39,11 +41,15 @@ public abstract class DateTimeFunction extends ScalarFunction implements TimeZon
|
|||||||
return timeZone;
|
return timeZone;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public boolean foldable() {
|
||||||
|
return field().foldable();
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected TypeResolution resolveType() {
|
protected TypeResolution resolveType() {
|
||||||
return argument().dataType().same(DataTypes.DATE) ?
|
return field().dataType().same(DataTypes.DATE) ?
|
||||||
TypeResolution.TYPE_RESOLVED :
|
TypeResolution.TYPE_RESOLVED :
|
||||||
new TypeResolution("Function '%s' cannot be applied on a non-date expression ('%s' of type '%s')", functionName(), Expressions.name(argument()), argument().dataType().esName());
|
new TypeResolution("Function '%s' cannot be applied on a non-date expression ('%s' of type '%s')", functionName(), Expressions.name(field()), field().dataType().esName());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -84,9 +90,8 @@ public abstract class DateTimeFunction extends ScalarFunction implements TimeZon
|
|||||||
return getClass().getSimpleName();
|
return getClass().getSimpleName();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
protected final ProcessorDefinition makeProcessor() {
|
||||||
public final ColumnProcessor asProcessor() {
|
return new UnaryProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(field()), new DateTimeProcessor(extractor(), timeZone));
|
||||||
return new DateTimeProcessor(extractor(), timeZone);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
protected abstract DateTimeExtractor extractor();
|
protected abstract DateTimeExtractor extractor();
|
||||||
|
@@ -3,19 +3,45 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.sql.expression.function.scalar;
+package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeExtractor;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
 import org.joda.time.DateTime;
+import org.joda.time.DateTimeFieldType;
 import org.joda.time.DateTimeZone;
 import org.joda.time.ReadableDateTime;

 import java.io.IOException;
+import java.util.Objects;

-public class DateTimeProcessor implements ColumnProcessor {
-    public static final String NAME = "d";
+public class DateTimeProcessor implements Processor {
+
+    public enum DateTimeExtractor {
+        DAY_OF_MONTH(DateTimeFieldType.dayOfMonth()),
+        DAY_OF_WEEK(DateTimeFieldType.dayOfWeek()),
+        DAY_OF_YEAR(DateTimeFieldType.dayOfYear()),
+        HOUR_OF_DAY(DateTimeFieldType.hourOfDay()),
+        MINUTE_OF_DAY(DateTimeFieldType.minuteOfDay()),
+        MINUTE_OF_HOUR(DateTimeFieldType.minuteOfHour()),
+        MONTH_OF_YEAR(DateTimeFieldType.monthOfYear()),
+        SECOND_OF_MINUTE(DateTimeFieldType.secondOfMinute()),
+        WEEK_OF_YEAR(DateTimeFieldType.weekOfWeekyear()),
+        YEAR(DateTimeFieldType.year());
+
+        private final DateTimeFieldType field;
+
+        DateTimeExtractor(DateTimeFieldType field) {
+            this.field = field;
+        }
+
+        public int extract(ReadableDateTime dt) {
+            return dt.get(field);
+        }
+    }
+
+    public static final String NAME = "dt";

     private final DateTimeExtractor extractor;
     private final DateTimeZone timeZone;
@@ -25,7 +51,7 @@ public class DateTimeProcessor implements ColumnProcessor {
         this.timeZone = timeZone;
     }

-    DateTimeProcessor(StreamInput in) throws IOException {
+    public DateTimeProcessor(StreamInput in) throws IOException {
         extractor = in.readEnum(DateTimeExtractor.class);
         timeZone = DateTimeZone.forID(in.readString());
     }
@@ -46,7 +72,11 @@ public class DateTimeProcessor implements ColumnProcessor {
     }

     @Override
-    public Object apply(Object l) {
+    public Object process(Object l) {
+        if (l == null) {
+            return null;
+        }
+
         ReadableDateTime dt;
         // most dates are returned as long
         if (l instanceof Long) {
@@ -55,28 +85,29 @@ public class DateTimeProcessor implements ColumnProcessor {
         else {
             dt = (ReadableDateTime) l;
         }
-        if (!timeZone.getID().equals("UTC")) {
+        if (!DateTimeZone.UTC.equals(timeZone)) {
             dt = dt.toDateTime().withZone(timeZone);
         }
         return extractor.extract(dt);
     }

+    @Override
+    public int hashCode() {
+        return Objects.hash(extractor, timeZone);
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (obj == null || obj.getClass() != getClass()) {
             return false;
         }
         DateTimeProcessor other = (DateTimeProcessor) obj;
-        return extractor == other.extractor;
-    }
-
-    @Override
-    public int hashCode() {
-        return extractor.hashCode();
+        return Objects.equals(extractor, other.extractor)
+                && Objects.equals(timeZone, other.timeZone);
     }

     @Override
     public String toString() {
         return extractor.toString();
     }
 }
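A minimal usage sketch of the relocated processor (illustrative only; it assumes the two-argument constructor used by DateTimeFunction above is visible to the caller, and, per the comment in process(), field values usually arrive as epoch-millis longs, though any Joda ReadableDateTime works too):

    import org.joda.time.DateTime;
    import org.joda.time.DateTimeZone;
    import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor;
    import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;

    DateTimeProcessor month = new DateTimeProcessor(DateTimeExtractor.MONTH_OF_YEAR, DateTimeZone.forID("America/New_York"));
    // 2017-11-01T02:00 UTC is still October 31st in New York, so the value is shifted before extraction
    month.process(new DateTime(2017, 11, 1, 2, 0, DateTimeZone.UTC));   // -> 10
    month.process(null);                                                // -> null, thanks to the new null guard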
@@ -6,14 +6,15 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.joda.time.DateTimeZone;

 import java.time.temporal.ChronoField;

 public class DayOfMonth extends DateTimeFunction {
-    public DayOfMonth(Location location, Expression argument, DateTimeZone timeZone) {
-        super(location, argument, timeZone);
+    public DayOfMonth(Location location, Expression field, DateTimeZone timeZone) {
+        super(location, field, timeZone);
     }

     @Override

@@ -6,14 +6,15 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.joda.time.DateTimeZone;

 import java.time.temporal.ChronoField;

 public class DayOfWeek extends DateTimeFunction {
-    public DayOfWeek(Location location, Expression argument, DateTimeZone timeZone) {
-        super(location, argument, timeZone);
+    public DayOfWeek(Location location, Expression field, DateTimeZone timeZone) {
+        super(location, field, timeZone);
     }

     @Override

@@ -6,14 +6,15 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.joda.time.DateTimeZone;

 import java.time.temporal.ChronoField;

 public class DayOfYear extends DateTimeFunction {
-    public DayOfYear(Location location, Expression argument, DateTimeZone timeZone) {
-        super(location, argument, timeZone);
+    public DayOfYear(Location location, Expression field, DateTimeZone timeZone) {
+        super(location, field, timeZone);
     }

     @Override

@@ -6,14 +6,15 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.joda.time.DateTimeZone;

 import java.time.temporal.ChronoField;

 public class HourOfDay extends DateTimeFunction {
-    public HourOfDay(Location location, Expression argument, DateTimeZone timeZone) {
-        super(location, argument, timeZone);
+    public HourOfDay(Location location, Expression field, DateTimeZone timeZone) {
+        super(location, field, timeZone);
     }

     @Override

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.joda.time.DateTimeZone;

@@ -13,8 +14,8 @@ import java.time.temporal.ChronoField;

 public class MinuteOfDay extends DateTimeFunction {

-    public MinuteOfDay(Location location, Expression argument, DateTimeZone timeZone) {
-        super(location, argument, timeZone);
+    public MinuteOfDay(Location location, Expression field, DateTimeZone timeZone) {
+        super(location, field, timeZone);
     }

     @Override

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.joda.time.DateTimeZone;

@@ -6,14 +6,15 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.joda.time.DateTimeZone;

 import java.time.temporal.ChronoField;

 public class MonthOfYear extends DateTimeFunction {
-    public MonthOfYear(Location location, Expression argument, DateTimeZone timeZone) {
-        super(location, argument, timeZone);
+    public MonthOfYear(Location location, Expression field, DateTimeZone timeZone) {
+        super(location, field, timeZone);
     }

     @Override

@@ -6,14 +6,15 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.joda.time.DateTimeZone;

 import java.time.temporal.ChronoField;

 public class SecondOfMinute extends DateTimeFunction {
-    public SecondOfMinute(Location location, Expression argument, DateTimeZone timeZone) {
-        super(location, argument, timeZone);
+    public SecondOfMinute(Location location, Expression field, DateTimeZone timeZone) {
+        super(location, field, timeZone);
     }

     @Override

@@ -6,14 +6,15 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.joda.time.DateTimeZone;

 import java.time.temporal.ChronoField;

 public class WeekOfWeekYear extends DateTimeFunction {
-    public WeekOfWeekYear(Location location, Expression argument, DateTimeZone timeZone) {
-        super(location, argument, timeZone);
+    public WeekOfWeekYear(Location location, Expression field, DateTimeZone timeZone) {
+        super(location, field, timeZone);
     }

     @Override

@@ -6,14 +6,15 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.joda.time.DateTimeZone;

 import java.time.temporal.ChronoField;

 public class Year extends DateTimeFunction {
-    public Year(Location location, Expression argument, DateTimeZone timeZone) {
-        super(location, argument, timeZone);
+    public Year(Location location, Expression field, DateTimeZone timeZone) {
+        super(location, field, timeZone);
     }

     @Override
@@ -28,7 +29,7 @@ public class Year extends DateTimeFunction {

     @Override
     public Expression orderBy() {
-        return argument();
+        return field();
     }

     @Override
@@ -39,5 +40,5 @@ public class Year extends DateTimeFunction {
     @Override
     protected DateTimeExtractor extractor() {
         return DateTimeExtractor.YEAR;
     }
 }
@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class ACos extends MathFunction {
-    public ACos(Location location, Expression argument) {
-        super(location, argument);
+    public ACos(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.ACOS;
+    protected MathOperation operation() {
+        return MathOperation.ACOS;
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class ASin extends MathFunction {
-    public ASin(Location location, Expression argument) {
-        super(location, argument);
+    public ASin(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.ASIN;
+    protected MathOperation operation() {
+        return MathOperation.ASIN;
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class ATan extends MathFunction {
-    public ATan(Location location, Expression argument) {
-        super(location, argument);
+    public ATan(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.ATAN;
+    protected MathOperation operation() {
+        return MathOperation.ATAN;
     }
 }

@@ -6,21 +6,22 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.type.DataType;

 public class Abs extends MathFunction {
-    public Abs(Location location, Expression argument) {
-        super(location, argument);
+    public Abs(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.ABS;
+    protected MathOperation operation() {
+        return MathOperation.ABS;
     }

     @Override
     public DataType dataType() {
-        return argument().dataType();
+        return field().dataType();
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Cbrt extends MathFunction {
-    public Cbrt(Location location, Expression argument) {
-        super(location, argument);
+    public Cbrt(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.CBRT;
+    protected MathOperation operation() {
+        return MathOperation.CBRT;
     }
 }

@@ -6,15 +6,23 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.DataTypeConversion;

 public class Ceil extends MathFunction {
-    public Ceil(Location location, Expression argument) {
-        super(location, argument);
+    public Ceil(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.CEIL;
+    protected MathOperation operation() {
+        return MathOperation.CEIL;
+    }
+
+    @Override
+    public DataType dataType() {
+        return DataTypeConversion.asInteger(field().dataType());
     }
 }
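The added dataType() override is what the commit message means by "Ceil/Floor/Round functions return long/int instead of double": the declared SQL type is narrowed to an integral one through DataTypeConversion.asInteger, while the runtime value still comes straight from java.lang.Math. A small illustrative check against the MathOperation constants introduced later in this commit:

    MathOperation.CEIL.apply(2.1d);   // 3.0, computed by Math.ceil
    MathOperation.FLOOR.apply(-2.1d); // -3.0, computed by Math.floor
    MathOperation.ROUND.apply(2.5d);  // 3L; Math.round(double) already yields a long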
@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Cos extends MathFunction {
-    public Cos(Location location, Expression argument) {
-        super(location, argument);
+    public Cos(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.COS;
+    protected MathOperation operation() {
+        return MathOperation.COS;
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Cosh extends MathFunction {
-    public Cosh(Location location, Expression argument) {
-        super(location, argument);
+    public Cosh(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.COSH;
+    protected MathOperation operation() {
+        return MathOperation.COSH;
     }
 }

@@ -6,11 +6,12 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Degrees extends MathFunction {
-    public Degrees(Location location, Expression argument) {
-        super(location, argument);
+    public Degrees(Location location, Expression field) {
+        super(location, field);
     }

     @Override
@@ -19,7 +20,7 @@ public class Degrees extends MathFunction {
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.DEGREES;
+    protected MathOperation operation() {
+        return MathOperation.DEGREES;
     }
 }

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.util.StringUtils;
@@ -30,7 +31,7 @@ public class E extends MathFunction {
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.E;
+    protected MathOperation operation() {
+        return MathOperation.E;
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Exp extends MathFunction {
-    public Exp(Location location, Expression argument) {
-        super(location, argument);
+    public Exp(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.EXP;
+    protected MathOperation operation() {
+        return MathOperation.EXP;
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Expm1 extends MathFunction {
-    public Expm1(Location location, Expression argument) {
-        super(location, argument);
+    public Expm1(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.EXPM1;
+    protected MathOperation operation() {
+        return MathOperation.EXPM1;
     }
 }

@@ -6,15 +6,23 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.DataTypeConversion;

 public class Floor extends MathFunction {
-    public Floor(Location location, Expression argument) {
-        super(location, argument);
+    public Floor(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.FLOOR;
+    protected MathOperation operation() {
+        return MathOperation.FLOOR;
+    }
+
+    @Override
+    public DataType dataType() {
+        return DataTypeConversion.asInteger(field().dataType());
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Log extends MathFunction {
-    public Log(Location location, Expression argument) {
-        super(location, argument);
+    public Log(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.LOG;
+    protected MathOperation operation() {
+        return MathOperation.LOG;
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Log10 extends MathFunction {
-    public Log10(Location location, Expression argument) {
-        super(location, argument);
+    public Log10(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.LOG10;
+    protected MathOperation operation() {
+        return MathOperation.LOG10;
     }
 }
@@ -8,10 +8,12 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.FieldAttribute;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
-import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
-import org.elasticsearch.xpack.sql.expression.function.scalar.MathFunctionProcessor;
-import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
 import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute;
+import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.type.DataType;
@@ -23,27 +25,27 @@ import static java.lang.String.format;
 import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
 import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;

-public abstract class MathFunction extends ScalarFunction {
+public abstract class MathFunction extends UnaryScalarFunction {

     protected MathFunction(Location location) {
         super(location);
     }

-    protected MathFunction(Location location, Expression argument) {
-        super(location, argument);
+    protected MathFunction(Location location, Expression field) {
+        super(location, field);
     }

     public boolean foldable() {
-        return argument().foldable();
+        return field().foldable();
     }

     @Override
     protected String chainScalarTemplate(String template) {
         return createTemplate(template);
     }

     @Override
+    // TODO: isn't chain Scalar Template enough?
     protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
         ScriptTemplate nested = scalar.script();
         return new ScriptTemplate(createTemplate(nested.template()),
@@ -79,9 +81,9 @@ public abstract class MathFunction extends ScalarFunction {
     }

     @Override
-    public final ColumnProcessor asProcessor() {
-        return new MathFunctionProcessor(processor());
+    protected final ProcessorDefinition makeProcessor() {
+        return new UnaryProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(field()), new MathProcessor(operation()));
     }

-    protected abstract MathProcessor processor();
+    protected abstract MathOperation operation();
 }
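Because makeProcessor() feeds its own input through ProcessorDefinitions.toProcessorDefinition(field()), nested scalar calls should end up as nested definitions. A hedged sketch of the runtime effect for something like DEGREES(ACOS(col)), once the definitions have been compiled down to plain processors (the wiring is assumed; the two classes are the ones introduced in this commit):

    MathProcessor acos = new MathProcessor(MathOperation.ACOS);
    MathProcessor degrees = new MathProcessor(MathOperation.DEGREES);
    // innermost first: Math.toDegrees(Math.acos(0.5)) is 60.0 (up to floating point)
    Object angle = degrees.process(acos.process(0.5d));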
@@ -5,56 +5,112 @@
  */
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
+
+import java.io.IOException;
 import java.util.function.DoubleFunction;
 import java.util.function.Function;

-/**
- * Applies a math function. Note that the order of the enum constants is used for serialization.
- */
-public enum MathProcessor {
-    ABS((Object l) -> {
-        if (l instanceof Float) {
-            return Math.abs(((Float) l).floatValue());
-        }
-        if (l instanceof Double) {
-            return Math.abs(((Double) l).doubleValue());
-        }
-        long lo = ((Number) l).longValue();
-        return lo >= 0 ? lo : lo == Long.MIN_VALUE ? Long.MAX_VALUE : -lo;
-    }),
-    ACOS(fromDouble(Math::acos)),
-    ASIN(fromDouble(Math::asin)),
-    ATAN(fromDouble(Math::atan)),
-    CBRT(fromDouble(Math::cbrt)),
-    CEIL(fromDouble(Math::ceil)),
-    COS(fromDouble(Math::cos)),
-    COSH(fromDouble(Math::cosh)),
-    DEGREES(fromDouble(Math::toDegrees)),
-    E((Object l) -> Math.E),
-    EXP(fromDouble(Math::exp)),
-    EXPM1(fromDouble(Math::expm1)),
-    FLOOR(fromDouble(Math::floor)),
-    LOG(fromDouble(Math::log)),
-    LOG10(fromDouble(Math::log10)),
-    PI((Object l) -> Math.PI),
-    RADIANS(fromDouble(Math::toRadians)),
-    ROUND(fromDouble(Math::round)),
-    SIN(fromDouble(Math::sin)),
-    SINH(fromDouble(Math::sinh)),
-    SQRT(fromDouble(Math::sqrt)),
-    TAN(fromDouble(Math::tan));
-
-    private final Function<Object, Object> apply;
-
-    MathProcessor(Function<Object, Object> apply) {
-        this.apply = apply;
-    }
-
-    private static Function<Object, Object> fromDouble(DoubleFunction<Object> apply) {
-        return (Object l) -> apply.apply(((Number) l).doubleValue());
-    }
-
-    public final Object apply(Object l) {
-        return apply.apply(l);
-    }
-}
+public class MathProcessor implements Processor {
+
+    public enum MathOperation {
+        ABS((Object l) -> {
+            if (l instanceof Float) {
+                return Math.abs(((Float) l).floatValue());
+            }
+            if (l instanceof Double) {
+                return Math.abs(((Double) l).doubleValue());
+            }
+            long lo = ((Number) l).longValue();
+            return lo >= 0 ? lo : lo == Long.MIN_VALUE ? Long.MAX_VALUE : -lo;
+        }),
+
+        ACOS(Math::acos),
+        ASIN(Math::asin),
+        ATAN(Math::atan),
+        CBRT(Math::cbrt),
+        CEIL(Math::ceil),
+        COS(Math::cos),
+        COSH(Math::cosh),
+        DEGREES(Math::toDegrees),
+        E((Object l) -> Math.E),
+        EXP(Math::exp),
+        EXPM1(Math::expm1),
+        FLOOR(Math::floor),
+        LOG(Math::log),
+        LOG10(Math::log10),
+        PI((Object l) -> Math.PI),
+        RADIANS(Math::toRadians),
+        ROUND((DoubleFunction<Object>) Math::round),
+        SIN(Math::sin),
+        SINH(Math::sinh),
+        SQRT(Math::sqrt),
+        TAN(Math::tan);
+
+        private final Function<Object, Object> apply;
+
+        MathOperation(Function<Object, Object> apply) {
+            this.apply = apply;
+        }
+
+        MathOperation(DoubleFunction<Object> apply) {
+            this.apply = (Object l) -> apply.apply(((Number) l).doubleValue());
+        }
+
+        public final Object apply(Object l) {
+            return apply.apply(l);
+        }
+    }
+
+    public static final String NAME = "m";
+
+    private final MathOperation processor;
+
+    public MathProcessor(MathOperation processor) {
+        this.processor = processor;
+    }
+
+    public MathProcessor(StreamInput in) throws IOException {
+        processor = in.readEnum(MathOperation.class);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeEnum(processor);
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    @Override
+    public Object process(Object input) {
+        return processor.apply(input);
+    }
+
+    MathOperation processor() {
+        return processor;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || obj.getClass() != getClass()) {
+            return false;
+        }
+        MathProcessor other = (MathProcessor) obj;
+        return processor == other.processor;
+    }
+
+    @Override
+    public int hashCode() {
+        return processor.hashCode();
+    }
+
+    @Override
+    public String toString() {
+        return processor.toString();
+    }
+}
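A short illustrative sketch of the split between the enum and its wrapper: MathOperation carries the actual math, while MathProcessor is the serializable shell (registered under the compact name "m") that the ProcessorDefinition machinery ships around as a single enum value, which suggests the ordering note from the old class-level comment still matters.

    MathProcessor abs = new MathProcessor(MathOperation.ABS);
    abs.process(-5);             // -> 5L, non-floating input takes the long branch
    abs.process(-5.25d);         // -> 5.25, the Double branch
    abs.process(Long.MIN_VALUE); // -> Long.MAX_VALUE, the explicit overflow guard above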
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.util.StringUtils;
@@ -30,7 +31,7 @@ public class Pi extends MathFunction {
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.PI;
+    protected MathOperation operation() {
+        return MathOperation.PI;
     }
 }

@@ -6,11 +6,12 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Radians extends MathFunction {
-    public Radians(Location location, Expression argument) {
-        super(location, argument);
+    public Radians(Location location, Expression field) {
+        super(location, field);
     }

     @Override
@@ -19,7 +20,7 @@ public class Radians extends MathFunction {
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.RADIANS;
+    protected MathOperation operation() {
+        return MathOperation.RADIANS;
     }
 }

@@ -6,22 +6,23 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.elasticsearch.xpack.sql.type.DataTypeConversion;

 public class Round extends MathFunction {
-    public Round(Location location, Expression argument) {
-        super(location, argument);
+    public Round(Location location, Expression field) {
+        super(location, field);
+    }
+
+    @Override
+    protected MathOperation operation() {
+        return MathOperation.ROUND;
     }

     @Override
     public DataType dataType() {
-        return DataTypes.LONG;
-    }
-
-    @Override
-    protected MathProcessor processor() {
-        return MathProcessor.ROUND;
+        return DataTypeConversion.asInteger(field().dataType());
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Sin extends MathFunction {
-    public Sin(Location location, Expression argument) {
-        super(location, argument);
+    public Sin(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.SIN;
+    protected MathOperation operation() {
+        return MathOperation.SIN;
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Sinh extends MathFunction {
-    public Sinh(Location location, Expression argument) {
-        super(location, argument);
+    public Sinh(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.SINH;
+    protected MathOperation operation() {
+        return MathOperation.SINH;
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Sqrt extends MathFunction {
-    public Sqrt(Location location, Expression argument) {
-        super(location, argument);
+    public Sqrt(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.SQRT;
+    protected MathOperation operation() {
+        return MathOperation.SQRT;
     }
 }

@@ -6,15 +6,16 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.math;

 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
 import org.elasticsearch.xpack.sql.tree.Location;

 public class Tan extends MathFunction {
-    public Tan(Location location, Expression argument) {
-        super(location, argument);
+    public Tan(Location location, Expression field) {
+        super(location, field);
     }

     @Override
-    protected MathProcessor processor() {
-        return MathProcessor.TAN;
+    protected MathOperation operation() {
+        return MathOperation.TAN;
     }
 }
@@ -0,0 +1,48 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+
+import java.util.Objects;
+
+public class AggPathInput extends UnresolvedInput<String> {
+
+    private final String innerKey;
+
+    public AggPathInput(Expression expression, String context) {
+        this(expression, context, null);
+    }
+
+    public AggPathInput(Expression expression, String context, String innerKey) {
+        super(expression, context);
+        this.innerKey = innerKey;
+    }
+
+    public String innerKey() {
+        return innerKey;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(context(), innerKey);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        AggPathInput other = (AggPathInput) obj;
+        return Objects.equals(context(), other.context())
+                && Objects.equals(innerKey, other.innerKey);
+    }
+}
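Judging by its name and by the commit note about resolving inputs for aggregations, this new AggPathInput appears to be the ProcessorDefinition input that points at an aggregation result: the context string inherited from UnresolvedInput<String> names the agg path, and the optional innerKey presumably selects a value nested inside a single bucket, with equality requiring both parts to match.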
Some files were not shown because too many files have changed in this diff.