Arithmetic

* big refactor of Processor by introducing ProcessorDefinition, an
immutable tree structure used for resolving multiple inputs across
folding (in particular for aggregations), which at runtime gets
translated into 'compiled', small Processors (see the sketch below)
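A rough, self-contained sketch of that split (names and signatures are illustrative only, not the actual x-pack classes): an immutable definition tree is built at planning time and then 'compiled' once into a small runtime processor.

// Hypothetical illustration; the real ProcessorDefinition/Processor classes are richer.
import java.util.function.Function;

class ProcessorSketch {
    interface Definition {                       // immutable tree node built at planning time
        Function<Object, Object> asProcessor();  // 'compile' into a small runtime processor
    }

    static Definition constant(Object value) {
        return () -> input -> value;
    }

    static Definition unaryMath(Definition child, Function<Number, Number> op) {
        return () -> {
            Function<Object, Object> compiledChild = child.asProcessor();   // compile the child once
            return input -> op.apply((Number) compiledChild.apply(input));  // per-row work only
        };
    }
}

For example, unaryMath(constant(2.3), n -> Math.ceil(n.doubleValue())).asProcessor().apply(null) evaluates to 3.0; calling asProcessor() once and reusing the returned function is the 'compile' step, so per-row evaluation never re-walks the definition tree.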

Add expression arithmetic, expression folding and type coercion

Folding
* for literals, scalars and inside the optimizer
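As an illustration (hypothetical mini-types, not the project's Expression/Literal classes), folding collapses a constant subtree into a literal before any per-document work happens:

// Hypothetical sketch of constant folding; e.g. SELECT 1 + 3 * 4 / 2 - 2 AS x folds to the literal 5.
class FoldingSketch {
    interface Expr {
        boolean foldable();
        Object fold();
    }

    static Expr literal(Object value) {
        return new Expr() {
            public boolean foldable() { return true; }
            public Object fold() { return value; }
        };
    }

    static Expr add(Expr left, Expr right) {
        return new Expr() {
            public boolean foldable() { return left.foldable() && right.foldable(); }
            public Object fold() { return ((Number) left.fold()).longValue() + ((Number) right.fold()).longValue(); }
        };
    }

    // optimizer rule: replace any foldable subtree with the literal of its folded value
    static Expr foldConstants(Expr e) {
        return e.foldable() ? literal(e.fold()) : e;
    }
}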

Type validation happens per type hierarchy (numeric vs decimal), not per individual type
Ceil/Floor/Round functions return long/int instead of double
ScalarFunction preserves ProcessorDefinition instead of functionId
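The coercion mentioned above amounts to widening both sides of an arithmetic operation to a common type and inserting a cast only where needed; a rough sketch (type names hypothetical):

// Hypothetical sketch of implicit casting for binary arithmetic.
class CoercionSketch {
    enum NumericType { INTEGER, LONG, DOUBLE }   // ordered from narrow to wide

    // pick the wider of the two types (the 'common' type)
    static NumericType commonType(NumericType left, NumericType right) {
        return left.ordinal() >= right.ordinal() ? left : right;
    }

    // wrap an operand in a cast only when its type differs from the common type,
    // e.g. emp_no (INTEGER) + 1.5 (DOUBLE) becomes CAST(emp_no AS DOUBLE) + 1.5
    static String castIfNeeded(String operand, NumericType from, NumericType to) {
        return from == to ? operand : "CAST(" + operand + " AS " + to + ")";
    }
}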

Original commit: elastic/x-pack-elasticsearch@a703f8b455
Costin Leau 2017-09-12 19:13:25 +03:00
parent b8f5720283
commit 87293272d8
174 changed files with 3948 additions and 1626 deletions

View File

@ -27,14 +27,14 @@ public class ExplainIT extends CliIntegrationTestCase {
assertThat(in.readLine(), startsWith("----------"));
assertThat(in.readLine(), startsWith("Project[[test_field{r}#"));
assertThat(in.readLine(), startsWith("\\_SubQueryAlias[test]"));
assertThat(in.readLine(), startsWith(" \\_CatalogTable[test][test_field{r}#"));
assertThat(in.readLine(), startsWith(" \\EsRelation[test][test_field{r}#"));
assertEquals("", in.readLine());
command("EXPLAIN (PLAN OPTIMIZED) SELECT * FROM test");
assertThat(in.readLine(), containsString("plan"));
assertThat(in.readLine(), startsWith("----------"));
assertThat(in.readLine(), startsWith("Project[[test_field{r}#"));
assertThat(in.readLine(), startsWith("\\_CatalogTable[test][test_field{r}#"));
assertThat(in.readLine(), startsWith("\\EsRelation[test][test_field{r}#"));
assertEquals("", in.readLine());
// TODO in this case we should probably remove the source filtering entirely. Right? It costs but we don't need it.
@ -71,7 +71,7 @@ public class ExplainIT extends CliIntegrationTestCase {
assertThat(in.readLine(), startsWith("Project[[i{r}#"));
assertThat(in.readLine(), startsWith("\\_Filter[i{r}#"));
assertThat(in.readLine(), startsWith(" \\_SubQueryAlias[test]"));
assertThat(in.readLine(), startsWith(" \\_CatalogTable[test][i{r}#"));
assertThat(in.readLine(), startsWith(" \\EsRelation[test][i{r}#"));
assertEquals("", in.readLine());
command("EXPLAIN (PLAN OPTIMIZED) SELECT * FROM test WHERE i = 2");
@ -79,7 +79,7 @@ public class ExplainIT extends CliIntegrationTestCase {
assertThat(in.readLine(), startsWith("----------"));
assertThat(in.readLine(), startsWith("Project[[i{r}#"));
assertThat(in.readLine(), startsWith("\\_Filter[i{r}#"));
assertThat(in.readLine(), startsWith(" \\_CatalogTable[test][i{r}#"));
assertThat(in.readLine(), startsWith(" \\EsRelation[test][i{r}#"));
assertEquals("", in.readLine());
command("EXPLAIN (PLAN EXECUTABLE) SELECT * FROM test WHERE i = 2");
@ -124,14 +124,14 @@ public class ExplainIT extends CliIntegrationTestCase {
assertThat(in.readLine(), startsWith("----------"));
assertThat(in.readLine(), startsWith("Aggregate[[],[COUNT(1)#"));
assertThat(in.readLine(), startsWith("\\_SubQueryAlias[test]"));
assertThat(in.readLine(), startsWith(" \\_CatalogTable[test][i{r}#"));
assertThat(in.readLine(), startsWith(" \\EsRelation[test][i{r}#"));
assertEquals("", in.readLine());
command("EXPLAIN (PLAN OPTIMIZED) SELECT COUNT(*) FROM test");
assertThat(in.readLine(), containsString("plan"));
assertThat(in.readLine(), startsWith("----------"));
assertThat(in.readLine(), startsWith("Aggregate[[],[COUNT(1)#"));
assertThat(in.readLine(), startsWith("\\_CatalogTable[test][i{r}#"));
assertThat(in.readLine(), startsWith("\\EsRelation[test][i{r}#"));
assertEquals("", in.readLine());
command("EXPLAIN (PLAN EXECUTABLE) SELECT COUNT(*) FROM test");

View File

@ -9,15 +9,18 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.sql.jdbc.framework.JdbcTestUtils;
import org.elasticsearch.xpack.sql.jdbc.framework.LocalH2;
import java.nio.file.Path;
import java.util.List;
@TestLogging(JdbcTestUtils.SQL_TRACE)
public abstract class DebugSqlSpec extends SqlSpecIT {
public class DebugSqlSpec extends SqlSpecIT {
public static LocalH2 H2 = new LocalH2();
@ParametersFactory(shuffle = false, argumentFormatting = SqlSpecIT.PARAM_FORMATTING)
@ParametersFactory(argumentFormatting = PARAM_FORMATTING)
public static List<Object[]> readScriptSpec() throws Exception {
Parser parser = specParser();
return readScriptSpec("/debug.sql-spec", parser);
}

View File

@ -37,7 +37,9 @@ public class SqlSpecIT extends SpecBaseIntegrationTestCase {
readScriptSpec("/filter.sql-spec", parser),
readScriptSpec("/datetime.sql-spec", parser),
readScriptSpec("/math.sql-spec", parser),
readScriptSpec("/agg.sql-spec", parser));
readScriptSpec("/agg.sql-spec", parser),
readScriptSpec("/arithmetic.sql-spec", parser)
);
}
// NOCOMMIT: add tests for nested docs when interplug communication is enabled

View File

@ -13,6 +13,7 @@ import org.junit.AfterClass;
import org.junit.Before;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Connection;
@ -123,7 +124,11 @@ public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCas
// returns groupName, testName, its line location, its source and the custom object (based on each test parser)
protected static List<Object[]> readScriptSpec(String url, Parser parser) throws Exception {
Path source = PathUtils.get(SpecBaseIntegrationTestCase.class.getResource(url).toURI());
URL resource = SpecBaseIntegrationTestCase.class.getResource(url);
if (resource == null) {
throw new IllegalArgumentException("Cannot find resource " + url);
}
Path source = PathUtils.get(resource.toURI());
String fileName = source.getFileName().toString();
int dot = fileName.indexOf(".");
String groupName = dot > 0 ? fileName.substring(0, dot) : fileName;

View File

@ -0,0 +1,13 @@
//
// Arithmetic tests outside H2
//
// the standard behavior here is to return the constant for each element
// the weird thing is that an actual query needs to be run
arithmeticWithFrom
SELECT 5 - 2 x FROM test_emp;
x
3
;

View File

@ -0,0 +1,65 @@
//
// Arithmetic tests
//
unaryMinus
SELECT - 1 AS x;
plus
SELECT 1 + 1 AS x;
minus
SELECT 1 - 1 AS x;
divide
SELECT 6 / 3 AS x;
multiply
SELECT 2 * 3 AS x;
mod
SELECT 5 % 2 AS x;
operatorsPriority
SELECT 1 + 3 * 4 / 2 - 2 AS x;
operatorsPriorityWithParanthesis
SELECT ((1 + 3) * 2 / (3 - 1)) * 2 AS x;
literalAliasing
SELECT 2 + 3 AS x, 'foo' y;
// variable scalar arithmetic
scalarVariablePlus
SELECT emp_no + 10000 AS x FROM test_emp;
scalarVariableMinus
SELECT emp_no - 10000 AS x FROM test_emp;
scalarVariableMul
SELECT emp_no * 10000 AS x FROM test_emp;
scalarVariableDiv
SELECT emp_no / 10000 AS x FROM test_emp;
scalarVariableMod
SELECT emp_no % 10000 AS x FROM test_emp;
scalarVariableMultipleInputs
SELECT (emp_no % 10000) + YEAR(hire_date) AS x FROM test_emp;
scalarVariableTwoInputs
SELECT (emp_no % 10000) + YEAR(hire_date) AS x FROM test_emp;
scalarVariableThreeInputs
SELECT ((emp_no % 10000) + YEAR(hire_date)) / MONTH(birth_date) AS x FROM test_emp;
scalarVariableArithmeticAndEntry
SELECT emp_no, emp_no % 10000 AS x FROM test_emp;
scalarVariableTwoInputsAndEntry
SELECT emp_no, (emp_no % 10000) + YEAR(hire_date) AS x FROM test_emp;
scalarVariableThreeInputsAndEntry
SELECT emp_no, ((emp_no % 10000) + YEAR(hire_date)) / MONTH(birth_date) AS x FROM test_emp;
// variable scalar agg
aggVariablePlus
SELECT COUNT(*) + 10000 AS x FROM test_emp GROUP BY gender;
aggVariableMinus
SELECT COUNT(*) - 10000 AS x FROM test_emp GROUP BY gender;
aggVariableMul
SELECT COUNT(*) * 2 AS x FROM test_emp GROUP BY gender;
aggVariableDiv
SELECT COUNT(*) / 5000 AS x FROM test_emp GROUP BY gender;
aggVariableMod
SELECT COUNT(*) % 10000 AS x FROM test_emp GROUP BY gender;
aggVariableTwoInputs
SELECT MAX(emp_no) - MIN(emp_no) AS x FROM test_emp GROUP BY gender;
aggVariableThreeInputs
SELECT (MAX(emp_no) - MIN(emp_no)) + AVG(emp_no) AS x FROM test_emp GROUP BY gender;

View File

@ -9,7 +9,7 @@
columnDetectionOverride
SELECT gender, FLOOR(PERCENTILE(emp_no, 97.76)) p1 FROM test_emp GROUP BY gender;
gender:s | p1:double
gender:s | p1:l
M | 10095
F | 10099
;

View File

@ -3,9 +3,11 @@
//
debug
SELECT gender, PERCENTILE(emp_no, 97.76) p1, PERCENTILE(emp_no, 93.3) p2, PERCENTILE_RANK(emp_no, 10025) rank FROM test_emp GROUP BY gender;
// resolution should happen on the adjacent nodes as well
//SELECT 1+2+3 x, x + 3 AS y, y FROM test_emp;
gender | p1 | p2 | rank
M | 10095.6112 | 10090.846 | 23.41269841269841
F | 10099.1936 | 10096.351999999999 | 26.351351351351347
SELECT 2 + 3 x, 'foo', x + 1 z;
x | 'foo' | z
5 | 'foo' | 6
;

View File

@ -3,4 +3,6 @@
//
debug
SELECT * FROM test_emp WHERE emp_no IS NULL ORDER BY emp_no LIMIT 5 ;
SELECT emp_no, CAST(CEIL(emp_no) AS INT) m, first_name FROM "test_emp" WHERE CEIL(emp_no) < 10010 ORDER BY CEIL(emp_no);
//SELECT YEAR(birth_date) AS d, CAST(SUM(emp_no) AS INT) s FROM "test_emp" GROUP BY YEAR(birth_date) ORDER BY YEAR(birth_date) LIMIT 5;
//SELECT emp_no, SIN(emp_no) + emp_no % 10000 + YEAR(hire_date) / 1000 AS s, emp_no AS y FROM test_emp WHERE emp_no = 10010;

View File

@ -13,7 +13,8 @@ SELECT ATAN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY
//mathCbrt
//SELECT CBRT(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
mathCeil
SELECT CEIL(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
// H2 returns CEIL as a double even though the value is an integer; we return a long like the other DBs do
SELECT CAST(CEIL(emp_no) AS INT) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
mathCos
SELECT COS(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
mathCosh
@ -62,7 +63,7 @@ SELECT emp_no, ASIN(emp_no) m, first_name FROM "test_emp" WHERE ASIN(emp_no) < 1
//mathATanFilterAndOrder
//SELECT emp_no, ATAN(emp_no) m, first_name FROM "test_emp" WHERE ATAN(emp_no) < 10010 ORDER BY ATAN(emp_no);
mathCeilFilterAndOrder
SELECT emp_no, CEIL(emp_no) m, first_name FROM "test_emp" WHERE CEIL(emp_no) < 10010 ORDER BY CEIL(emp_no);
SELECT emp_no, CAST(CEIL(emp_no) AS INT) m, first_name FROM "test_emp" WHERE CEIL(emp_no) < 10010 ORDER BY CEIL(emp_no);
//mathCosFilterAndOrder
//SELECT emp_no, COS(emp_no) m, first_name FROM "test_emp" WHERE COS(emp_no) < 10010 ORDER BY COS(emp_no);
//mathCoshFilterAndOrder

View File

@ -34,11 +34,13 @@ import org.elasticsearch.xpack.sql.expression.function.Functions;
import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction;
import org.elasticsearch.xpack.sql.expression.function.aggregate.Count;
import org.elasticsearch.xpack.sql.expression.function.scalar.Cast;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.ArithmeticFunction;
import org.elasticsearch.xpack.sql.plan.TableIdentifier;
import org.elasticsearch.xpack.sql.plan.logical.Aggregate;
import org.elasticsearch.xpack.sql.plan.logical.CatalogTable;
import org.elasticsearch.xpack.sql.plan.logical.EsRelation;
import org.elasticsearch.xpack.sql.plan.logical.Filter;
import org.elasticsearch.xpack.sql.plan.logical.Join;
import org.elasticsearch.xpack.sql.plan.logical.LocalRelation;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.plan.logical.OrderBy;
import org.elasticsearch.xpack.sql.plan.logical.Project;
@ -49,7 +51,10 @@ import org.elasticsearch.xpack.sql.rule.Rule;
import org.elasticsearch.xpack.sql.rule.RuleExecutor;
import org.elasticsearch.xpack.sql.session.SqlSession;
import org.elasticsearch.xpack.sql.tree.Node;
import org.elasticsearch.xpack.sql.tree.NodeUtils;
import org.elasticsearch.xpack.sql.type.CompoundDataType;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypeConversion;
import org.elasticsearch.xpack.sql.util.StringUtils;
import java.util.ArrayList;
@ -90,7 +95,8 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
new ResolveFunctions(),
new ResolveAliases(),
new ProjectedAggregations(),
new ResolveAggsInHavingAndOrderBy()
new ResolveAggsInHavingAndOrderBy()
//new ImplicitCasting()
);
// TODO: this might be removed since the deduplication happens already in ResolveFunctions
Batch deduplication = new Batch("Deduplication",
@ -226,6 +232,11 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
}
return ur;
}
// inlined queries (SELECT 1 + 2) are already resolved
else if (p instanceof LocalRelation) {
return p;
}
return p.transformExpressionsDown(e -> {
if (e instanceof SubQueryExpression) {
SubQueryExpression sq = (SubQueryExpression) e;
@ -234,6 +245,11 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
return e;
});
}
@Override
protected boolean skipResolved() {
return false;
}
}
private class ResolveTable extends AnalyzeRule<UnresolvedRelation> {
@ -250,7 +266,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
throw new UnknownIndexException(table.index(), plan);
}
LogicalPlan catalogTable = new CatalogTable(plan.location(), found);
LogicalPlan catalogTable = new EsRelation(plan.location(), found);
SubQueryAlias sa = new SubQueryAlias(plan.location(), catalogTable, table.index());
if (plan.alias() != null) {
@ -466,7 +482,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
if (ordinal != null) {
changed = true;
if (ordinal > 0 && ordinal <= max) {
NamedExpression reference = aggregates.get(ordinal);
NamedExpression reference = aggregates.get(ordinal - 1);
if (containsAggregate(reference)) {
throw new AnalysisException(exp, "Group ordinal %d refers to an aggregate function %s which is not compatible/allowed with GROUP BY", ordinal, reference.nodeName());
}
@ -724,8 +740,8 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
}
if (child instanceof Cast) {
Cast c = (Cast) child;
if (c.argument() instanceof NamedExpression) {
return new Alias(c.location(), ((NamedExpression) c.argument()).name(), c);
if (c.field() instanceof NamedExpression) {
return new Alias(c.location(), ((NamedExpression) c.field()).name(), c);
}
}
//TODO: maybe add something closer to SQL
@ -966,6 +982,52 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
}
}
private class ImplicitCasting extends AnalyzeRule<LogicalPlan> {
@Override
protected boolean skipResolved() {
return false;
}
@Override
protected LogicalPlan rule(LogicalPlan plan) {
return plan.transformExpressionsDown(this::implicitCast);
}
private Expression implicitCast(Expression e) {
if (!e.childrenResolved()) {
return e;
}
Expression left = null, right = null;
// BinaryOperations are ignored as they are pushed down to ES
// and casting (and thus Aliasing when folding) gets in the way
if (e instanceof ArithmeticFunction) {
ArithmeticFunction f = (ArithmeticFunction) e;
left = f.left();
right = f.right();
}
if (left != null) {
DataType l = left.dataType();
DataType r = right.dataType();
if (!l.same(r)) {
DataType common = DataTypeConversion.commonType(l, r);
if (common == null) {
return e;
}
left = l.same(common) ? left : new Cast(left.location(), left, common);
right = r.same(common) ? right : new Cast(right.location(), right, common);
return NodeUtils.copyTree(e, Arrays.asList(left, right));
}
}
return e;
}
}
abstract static class AnalyzeRule<SubPlan extends LogicalPlan> extends Rule<SubPlan, LogicalPlan> {
// transformUp (post-order) - that is first children and then the node

View File

@ -11,11 +11,9 @@ import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
import org.elasticsearch.xpack.sql.expression.function.Functions;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.Cast;
import org.elasticsearch.xpack.sql.plan.logical.Aggregate;
import org.elasticsearch.xpack.sql.plan.logical.Filter;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.plan.logical.Project;
import org.elasticsearch.xpack.sql.tree.Node;
import java.util.ArrayList;
@ -103,9 +101,6 @@ abstract class Verifier {
else if (ae instanceof Attribute && !ae.resolved()) {
localFailures.add(fail(e, "Cannot resolved '%s' from columns %s", Expressions.name(ae), p.intputSet()));
}
else if (ae instanceof Cast && !(p instanceof Project || p instanceof Aggregate)) {
localFailures.add(fail(ae, "Cast is (currently) only supported in SELECT and GROUP BY; not in %s", p.nodeName()));
}
}));
// consider only nodes that are by themselves unresolved (to avoid unresolved dependencies)

View File

@ -0,0 +1,126 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.execution.search;
import java.util.Arrays;
import java.util.List;
/**
* Aggregations are returned by Elasticsearch in a tree structure where each nested level can have a different size.
* For example a group by a, b, c results in 3-level nested array where each level contains all the relevant values
* for its parent entry.
* Assuming there's a total of 2 A's, 3 B's and 5 C's, the values will be
* A-agg level = { A1, A2 }
* B-agg level = { { A1B1, A1B2, A1B3 }, { A2B1, A2B2, A2B3 } }
* C-agg level = { { { A1B1C1, A1B1C2 ..}, { A1B2C1, etc... } } } and so on
*
* Furthermore, the columns are added in the order in which they are requested (0, 1, 2), eliminating the need for keys as these are implicit (their position in the list).
*
* To help with the iteration, there are two dedicated counters:
* - one that carries (increments) the counter for each level (indicated by the position inside the array) once the children reach their max
* - a flat cursor to indicate the row
*/
class AggValues {
private int row = 0;
private final List<Object[]> columns;
private int[] indexPerLevel;
private int size;
AggValues(List<Object[]> columns) {
this.columns = columns;
}
void init(int maxDepth, int limit) {
int sz = computeSize(columns, maxDepth);
size = limit > 0 ? Math.min(limit, sz) : sz;
indexPerLevel = new int[maxDepth + 1];
}
private static int computeSize(List<Object[]> columns, int maxDepth) {
// look only at arrays with the right depth (the others might be
// counters or other functions)
// then return the parent array to compute the actual returned results
Object[] leafArray = null;
for (int i = 0; i < columns.size() && leafArray == null; i++) {
Object[] col = columns.get(i);
Object o = col;
int level = 0;
Object[] parent = null;
// keep unwrapping until the desired level is reached
while (o instanceof Object[]) {
col = ((Object[]) o);
if (col.length > 0) {
if (level == maxDepth) {
leafArray = parent;
break;
} else {
parent = col;
level++;
o = col[0];
}
} else {
o = null;
}
}
}
if (leafArray == null) {
return columns.get(0).length;
}
int sz = 0;
for (Object leaf : leafArray) {
sz += ((Object[]) leaf).length;
}
return sz;
}
Object column(int column) {
Object o = columns.get(column);
for (int lvl = 0; o instanceof Object[]; lvl++) {
Object[] arr = (Object[]) o;
// the current branch is done
if (indexPerLevel[lvl] == arr.length) {
// reset the current branch
indexPerLevel[lvl] = 0;
// bump the parent - if it's too big, the loop will restart
// again from that position
indexPerLevel[lvl - 1]++;
// restart the loop
lvl = -1;
o = columns.get(column);
} else {
o = arr[indexPerLevel[lvl]];
}
}
return o;
}
int size() {
return size;
}
void reset() {
row = 0;
Arrays.fill(indexPerLevel, 0);
}
boolean nextRow() {
if (row < size - 1) {
row++;
// increment leaf counter - the size check is done lazily while retrieving the columns
indexPerLevel[indexPerLevel.length - 1]++;
return true;
}
return false;
}
boolean hasCurrentRow() {
return row < size;
}
}
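A toy driver for the class above, with made-up data (it would have to live in the same package, since AggValues is package-private), showing how the two counters walk a two-level group-by result:

// Illustrative only: two top-level groups, three leaf values per group -> six rows.
// Assumes package org.elasticsearch.xpack.sql.execution.search so AggValues is visible.
import java.util.Arrays;
import java.util.List;

class AggValuesDemo {
    public static void main(String[] args) {
        List<Object[]> cols = Arrays.asList(
                new Object[] { "A1", "A2" },                                          // depth 0: group keys
                new Object[] { new Object[] { 1, 2, 3 }, new Object[] { 4, 5, 6 } }); // depth 1: leaf values
        AggValues values = new AggValues(cols);
        values.init(1, -1);                   // maxDepth = 1, no limit -> size() == 6
        do {
            Object leaf = values.column(1);   // read the deepest column first so an exhausted branch carries into the parent
            Object key = values.column(0);
            System.out.println(key + " -> " + leaf);
        } while (values.nextRow());
        // prints A1 -> 1, A1 -> 2, A1 -> 3, A2 -> 4, A2 -> 5, A2 -> 6
    }
}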

View File

@ -5,106 +5,28 @@
*/
package org.elasticsearch.xpack.sql.execution.search;
import java.util.Arrays;
import java.util.List;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.xpack.sql.session.AbstractRowSetCursor;
import org.elasticsearch.xpack.sql.session.Cursor;
import org.elasticsearch.xpack.sql.type.Schema;
//
// Aggregations are returned in a tree structure where each nested level can have a different size.
// For example a group by a, b, c results in 3-level nested array where each level contains all the relevant values
// for its parent entry.
// Assuming there's a total of 2 A's, 3 B's and 5 C's, the values will be
// A-agg level = { A1, A2 }
// B-agg level = { { A1B1, A1B2, A1B3 }, { A2B1, A2B2, A2B3 }
// C-agg level = { { { A1B1C1, A1B1C2 ..}, { A1B2C1, etc... } } } and so on
//
// To help with the iteration, there are two dedicated counters :
// - one that carries (increments) the counter for each level (indicated by the position inside the array) once the children reach their max
// - a flat cursor to indicate the row
import java.util.List;
import java.util.function.Supplier;
class AggsRowSetCursor extends AbstractRowSetCursor {
private int row = 0;
private final AggValues agg;
private final List<Supplier<Object>> columns;
private final List<Object[]> columns;
private final int[] indexPerLevel;
private final int size;
AggsRowSetCursor(Schema schema, List<Object[]> columns, int maxDepth, int limit) {
AggsRowSetCursor(Schema schema, AggValues agg, List<Supplier<Object>> columns) {
super(schema, null);
this.agg = agg;
this.columns = columns;
int sz = computeSize(columns, maxDepth);
size = limit > 0 ? Math.min(limit, sz) : sz;
indexPerLevel = new int[maxDepth + 1];
}
private static int computeSize(List<Object[]> columns, int maxDepth) {
// look only at arrays with the right depth (the others might be counters or other functions)
// then return the parent array to compute the actual returned results
Object[] leafArray = null;
for (int i = 0; i < columns.size() && leafArray == null; i++) {
Object[] col = columns.get(i);
Object o = col;
int level = 0;
Object[] parent = null;
// keep unwrapping until the desired level is reached
while (o instanceof Object[]) {
col = ((Object[]) o);
if (col.length > 0) {
if (level == maxDepth) {
leafArray = parent;
break;
}
else {
parent = col;
level++;
o = col[0];
}
}
else {
o = null;
}
}
}
if (leafArray == null) {
return columns.get(0).length;
}
int sz = 0;
for (Object leaf : leafArray) {
sz += ((Object[]) leaf).length;
}
return sz;
}
@Override
protected Object getColumn(int column) {
Object o = columns.get(column);
for (int lvl = 0; o instanceof Object[]; lvl++) {
Object[] arr = (Object[]) o;
// the current branch is done
if (indexPerLevel[lvl] == arr.length) {
// reset the current branch
indexPerLevel[lvl] = 0;
// bump the parent - if it's too big it, the loop will restart again from that position
indexPerLevel[lvl - 1]++;
// restart the loop
lvl = -1;
o = columns.get(column);
}
else {
o = arr[indexPerLevel[lvl]];
}
}
return o;
return columns.get(column).get();
}
@Override
@ -114,24 +36,17 @@ class AggsRowSetCursor extends AbstractRowSetCursor {
@Override
protected boolean doNext() {
if (row < size() - 1) {
row++;
// increment leaf counter - the size check is done lazily while retrieving the columns
indexPerLevel[indexPerLevel.length - 1]++;
return true;
}
return false;
return agg.nextRow();
}
@Override
protected void doReset() {
row = 0;
Arrays.fill(indexPerLevel, 0);
agg.reset();
}
@Override
public int size() {
return size;
return agg.size();
}
@Override

View File

@ -1,79 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.execution.search;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
import java.io.IOException;
import java.util.Objects;
class ProcessingHitExtractor implements HitExtractor {
static final String NAME = "p";
private final HitExtractor delegate;
private final ColumnProcessor processor;
ProcessingHitExtractor(HitExtractor delegate, ColumnProcessor processor) {
this.delegate = delegate;
this.processor = processor;
}
ProcessingHitExtractor(StreamInput in) throws IOException {
delegate = in.readNamedWriteable(HitExtractor.class);
processor = in.readNamedWriteable(ColumnProcessor.class);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(delegate);
out.writeNamedWriteable(processor);
}
@Override
public String getWriteableName() {
return NAME;
}
HitExtractor delegate() {
return delegate;
}
ColumnProcessor processor() {
return processor;
}
@Override
public Object get(SearchHit hit) {
return processor.apply(delegate.get(hit));
}
@Override
public String innerHitName() {
return delegate.innerHitName();
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
ProcessingHitExtractor other = (ProcessingHitExtractor) obj;
return delegate.equals(other.delegate)
&& processor.equals(other.processor);
}
@Override
public int hashCode() {
return Objects.hash(delegate, processor);
}
@Override
public String toString() {
return processor + "(" + delegate + ")";
}
}

View File

@ -16,6 +16,8 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor;
import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractors;
import org.elasticsearch.xpack.sql.session.Cursor;
import org.elasticsearch.xpack.sql.session.RowSetCursor;
import org.elasticsearch.xpack.sql.type.DataType;
@ -36,7 +38,7 @@ public class ScrollCursor implements Cursor {
/**
* {@link NamedWriteableRegistry} used to resolve the {@link #extractors}.
*/
private static final NamedWriteableRegistry REGISTRY = new NamedWriteableRegistry(HitExtractor.getNamedWriteables());
private static final NamedWriteableRegistry REGISTRY = new NamedWriteableRegistry(HitExtractors.getNamedWriteables());
private final String scrollId;
private final List<HitExtractor> extractors;

View File

@ -23,13 +23,24 @@ import org.elasticsearch.search.aggregations.support.AggregationPath;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.execution.ExecutionException;
import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
import org.elasticsearch.xpack.sql.execution.search.extractor.ComputingHitExtractor;
import org.elasticsearch.xpack.sql.execution.search.extractor.ConstantExtractor;
import org.elasticsearch.xpack.sql.execution.search.extractor.DocValueExtractor;
import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor;
import org.elasticsearch.xpack.sql.execution.search.extractor.InnerHitExtractor;
import org.elasticsearch.xpack.sql.execution.search.extractor.SourceExtractor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggPathInput;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggValueInput;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.HitExtractorInput;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ReferenceInput;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
import org.elasticsearch.xpack.sql.querydsl.agg.AggPath;
import org.elasticsearch.xpack.sql.querydsl.container.AggRef;
import org.elasticsearch.xpack.sql.querydsl.container.ColumnReference;
import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef;
import org.elasticsearch.xpack.sql.querydsl.container.NestedFieldRef;
import org.elasticsearch.xpack.sql.querydsl.container.ProcessingRef;
import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer;
import org.elasticsearch.xpack.sql.querydsl.container.Reference;
import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef;
import org.elasticsearch.xpack.sql.querydsl.container.SearchHitFieldRef;
import org.elasticsearch.xpack.sql.querydsl.container.TotalCountRef;
@ -38,11 +49,12 @@ import org.elasticsearch.xpack.sql.session.Rows;
import org.elasticsearch.xpack.sql.session.SqlSettings;
import org.elasticsearch.xpack.sql.type.Schema;
import org.elasticsearch.xpack.sql.util.ObjectUtils;
import org.elasticsearch.xpack.sql.util.StringUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Supplier;
// TODO: add retry/back-off
public class Scroller {
@ -68,7 +80,9 @@ public class Scroller {
// prepare the request
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(query);
log.trace("About to execute query {} on {}", sourceBuilder, index);
if (log.isTraceEnabled()) {
log.trace("About to execute query {} on {}", StringUtils.toString(sourceBuilder), index);
}
SearchRequest search = client.prepareSearch(index).setSource(sourceBuilder).request();
search.scroll(keepAlive).source().timeout(timeout);
@ -79,7 +93,9 @@ public class Scroller {
search.source().size(sz);
}
ScrollerActionListener l = query.isAggsOnly() ? new AggsScrollActionListener(listener, client, timeout, schema, query) : new HandshakeScrollActionListener(listener, client, timeout, schema, query);
boolean isAggsOnly = query.isAggsOnly();
ScrollerActionListener l = isAggsOnly ? new AggsScrollActionListener(listener, client, timeout, schema, query) : new HandshakeScrollActionListener(listener, client, timeout, schema, query);
client.search(search, l);
}
@ -91,7 +107,7 @@ public class Scroller {
// dedicated scroll used for aggs-only/group-by results
static class AggsScrollActionListener extends ScrollerActionListener {
private final QueryContainer query;
AggsScrollActionListener(ActionListener<RowSetCursor> listener, Client client, TimeValue keepAlive, Schema schema, QueryContainer query) {
@ -101,72 +117,90 @@ public class Scroller {
@Override
protected RowSetCursor handleResponse(SearchResponse response) {
Aggregations aggs = response.getAggregations();
List<Object[]> columns = new ArrayList<>();
final List<Object[]> extractedAggs = new ArrayList<>();
AggValues aggValues = new AggValues(extractedAggs);
List<Supplier<Object>> aggColumns = new ArrayList<>(query.columns().size());
// this method assumes the nested aggregations are all part of the same tree (the SQL group-by)
int maxDepth = -1;
List<ColumnReference> cols = query.columns();
for (int index = 0; index < cols.size(); index++) {
ColumnReference col = cols.get(index);
Supplier<Object> supplier = null;
for (Reference ref : query.refs()) {
Object[] arr = null;
ColumnProcessor processor = null;
if (ref instanceof ProcessingRef) {
ProcessingRef pRef = (ProcessingRef) ref;
processor = pRef.processor();
ref = pRef.ref();
if (col instanceof ComputedRef) {
ComputedRef pRef = (ComputedRef) col;
Processor processor = pRef.processor().transformUp(a -> {
Object[] value = extractAggValue(new AggRef(a.context()), response);
extractedAggs.add(value);
final int aggPosition = extractedAggs.size() - 1;
return new AggValueInput(a.expression(), () -> aggValues.column(aggPosition), a.innerKey());
}, AggPathInput.class).asProcessor();
// the input is provided through the value input above
supplier = () -> processor.process(null);
}
else {
extractedAggs.add(extractAggValue(col, response));
final int aggPosition = extractedAggs.size() - 1;
supplier = () -> aggValues.column(aggPosition);
}
if (ref == TotalCountRef.INSTANCE) {
arr = new Object[] { processIfNeeded(processor, Long.valueOf(response.getHits().getTotalHits())) };
columns.add(arr);
aggColumns.add(supplier);
if (col.depth() > maxDepth) {
maxDepth = col.depth();
}
else if (ref instanceof AggRef) {
}
aggValues.init(maxDepth, query.limit());
clearScroll(response.getScrollId());
return new AggsRowSetCursor(schema, aggValues, aggColumns);
}
private Object[] extractAggValue(ColumnReference col, SearchResponse response) {
if (col == TotalCountRef.INSTANCE) {
return new Object[] { Long.valueOf(response.getHits().getTotalHits()) };
}
else if (col instanceof AggRef) {
Object[] arr;
String path = ((AggRef) col).path();
// yup, this is instance equality to make sure we only check the path used by the code
if (path == TotalCountRef.PATH) {
arr = new Object[] { Long.valueOf(response.getHits().getTotalHits()) };
}
else {
// workaround for elastic/elasticsearch/issues/23056
String path = ((AggRef) ref).path();
boolean formattedKey = AggPath.isBucketValueFormatted(path);
if (formattedKey) {
path = AggPath.bucketValueWithoutFormat(path);
}
Object value = getAggProperty(aggs, path);
// // FIXME: this can be tabular in nature
// if (ref instanceof MappedAggRef) {
// Map<String, Object> map = (Map<String, Object>) value;
// Object extractedValue = map.get(((MappedAggRef) ref).fieldName());
// }
Object value = getAggProperty(response.getAggregations(), path);
// // FIXME: this can be tabular in nature
// if (ref instanceof MappedAggRef) {
// Map<String, Object> map = (Map<String, Object>) value;
// Object extractedValue = map.get(((MappedAggRef)
// ref).fieldName());
// }
if (formattedKey) {
List<? extends Bucket> buckets = ((MultiBucketsAggregation) value).getBuckets();
arr = new Object[buckets.size()];
for (int i = 0; i < buckets.size(); i++) {
arr[i] = buckets.get(i).getKeyAsString();
}
}
else {
} else {
arr = value instanceof Object[] ? (Object[]) value : new Object[] { value };
}
// process if needed
for (int i = 0; i < arr.length; i++) {
arr[i] = processIfNeeded(processor, arr[i]);
}
columns.add(arr);
}
// aggs without any grouping
else {
throw new SqlIllegalArgumentException("Unexpected non-agg/grouped column specified; %s", ref.getClass());
}
if (ref.depth() > maxDepth) {
maxDepth = ref.depth();
}
return arr;
}
clearScroll(response.getScrollId());
return new AggsRowSetCursor(schema, columns, maxDepth, query.limit());
throw new SqlIllegalArgumentException("Unexpected non-agg/grouped column specified; %s", col.getClass());
}
private static Object getAggProperty(Aggregations aggs, String path) {
@ -178,10 +212,6 @@ public class Scroller {
}
return agg.getProperty(list.subList(1, list.size()));
}
private Object processIfNeeded(ColumnProcessor processor, Object value) {
return processor != null ? processor.apply(value) : value;
}
}
// initial scroll used for parsing search hits (handles possible aggs)
@ -202,17 +232,17 @@ public class Scroller {
@Override
protected List<HitExtractor> getExtractors() {
// create response extractors for the first time
List<Reference> refs = query.refs();
List<ColumnReference> refs = query.columns();
List<HitExtractor> exts = new ArrayList<>(refs.size());
for (Reference ref : refs) {
for (ColumnReference ref : refs) {
exts.add(createExtractor(ref));
}
return exts;
}
private HitExtractor createExtractor(Reference ref) {
private HitExtractor createExtractor(ColumnReference ref) {
if (ref instanceof SearchHitFieldRef) {
SearchHitFieldRef f = (SearchHitFieldRef) ref;
return f.useDocValue() ? new DocValueExtractor(f.name()) : new SourceExtractor(f.name());
@ -228,9 +258,10 @@ public class Scroller {
return new DocValueExtractor(f.name());
}
if (ref instanceof ProcessingRef) {
ProcessingRef pRef = (ProcessingRef) ref;
return new ProcessingHitExtractor(createExtractor(pRef.ref()), pRef.processor());
if (ref instanceof ComputedRef) {
ProcessorDefinition proc = ((ComputedRef) ref).processor();
proc = proc.transformDown(l -> new HitExtractorInput(l.expression(), createExtractor(l.context())), ReferenceInput.class);
return new ComputingHitExtractor(proc.asProcessor());
}
throw new SqlIllegalArgumentException("Unexpected ValueReference %s", ref.getClass());
@ -303,7 +334,8 @@ public class Scroller {
private static boolean needsHit(List<HitExtractor> exts) {
for (HitExtractor ext : exts) {
if (ext instanceof DocValueExtractor || ext instanceof ProcessingHitExtractor) {
// Anything non-constant requires extraction
if (!(ext instanceof ConstantExtractor)) {
return true;
}
}

View File

@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor;
import org.elasticsearch.xpack.sql.session.AbstractRowSetCursor;
import org.elasticsearch.xpack.sql.session.Cursor;
import org.elasticsearch.xpack.sql.session.RowSetCursor;

View File

@ -5,12 +5,8 @@
*/
package org.elasticsearch.xpack.sql.execution.search;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
@ -18,27 +14,36 @@ import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ReferenceInput;
import org.elasticsearch.xpack.sql.querydsl.agg.Aggs;
import org.elasticsearch.xpack.sql.querydsl.container.AttributeSort;
import org.elasticsearch.xpack.sql.querydsl.container.ProcessingRef;
import org.elasticsearch.xpack.sql.querydsl.container.ColumnReference;
import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef;
import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer;
import org.elasticsearch.xpack.sql.querydsl.container.Reference;
import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef;
import org.elasticsearch.xpack.sql.querydsl.container.ScriptSort;
import org.elasticsearch.xpack.sql.querydsl.container.SearchHitFieldRef;
import org.elasticsearch.xpack.sql.querydsl.container.Sort;
import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction;
import org.elasticsearch.xpack.sql.querydsl.query.NestedQuery;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import static java.util.Collections.singletonList;
import static org.elasticsearch.search.sort.SortBuilders.fieldSort;
import static org.elasticsearch.search.sort.SortBuilders.scriptSort;
@ -56,29 +61,20 @@ public abstract class SourceGenerator {
// translate fields to source-fields or script fields
Set<String> sourceFields = new LinkedHashSet<>();
Set<String> docFields = new LinkedHashSet<>();
for (Reference ref : container.refs()) {
if (ref instanceof ProcessingRef) {
ref = ((ProcessingRef) ref).ref();
}
Map<String, Script> scriptFields = new LinkedHashMap<>();
if (ref instanceof SearchHitFieldRef) {
SearchHitFieldRef sh = (SearchHitFieldRef) ref;
Set<String> collection = sh.useDocValue() ? docFields : sourceFields;
collection.add(ref.toString());
}
else if (ref instanceof ScriptFieldRef) {
ScriptFieldRef sfr = (ScriptFieldRef) ref;
source.scriptField(sfr.name(), sfr.script().toPainless());
}
for (ColumnReference ref : container.columns()) {
collectFields(ref, sourceFields, docFields, scriptFields);
}
if (!sourceFields.isEmpty()) {
source.fetchSource(sourceFields.toArray(new String[sourceFields.size()]), null);
}
if (!docFields.isEmpty()) {
for (String field : docFields) {
source.docValueField(field);
}
for (String field : docFields) {
source.docValueField(field);
}
for (Entry<String, Script> entry : scriptFields.entrySet()) {
source.scriptField(entry.getKey(), entry.getValue());
}
sorting(container, source);
@ -99,6 +95,22 @@ public abstract class SourceGenerator {
return source;
}
private static void collectFields(ColumnReference ref, Set<String> sourceFields, Set<String> docFields, Map<String, Script> scriptFields) {
if (ref instanceof ComputedRef) {
ProcessorDefinition proc = ((ComputedRef) ref).processor();
proc.forEachUp(l -> collectFields(l.context(), sourceFields, docFields, scriptFields), ReferenceInput.class);
}
else if (ref instanceof SearchHitFieldRef) {
SearchHitFieldRef sh = (SearchHitFieldRef) ref;
Set<String> collection = sh.useDocValue() ? docFields : sourceFields;
collection.add(sh.name());
}
else if (ref instanceof ScriptFieldRef) {
ScriptFieldRef sfr = (ScriptFieldRef) ref;
scriptFields.put(sfr.name(), sfr.script().toPainless());
}
}
private static void sorting(QueryContainer container, SearchSourceBuilder source) {
if (container.sort() != null) {

View File

@ -0,0 +1,82 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.execution.search.extractor;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
import java.io.IOException;
import java.util.Objects;
/**
* HitExtractor that delegates to a processor. The difference between this class
* and {@link HitExtractorProcessor} is that the latter is used inside a
* {@link Processor} tree as a leaf (and thus can effectively parse the
* {@link SearchHit}) while this class is used when scrolling and passing down
* the results.
*
* In the future, the processor might be used across the board for all columns
* to reduce API complexity (and keep the {@link HitExtractor} only as an
* internal implementation detail).
*/
public class ComputingHitExtractor implements HitExtractor {
static final String NAME = "p";
private final Processor processor;
public ComputingHitExtractor(Processor processor) {
this.processor = processor;
}
ComputingHitExtractor(StreamInput in) throws IOException {
processor = in.readNamedWriteable(Processor.class);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(processor);
}
@Override
public String getWriteableName() {
return NAME;
}
public Processor processor() {
return processor;
}
@Override
public Object get(SearchHit hit) {
return processor.process(hit);
}
@Override
public String innerHitName() {
return null;
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
ComputingHitExtractor other = (ComputingHitExtractor) obj;
return processor.equals(other.processor);
}
@Override
public int hashCode() {
return Objects.hash(processor);
}
@Override
public String toString() {
return processor.toString();
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.execution.search;
package org.elasticsearch.xpack.sql.execution.search.extractor;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -15,11 +15,11 @@ import java.util.Objects;
/**
* Returns a constant for every search hit against which it is run.
*/
class ConstantExtractor implements HitExtractor {
public class ConstantExtractor implements HitExtractor {
static final String NAME = "c";
private final Object constant;
ConstantExtractor(Object constant) {
public ConstantExtractor(Object constant) {
this.constant = constant;
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.execution.search;
package org.elasticsearch.xpack.sql.execution.search.extractor;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.StreamInput;
@ -15,11 +15,11 @@ import java.io.IOException;
/**
* Extracts field values from {@link SearchHit#field(String)}.
*/
class DocValueExtractor implements HitExtractor {
public class DocValueExtractor implements HitExtractor {
static final String NAME = "f";
private final String fieldName;
DocValueExtractor(String name) {
public DocValueExtractor(String name) {
this.fieldName = name;
}

View File

@ -0,0 +1,26 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.execution.search.extractor;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.search.SearchHit;
/**
* Extracts a column value from a {@link SearchHit}.
*/
public interface HitExtractor extends NamedWriteable {
/**
* Extract the value from a hit.
*/
Object get(SearchHit hit);
/**
* Name of the inner hit needed by this extractor if it needs one, {@code null} otherwise.
*/
@Nullable
String innerHitName();
}

View File

@ -3,45 +3,29 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.execution.search;
package org.elasticsearch.xpack.sql.execution.search.extractor;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.Processors;
import java.util.ArrayList;
import java.util.List;
/**
* Extracts a columns value from a {@link SearchHit}.
*/
public interface HitExtractor extends NamedWriteable {
public abstract class HitExtractors {
/**
* All of the named writeables needed to deserialize the instances
* of {@linkplain HitExtractor}.
* All of the named writeables needed to deserialize the instances of
* {@linkplain HitExtractor}.
*/
static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.add(new Entry(HitExtractor.class, ConstantExtractor.NAME, ConstantExtractor::new));
entries.add(new Entry(HitExtractor.class, DocValueExtractor.NAME, DocValueExtractor::new));
entries.add(new Entry(HitExtractor.class, InnerHitExtractor.NAME, InnerHitExtractor::new));
entries.add(new Entry(HitExtractor.class, SourceExtractor.NAME, SourceExtractor::new));
entries.add(new Entry(HitExtractor.class, ProcessingHitExtractor.NAME, ProcessingHitExtractor::new));
entries.addAll(ColumnProcessor.getNamedWriteables());
entries.add(new Entry(HitExtractor.class, ComputingHitExtractor.NAME, ComputingHitExtractor::new));
entries.addAll(Processors.getNamedWriteables());
return entries;
}
/**
* Extract the value from a hit.
*/
Object get(SearchHit hit);
/**
* Name of the inner hit needed by this extractor if it needs one, {@code null} otherwise.
*/
@Nullable
String innerHitName();
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.execution.search;
package org.elasticsearch.xpack.sql.execution.search.extractor;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.document.DocumentField;
@ -16,13 +16,13 @@ import java.io.IOException;
import java.util.Map;
import java.util.Objects;
class InnerHitExtractor implements HitExtractor {
public class InnerHitExtractor implements HitExtractor {
static final String NAME = "i";
private final String hitName, fieldName;
private final boolean useDocValue;
private final String[] tree;
InnerHitExtractor(String hitName, String name, boolean useDocValue) {
public InnerHitExtractor(String hitName, String name, boolean useDocValue) {
this.hitName = hitName;
this.fieldName = name;
this.useDocValue = useDocValue;

View File

@ -3,20 +3,20 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.execution.search;
import java.io.IOException;
import java.util.Map;
package org.elasticsearch.xpack.sql.execution.search.extractor;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.SearchHit;
class SourceExtractor implements HitExtractor {
import java.io.IOException;
import java.util.Map;
public class SourceExtractor implements HitExtractor {
public static final String NAME = "s";
private final String fieldName;
SourceExtractor(String name) {
public SourceExtractor(String name) {
this.fieldName = name;
}

View File

@ -5,21 +5,15 @@
*/
package org.elasticsearch.xpack.sql.expression;
import org.elasticsearch.xpack.sql.tree.Location;
import java.util.Arrays;
import java.util.Objects;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;
public abstract class BinaryExpression extends Expression {
private final Expression left, right;
public interface Negateable {
BinaryExpression negate();
}
protected BinaryExpression(Location location, Expression left, Expression right) {
super(location, Arrays.asList(left, right));
this.left = left;
@ -44,13 +38,6 @@ public abstract class BinaryExpression extends Expression {
return left.nullable() || left.nullable();
}
public abstract BinaryExpression swapLeftAndRight();
@Override
public DataType dataType() {
return DataTypes.BOOLEAN;
}
@Override
public int hashCode() {
return Objects.hash(left, right);
@ -79,6 +66,7 @@ public abstract class BinaryExpression extends Expression {
return sb.toString();
}
// simplify toString
public abstract String symbol();
public abstract BinaryExpression swapLeftAndRight();
}

View File

@ -0,0 +1,28 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;
public abstract class BinaryLogic extends BinaryOperator {
protected BinaryLogic(Location location, Expression left, Expression right) {
super(location, left, right);
}
@Override
public DataType dataType() {
return DataTypes.BOOLEAN;
}
@Override
protected TypeResolution resolveInputType(DataType inputType) {
return DataTypes.BOOLEAN.equals(inputType) ? TypeResolution.TYPE_RESOLVED : new TypeResolution(
"'%s' requires type %s not %s", symbol(), DataTypes.BOOLEAN.sqlName(), inputType.sqlName());
}
}

View File

@ -7,30 +7,33 @@ package org.elasticsearch.xpack.sql.expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypeConversion;
//Binary expression that requires both input expressions to have the same type
//Compatible types should be handled by the analyzer (by using the narrowest type)
public abstract class BinaryOperator extends BinaryExpression {
public interface Negateable {
BinaryExpression negate();
}
protected BinaryOperator(Location location, Expression left, Expression right) {
super(location, left, right);
}
protected abstract DataType acceptedType();
protected abstract TypeResolution resolveInputType(DataType inputType);
@Override
protected TypeResolution resolveType() {
DataType accepted = acceptedType();
if (!childrenResolved()) {
return new TypeResolution("Unresolved children");
}
DataType l = left().dataType();
DataType r = right().dataType();
if (!l.same(r)) {
return new TypeResolution("Different types (%s and %s) used in '%s'", l.sqlName(), r.sqlName(), symbol());
}
if (!DataTypeConversion.canConvert(accepted, left().dataType())) {
return new TypeResolution("'%s' requires type %s not %s", symbol(), accepted.sqlName(), l.sqlName());
}
else {
return TypeResolution.TYPE_RESOLVED;
TypeResolution resolution = resolveInputType(l);
if (resolution == TypeResolution.TYPE_RESOLVED) {
return resolveInputType(r);
}
return resolution;
}
}
}

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.sql.expression;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.capabilities.Resolvable;
import org.elasticsearch.xpack.sql.capabilities.Resolvables;
import org.elasticsearch.xpack.sql.tree.Location;
@ -62,7 +63,7 @@ public abstract class Expression extends Node<Expression> implements Resolvable
}
public Object fold() {
return null;
throw new SqlIllegalArgumentException("Should not fold expression");
}
public abstract boolean nullable();

View File

@ -5,6 +5,8 @@
*/
package org.elasticsearch.xpack.sql.expression;
import org.elasticsearch.xpack.sql.expression.Expression.TypeResolution;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
@ -68,4 +70,14 @@ public abstract class Expressions {
public static Attribute attribute(Expression e) {
return e instanceof NamedExpression ? ((NamedExpression) e).toAttribute() : null;
}
public static TypeResolution typeMustBe(Expression e, Predicate<Expression> predicate, String message) {
return predicate.test(e) ? TypeResolution.TYPE_RESOLVED : new TypeResolution(message);
}
public static TypeResolution typeMustBeNumeric(Expression e) {
return e.dataType().isNumeric()? TypeResolution.TYPE_RESOLVED : new TypeResolution(
"Argument required to be numeric ('%s' of type '%s')", Expressions.name(e), e.dataType().esName());
}
}

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypeConversion;
import org.elasticsearch.xpack.sql.type.DataTypes;
import java.util.Objects;
@ -21,8 +22,8 @@ public class Literal extends LeafExpression {
public Literal(Location location, Object value, DataType dataType) {
super(location);
this.value = value;
this.dataType = dataType;
this.value = DataTypeConversion.convert(value, dataType);
}
public Object value() {

View File

@ -30,11 +30,6 @@ public abstract class UnaryExpression extends Expression {
return child.nullable();
}
@Override
public boolean foldable() {
return child.foldable();
}
@Override
public boolean resolved() {
return child.resolved();

View File

@ -40,11 +40,6 @@ public abstract class Function extends NamedExpression {
return name;
}
@Override
public boolean foldable() {
return false;
}
@Override
public boolean nullable() {
return false;

View File

@ -9,15 +9,8 @@ import org.elasticsearch.xpack.sql.expression.Alias;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.ComposeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
import java.util.ArrayList;
import java.util.List;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
public abstract class Functions {
@ -25,8 +18,11 @@ public abstract class Functions {
return e instanceof AggregateFunction;
}
public static boolean isScalarFunction(Expression e) {
return e instanceof ScalarFunction;
public static boolean isUnaryScalarFunction(Expression e) {
if (e instanceof BinaryScalarFunction) {
throw new UnsupportedOperationException("not handled currently");
}
return e instanceof UnaryScalarFunction;
}
public static AggregateFunction extractAggregate(NamedExpression ne) {
@ -35,8 +31,11 @@ public abstract class Functions {
if (e instanceof Alias) {
e = ((Alias) ne).child();
}
else if (e instanceof ScalarFunction) {
e = ((ScalarFunction) e).argument();
else if (e instanceof UnaryScalarFunction) {
e = ((UnaryScalarFunction) e).field();
}
else if (e instanceof BinaryScalarFunction) {
throw new UnsupportedOperationException();
}
else if (e instanceof AggregateFunction) {
return (AggregateFunction) e;
@ -47,52 +46,4 @@ public abstract class Functions {
}
return null;
}
public static List<Expression> unwrapScalarFunctionWithTail(Expression e) {
if (!(e instanceof ScalarFunction)) {
return emptyList();
}
List<Expression> exps = new ArrayList<>();
while (isScalarFunction(e)) {
ScalarFunction scalar = (ScalarFunction) e;
exps.add(scalar);
e = scalar.argument();
}
exps.add(e);
return exps;
}
public static List<ScalarFunction> unwrapScalarProcessor(Expression e) {
if (!(e instanceof ScalarFunction)) {
return emptyList();
}
// common-case (single function wrapper)
if (e instanceof ScalarFunction && !(((ScalarFunction) e).argument() instanceof ScalarFunction)) {
return singletonList((ScalarFunction) e);
}
List<ScalarFunction> exps = new ArrayList<>();
while (e instanceof ScalarFunction) {
ScalarFunction scalar = (ScalarFunction) e;
exps.add(scalar);
e = scalar.argument();
}
return exps;
}
public static ColumnProcessor chainProcessors(List<Expression> unwrappedScalar) {
ColumnProcessor proc = null;
for (Expression e : unwrappedScalar) {
if (e instanceof ScalarFunction) {
ScalarFunction sf = (ScalarFunction) e;
// A(B(C)) is applied inside out: first C, then B, then A (the last function in the list runs first)
proc = proc == null ? sf.asProcessor() : new ComposeProcessor(sf.asProcessor(), proc);
}
else {
return proc;
}
}
return proc;
}
}

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.aggregate;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
public class Avg extends NumericAggregate implements EnclosedAgg {
@ -18,4 +19,9 @@ public class Avg extends NumericAggregate implements EnclosedAgg {
public String innerName() {
return "avg";
}
@Override
public DataType dataType() {
return field().dataType();
}
}

View File

@ -25,10 +25,7 @@ class NumericAggregate extends AggregateFunction {
@Override
protected TypeResolution resolveType() {
return field().dataType().isNumeric() ? TypeResolution.TYPE_RESOLVED : new TypeResolution(
"Function '%s' cannot be applied on a non-numeric expression ('%s' of type '%s')", functionName(),
Expressions.name(field()), field().dataType().esName());
return Expressions.typeMustBeNumeric(field());
}
@Override

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.expression.function.aggregate;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.Foldables;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
@ -27,9 +28,9 @@ public class Percentile extends NumericAggregate implements EnclosedAgg {
TypeResolution resolution = super.resolveType();
if (TypeResolution.TYPE_RESOLVED.equals(resolution)) {
resolution = percent().dataType().isNumeric() ? TypeResolution.TYPE_RESOLVED :
new TypeResolution("Percentile#percent argument cannot be non-numeric (type is'%s')", percent().dataType().esName());
resolution = Expressions.typeMustBeNumeric(percent());
}
return resolution;
}

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.expression.function.aggregate;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.Foldables;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
@ -27,9 +28,9 @@ public class PercentileRank extends AggregateFunction implements EnclosedAgg {
TypeResolution resolution = super.resolveType();
if (TypeResolution.TYPE_RESOLVED.equals(resolution)) {
resolution = value.dataType().isNumeric() ? TypeResolution.TYPE_RESOLVED :
new TypeResolution("PercentileRank#value argument cannot be non-numeric (type is'%s')", value.dataType().esName());
resolution = Expressions.typeMustBeNumeric(value);
}
return resolution;
}

View File

@ -9,7 +9,6 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import java.util.List;
import java.util.Objects;
public class PercentileRanks extends CompoundNumericAggregate {
@ -23,19 +22,4 @@ public class PercentileRanks extends CompoundNumericAggregate {
public List<Expression> values() {
return values;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
PercentileRanks other = (PercentileRanks) obj;
return Objects.equals(field(), other.field())
&& Objects.equals(values, other.values);
}
}

View File

@ -9,7 +9,6 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.tree.Location;
import java.util.List;
import java.util.Objects;
public class Percentiles extends CompoundNumericAggregate {
@ -23,19 +22,4 @@ public class Percentiles extends CompoundNumericAggregate {
public List<Expression> percents() {
return percents;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
Percentiles other = (Percentiles) obj;
return Objects.equals(field(), other.field())
&& Objects.equals(percents, other.percents);
}
}

View File

@ -0,0 +1,98 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import java.util.Arrays;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public abstract class BinaryScalarFunction extends ScalarFunction {
private final Expression left, right;
protected BinaryScalarFunction(Location location, Expression left, Expression right) {
super(location, Arrays.asList(left, right));
this.left = left;
this.right = right;
}
public Expression left() {
return left;
}
public Expression right() {
return right;
}
public boolean foldable() {
return left.foldable() && right.foldable();
}
@Override
public ScalarFunctionAttribute toAttribute() {
return new ScalarFunctionAttribute(location(), name(), dataType(), id(), asScript(), orderBy(), asProcessor());
}
protected ScriptTemplate asScript() {
ScriptTemplate leftScript = asScript(left());
ScriptTemplate rightScript = asScript(right());
return asScriptFrom(leftScript, rightScript);
}
protected abstract ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript);
protected ScriptTemplate asScript(Expression exp) {
if (exp.foldable()) {
return asScriptFromFoldable(exp);
}
Attribute attr = Expressions.attribute(exp);
if (attr != null) {
if (attr instanceof ScalarFunctionAttribute) {
return asScriptFrom((ScalarFunctionAttribute) attr);
}
if (attr instanceof AggregateFunctionAttribute) {
return asScriptFrom((AggregateFunctionAttribute) attr);
}
// fall-back to the field attribute
return asScriptFrom((FieldAttribute) attr);
}
throw new SqlIllegalArgumentException("Cannot evaluate script for field %s", exp);
}
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
return scalar.script();
}
protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
return new ScriptTemplate(formatTemplate("{}"),
paramsBuilder().agg(aggregate.functionId(), aggregate.propertyPath()).build(),
aggregate.dataType());
}
protected ScriptTemplate asScriptFrom(FieldAttribute field) {
return new ScriptTemplate(formatTemplate("doc[{}].value"),
paramsBuilder().variable(field.name()).build(),
field.dataType());
}
protected ScriptTemplate asScriptFromFoldable(Expression foldable) {
return new ScriptTemplate(formatTemplate("{}"),
paramsBuilder().variable(foldable.fold()).build(),
foldable.dataType());
}
}

View File

@ -8,6 +8,9 @@ package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
@ -19,17 +22,17 @@ import java.util.Objects;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public class Cast extends ScalarFunction {
public class Cast extends UnaryScalarFunction {
private final DataType dataType;
public Cast(Location location, Expression argument, DataType dataType) {
super(location, argument);
public Cast(Location location, Expression field, DataType dataType) {
super(location, field);
this.dataType = dataType;
}
public DataType from() {
return argument().dataType();
return field().dataType();
}
public DataType to() {
@ -41,9 +44,19 @@ public class Cast extends ScalarFunction {
return dataType;
}
@Override
public boolean foldable() {
return field().foldable();
}
@Override
public Object fold() {
return DataTypeConversion.convert(field().fold(), dataType);
}
@Override
public boolean nullable() {
return argument().nullable() || DataTypeConversion.nullable(from(), to());
return field().nullable() || DataTypeConversion.nullable(from());
}
@Override
@ -77,8 +90,8 @@ public class Cast extends ScalarFunction {
}
@Override
public ColumnProcessor asProcessor() {
return new CastProcessor(DataTypeConversion.conversionFor(from(), to()));
protected ProcessorDefinition makeProcessor() {
return new UnaryProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(field()), new CastProcessor(DataTypeConversion.conversionFor(from(), to())));
}
@Override
@ -88,6 +101,6 @@ public class Cast extends ScalarFunction {
@Override
public String toString() {
return functionName() + "(" + argument().toString() + " AS " + to().sqlName() + ")#" + id();
return functionName() + "(" + field().toString() + " AS " + to().sqlName() + ")#" + id();
}
}

View File

@ -7,35 +7,39 @@ package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
import org.elasticsearch.xpack.sql.type.DataTypeConversion.Conversion;
import java.io.IOException;
import java.util.Objects;
public class CastProcessor implements Processor {
public static final String NAME = "ca";
public class CastProcessor implements ColumnProcessor {
public static final String NAME = "c";
private final Conversion conversion;
CastProcessor(Conversion conversion) {
public CastProcessor(Conversion conversion) {
this.conversion = conversion;
}
CastProcessor(StreamInput in) throws IOException {
public CastProcessor(StreamInput in) throws IOException {
conversion = in.readEnum(Conversion.class);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeEnum(conversion);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public Object apply(Object r) {
return conversion.convert(r);
public void writeTo(StreamOutput out) throws IOException {
out.writeEnum(conversion);
}
@Override
public Object process(Object input) {
return conversion.convert(input);
}
Conversion converter() {
@ -44,20 +48,25 @@ public class CastProcessor implements ColumnProcessor {
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
CastProcessor other = (CastProcessor) obj;
return conversion.equals(other.conversion);
return Objects.equals(conversion, other.conversion);
}
@Override
public int hashCode() {
return conversion.hashCode();
return Objects.hash(conversion);
}
@Override
public String toString() {
return conversion.toString();
return conversion.name();
}
}

View File

@ -1,32 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import java.util.ArrayList;
import java.util.List;
public interface ColumnProcessor extends NamedWriteable {
/**
* All of the named writeables needed to deserialize the instances
* of {@linkplain ColumnProcessor}.
*/
static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.add(new NamedWriteableRegistry.Entry(ColumnProcessor.class, CastProcessor.NAME, CastProcessor::new));
entries.add(new NamedWriteableRegistry.Entry(ColumnProcessor.class, ComposeProcessor.NAME, ComposeProcessor::new));
entries.add(new NamedWriteableRegistry.Entry(ColumnProcessor.class, DateTimeProcessor.NAME, DateTimeProcessor::new));
entries.add(new NamedWriteableRegistry.Entry(ColumnProcessor.class,
MathFunctionProcessor.NAME, MathFunctionProcessor::new));
entries.add(new NamedWriteableRegistry.Entry(ColumnProcessor.class,
MatrixFieldProcessor.NAME, MatrixFieldProcessor::new));
return entries;
}
Object apply(Object r);
}

View File

@ -1,77 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.Objects;
/**
* A {@linkplain ColumnProcessor} that composes the results of two
* {@linkplain ColumnProcessor}s.
*/
public class ComposeProcessor implements ColumnProcessor {
static final String NAME = ".";
private final ColumnProcessor first;
private final ColumnProcessor second;
public ComposeProcessor(ColumnProcessor first, ColumnProcessor second) {
this.first = first;
this.second = second;
}
public ComposeProcessor(StreamInput in) throws IOException {
first = in.readNamedWriteable(ColumnProcessor.class);
second = in.readNamedWriteable(ColumnProcessor.class);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(first);
out.writeNamedWriteable(second);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public Object apply(Object r) {
return second.apply(first.apply(r));
}
ColumnProcessor first() {
return first;
}
ColumnProcessor second() {
return second;
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
ComposeProcessor other = (ComposeProcessor) obj;
return first.equals(other.first)
&& second.equals(other.second);
}
@Override
public int hashCode() {
return Objects.hash(first, second);
}
@Override
public String toString() {
// borrow Haskell's notation for function composition
return "(" + second + " . " + first + ")";
}
}

View File

@ -1,64 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor;
import java.io.IOException;
public class MathFunctionProcessor implements ColumnProcessor {
public static final String NAME = "m";
private final MathProcessor processor;
public MathFunctionProcessor(MathProcessor processor) {
this.processor = processor;
}
MathFunctionProcessor(StreamInput in) throws IOException {
processor = in.readEnum(MathProcessor.class);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeEnum(processor);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public Object apply(Object r) {
return processor.apply(r);
}
MathProcessor processor() {
return processor;
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
MathFunctionProcessor other = (MathFunctionProcessor) obj;
return processor == other.processor;
}
@Override
public int hashCode() {
return processor.hashCode();
}
@Override
public String toString() {
return processor.toString();
}
}

View File

@ -0,0 +1,47 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.UnaryArithmeticProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ChainingProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.MatrixFieldProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
import java.util.ArrayList;
import java.util.List;
public abstract class Processors {
/**
* All of the named writeables needed to deserialize the instances of
* {@linkplain Processor}.
*/
public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
// base
entries.add(new Entry(Processor.class, ConstantProcessor.NAME, ConstantProcessor::new));
entries.add(new Entry(Processor.class, HitExtractorProcessor.NAME, HitExtractorProcessor::new));
entries.add(new Entry(Processor.class, CastProcessor.NAME, CastProcessor::new));
entries.add(new Entry(Processor.class, ChainingProcessor.NAME, ChainingProcessor::new));
entries.add(new Entry(Processor.class, MatrixFieldProcessor.NAME, MatrixFieldProcessor::new));
// arithmetic
entries.add(new Entry(Processor.class, BinaryArithmeticProcessor.NAME, BinaryArithmeticProcessor::new));
entries.add(new Entry(Processor.class, UnaryArithmeticProcessor.NAME, UnaryArithmeticProcessor::new));
// datetime
entries.add(new Entry(Processor.class, DateTimeProcessor.NAME, DateTimeProcessor::new));
// math
entries.add(new Entry(Processor.class, MathProcessor.NAME, MathProcessor::new));
return entries;
}
}
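For context, a minimal sketch of how such a registry is typically consumed (illustrative only; imports are elided, someProcessor is a placeholder for any Processor built elsewhere, and the actual plugin wiring lives outside this file):

NamedWriteableRegistry registry = new NamedWriteableRegistry(Processors.getNamedWriteables());
BytesStreamOutput out = new BytesStreamOutput();
out.writeNamedWriteable(someProcessor); // placeholder: any Processor instance
try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) {
    Processor roundTripped = in.readNamedWriteable(Processor.class); // resolved via the NAME entries registered above
}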

View File

@ -5,83 +5,41 @@
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.Function;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import java.util.List;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static java.util.Collections.emptyList;
public abstract class ScalarFunction extends Function {
private final Expression argument;
private ProcessorDefinition lazyProcessor = null;
protected ScalarFunction(Location location) {
super(location, emptyList());
this.argument = null;
}
protected ScalarFunction(Location location, Expression child) {
super(location, singletonList(child));
this.argument = child;
}
public Expression argument() {
return argument;
protected ScalarFunction(Location location, List<Expression> fields) {
super(location, fields);
}
@Override
public ScalarFunctionAttribute toAttribute() {
String functionId = null;
Attribute attr = Expressions.attribute(argument());
public abstract ScalarFunctionAttribute toAttribute();
if (attr instanceof AggregateFunctionAttribute) {
AggregateFunctionAttribute afa = (AggregateFunctionAttribute) attr;
functionId = afa.functionId();
protected abstract ScriptTemplate asScript();
public ProcessorDefinition asProcessor() {
if (lazyProcessor == null) {
lazyProcessor = makeProcessor();
}
return new ScalarFunctionAttribute(location(), name(), dataType(), id(), asScript(), orderBy(), functionId);
return lazyProcessor;
}
protected ScriptTemplate asScript() {
Attribute attr = Expressions.attribute(argument());
if (attr != null) {
if (attr instanceof ScalarFunctionAttribute) {
return asScriptFrom((ScalarFunctionAttribute) attr);
}
if (attr instanceof AggregateFunctionAttribute) {
return asScriptFrom((AggregateFunctionAttribute) attr);
}
// fall-back to the field attribute
return asScriptFrom((FieldAttribute) attr);
}
throw new SqlIllegalArgumentException("Cannot evaluate script for field %s", argument());
}
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
ScriptTemplate nested = scalar.script();
Params p = paramsBuilder().script(nested.params()).build();
return new ScriptTemplate(chainScalarTemplate(nested.template()), p, dataType());
}
protected abstract ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate);
protected abstract ScriptTemplate asScriptFrom(FieldAttribute field);
protected abstract String chainScalarTemplate(String template);
public abstract ColumnProcessor asProcessor();
protected abstract ProcessorDefinition makeProcessor();
// used if the function is monotonic and thus does not have to be computed for ordering purposes
public Expression orderBy() {

View File

@ -9,6 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.ExpressionId;
import org.elasticsearch.xpack.sql.expression.TypedAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
@ -17,17 +18,17 @@ public class ScalarFunctionAttribute extends TypedAttribute {
private final ScriptTemplate script;
private final Expression orderBy;
private final String functionId;
private final ProcessorDefinition processorDef;
ScalarFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, ScriptTemplate script, Expression orderBy, String functionId) {
this(location, name, dataType, null, true, id, false, script, orderBy, functionId);
ScalarFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, ScriptTemplate script, Expression orderBy, ProcessorDefinition processorDef) {
this(location, name, dataType, null, true, id, false, script, orderBy, processorDef);
}
ScalarFunctionAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic, ScriptTemplate script, Expression orderBy, String functionId) {
ScalarFunctionAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic, ScriptTemplate script, Expression orderBy, ProcessorDefinition processorDef) {
super(location, name, dataType, qualifier, nullable, id, synthetic);
this.script = script;
this.orderBy = orderBy;
this.functionId = functionId;
this.processorDef = processorDef;
}
public ScriptTemplate script() {
@ -38,18 +39,18 @@ public class ScalarFunctionAttribute extends TypedAttribute {
return orderBy;
}
public String functionId() {
return functionId;
public ProcessorDefinition processorDef() {
return processorDef;
}
@Override
protected Expression canonicalize() {
return new ScalarFunctionAttribute(location(), "<none>", dataType(), null, true, id(), false, script, orderBy, functionId);
return new ScalarFunctionAttribute(location(), "<none>", dataType(), null, true, id(), false, script, orderBy, processorDef);
}
@Override
protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
return new ScalarFunctionAttribute(location, name, dataType, qualifier, nullable, id, synthetic, script, orderBy, functionId);
return new ScalarFunctionAttribute(location, name, dataType, qualifier, nullable, id, synthetic, script, orderBy, processorDef);
}
@Override

View File

@ -0,0 +1,95 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import static java.util.Collections.singletonList;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public abstract class UnaryScalarFunction extends ScalarFunction {
private final Expression field;
protected UnaryScalarFunction(Location location) {
super(location);
this.field = null;
}
protected UnaryScalarFunction(Location location, Expression field) {
super(location, singletonList(field));
this.field = field;
}
public Expression field() {
return field;
}
@Override
public boolean foldable() {
return field.foldable();
}
@Override
public ScalarFunctionAttribute toAttribute() {
String functionId = null;
Attribute attr = Expressions.attribute(field());
if (attr instanceof AggregateFunctionAttribute) {
AggregateFunctionAttribute afa = (AggregateFunctionAttribute) attr;
functionId = afa.functionId();
}
return new ScalarFunctionAttribute(location(), name(), dataType(), id(), asScript(), orderBy(), asProcessor());
}
protected ScriptTemplate asScript() {
if (field.foldable()) {
return asScriptFromFoldable(field);
}
Attribute attr = Expressions.attribute(field());
if (attr != null) {
if (attr instanceof ScalarFunctionAttribute) {
return asScriptFrom((ScalarFunctionAttribute) attr);
}
if (attr instanceof AggregateFunctionAttribute) {
return asScriptFrom((AggregateFunctionAttribute) attr);
}
// fall-back to the field attribute
return asScriptFrom((FieldAttribute) attr);
}
throw new SqlIllegalArgumentException("Cannot evaluate script for field %s", field());
}
protected ScriptTemplate asScriptFromFoldable(Expression foldable) {
return new ScriptTemplate(formatTemplate("{}"),
paramsBuilder().variable(foldable.fold()).build(),
foldable.dataType());
}
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
ScriptTemplate nested = scalar.script();
Params p = paramsBuilder().script(nested.params()).build();
return new ScriptTemplate(chainScalarTemplate(nested.template()), p, dataType());
}
protected abstract ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate);
protected abstract ScriptTemplate asScriptFrom(FieldAttribute field);
protected abstract String chainScalarTemplate(String template);
}

View File

@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Add extends ArithmeticFunction {
public Add(Location location, Expression left, Expression right) {
super(location, left, right, BinaryArithmeticOperation.ADD);
}
@Override
public Number fold() {
return Arithmetics.add((Number) left().fold(), (Number) right().fold());
}
}

View File

@ -0,0 +1,101 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Literal;
import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import java.util.Locale;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
public abstract class ArithmeticFunction extends BinaryScalarFunction {
private BinaryArithmeticOperation operation;
ArithmeticFunction(Location location, Expression left, Expression right, BinaryArithmeticOperation operation) {
super(location, left, right);
this.operation = operation;
}
public BinaryArithmeticOperation operation() {
return operation;
}
@Override
public DataType dataType() {
// left and right have to be compatible, so either one works
return left().dataType();
}
@Override
protected TypeResolution resolveType() {
if (!childrenResolved()) {
return new TypeResolution("Unresolved children");
}
DataType l = left().dataType();
DataType r = right().dataType();
TypeResolution resolution = resolveInputType(l);
if (resolution == TypeResolution.TYPE_RESOLVED) {
return resolveInputType(r);
}
return resolution;
}
protected TypeResolution resolveInputType(DataType inputType) {
return inputType.isNumeric() ? TypeResolution.TYPE_RESOLVED
: new TypeResolution("'%s' requires a numeric type, not %s", operation, inputType.sqlName());
}
@Override
protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) {
return new ScriptTemplate(format(Locale.ROOT, "(%s) %s (%s)", leftScript.template(), operation.symbol(), rightScript.template()),
paramsBuilder().script(leftScript.params()).script(rightScript.params()).build(),
dataType());
}
protected final BinaryArithmeticProcessorDefinition makeProcessor() {
return new BinaryArithmeticProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(left()), ProcessorDefinitions.toProcessorDefinition(right()), operation);
}
@Override
public String name() {
return toString();
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(left());
if (!(left() instanceof Literal)) {
sb.insert(0, "(");
sb.append(")");
}
sb.append(" ");
sb.append(operation);
sb.append(" ");
int pos = sb.length();
sb.append(right());
if (!(right() instanceof Literal)) {
sb.insert(pos, "(");
sb.append(")");
}
return sb.toString();
}
protected boolean useParanthesis() {
return !(left() instanceof Literal) || !(right() instanceof Literal);
}
}

View File

@ -0,0 +1,105 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
/**
* Arithmetic operations using the type widening rules of JLS 5.6.2, namely
* widening to double, float, long or int, in this order.
*/
abstract class Arithmetics {
static Number add(Number l, Number r) {
if (l instanceof Double || r instanceof Double) {
return Double.valueOf(l.doubleValue() + r.doubleValue());
}
if (l instanceof Float || r instanceof Float) {
return Float.valueOf(l.floatValue() + r.floatValue());
}
if (l instanceof Long || r instanceof Long) {
return Long.valueOf(Math.addExact(l.longValue(), r.longValue()));
}
return Integer.valueOf(Math.addExact(l.intValue(), r.intValue()));
}
static Number sub(Number l, Number r) {
if (l instanceof Double || r instanceof Double) {
return Double.valueOf(l.doubleValue() - r.doubleValue());
}
if (l instanceof Float || r instanceof Float) {
return Float.valueOf(l.floatValue() - r.floatValue());
}
if (l instanceof Long || r instanceof Long) {
return Long.valueOf(Math.subtractExact(l.longValue(), r.longValue()));
}
return Integer.valueOf(Math.subtractExact(l.intValue(), r.intValue()));
}
static Number mul(Number l, Number r) {
if (l instanceof Double || r instanceof Double) {
return Double.valueOf(l.doubleValue() * r.doubleValue());
}
if (l instanceof Float || r instanceof Float) {
return Float.valueOf(l.floatValue() * r.floatValue());
}
if (l instanceof Long || r instanceof Long) {
return Long.valueOf(Math.multiplyExact(l.longValue(), r.longValue()));
}
return Integer.valueOf(Math.multiplyExact(l.intValue(), r.intValue()));
}
static Number div(Number l, Number r) {
if (l instanceof Double || r instanceof Double) {
return l.doubleValue() / r.doubleValue();
}
if (l instanceof Float || r instanceof Float) {
return l.floatValue() / r.floatValue();
}
if (l instanceof Long || r instanceof Long) {
return l.longValue() / r.longValue();
}
return l.intValue() / r.intValue();
}
static Number mod(Number l, Number r) {
if (l instanceof Long || r instanceof Long) {
return Long.valueOf(Math.floorMod(l.longValue(), r.longValue()));
}
if (l instanceof Double || r instanceof Double) {
return Double.valueOf(l.doubleValue() % r.doubleValue());
}
if (l instanceof Float || r instanceof Float) {
return Float.valueOf(l.floatValue() % r.floatValue());
}
return Math.floorMod(l.intValue(), r.intValue());
}
static Number negate(Number n) {
if (n instanceof Double) {
double d = n.doubleValue();
if (d == Double.MIN_VALUE) {
throw new ArithmeticException("double overflow");
}
return Double.valueOf(-n.doubleValue());
}
if (n instanceof Float) {
float f = n.floatValue();
if (f == Float.MIN_VALUE) {
throw new ArithmeticException("float overflow");
}
return Float.valueOf(-n.floatValue());
}
if (n instanceof Long) {
return Long.valueOf(Math.negateExact(n.longValue()));
}
return Integer.valueOf(Math.negateExact(n.intValue()));
}
}
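A quick illustration of the widening order described in the class comment (a sketch; the helpers are package-private, so this only runs from within the same package):

Number a = Arithmetics.add(1, 2L);    // Long operand wins over int    -> Long 3
Number b = Arithmetics.mul(2, 1.5f);  // Float operand wins over int   -> Float 3.0
Number c = Arithmetics.div(7, 2);     // both ints: integer division   -> Integer 3
Number d = Arithmetics.sub(1.0d, 2);  // Double operand wins           -> Double -1.0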

View File

@ -0,0 +1,106 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.BinaryProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import java.util.function.BiFunction;
import static java.lang.String.format;
public class BinaryArithmeticProcessor extends BinaryProcessor {
public enum BinaryArithmeticOperation {
ADD(Arithmetics::add, "+"),
SUB(Arithmetics::sub, "-"),
MUL(Arithmetics::mul, "*"),
DIV(Arithmetics::div, "/"),
MOD(Arithmetics::mod, "%");
private final BiFunction<Number, Number, Number> process;
private final String symbol;
BinaryArithmeticOperation(BiFunction<Number, Number, Number> process, String symbol) {
this.process = process;
this.symbol = symbol;
}
public String symbol() {
return symbol;
}
public final Number apply(Number left, Number right) {
return process.apply(left, right);
}
@Override
public String toString() {
return symbol;
}
}
public static final String NAME = "ab";
private final BinaryArithmeticOperation operation;
public BinaryArithmeticProcessor(Processor left, Processor right, BinaryArithmeticOperation operation) {
super(left, right);
this.operation = operation;
}
public BinaryArithmeticProcessor(StreamInput in) throws IOException {
super(in);
operation = in.readEnum(BinaryArithmeticOperation.class);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected void doWrite(StreamOutput out) throws IOException {
out.writeEnum(operation);
}
@Override
protected Object doProcess(Object left, Object right) {
return operation.apply((Number) left, (Number) right);
}
@Override
public int hashCode() {
return operation.hashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
BinaryArithmeticProcessor other = (BinaryArithmeticProcessor) obj;
return Objects.equals(operation, other.operation)
&& Objects.equals(left(), other.left())
&& Objects.equals(right(), other.right());
}
@Override
public String toString() {
return format(Locale.ROOT, "(%s %s %s)", left(), operation, right());
}
}
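The operation enum is usable on its own (the folding code in Add/Sub/Mul/Div/Mod goes through the same Arithmetics helpers); a small sketch with illustrative values:

BinaryArithmeticOperation op = BinaryArithmeticOperation.MOD;
Number three = op.apply(7, 4);             // delegates to Arithmetics::mod -> Integer 3
String text = "7 " + op.symbol() + " 4";   // "7 % 4"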

View File

@ -0,0 +1,53 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import java.util.Objects;
public class BinaryArithmeticProcessorDefinition extends BinaryProcessorDefinition {
private final BinaryArithmeticOperation operation;
public BinaryArithmeticProcessorDefinition(Expression expression, ProcessorDefinition left, ProcessorDefinition right, BinaryArithmeticOperation operation) {
super(expression, left, right);
this.operation = operation;
}
public BinaryArithmeticOperation operation() {
return operation;
}
@Override
public BinaryArithmeticProcessor asProcessor() {
return new BinaryArithmeticProcessor(left().asProcessor(), right().asProcessor(), operation);
}
@Override
public int hashCode() {
return Objects.hash(left(), right(), operation);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
BinaryArithmeticProcessorDefinition other = (BinaryArithmeticProcessorDefinition) obj;
return Objects.equals(operation, other.operation)
&& Objects.equals(left(), other.left())
&& Objects.equals(right(), other.right());
}
}

View File

@ -0,0 +1,29 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypeConversion;
public class Div extends ArithmeticFunction {
public Div(Location location, Expression left, Expression right) {
super(location, left, right, BinaryArithmeticOperation.DIV);
}
@Override
public Object fold() {
return Arithmetics.div((Number) left().fold(), (Number) right().fold());
}
@Override
public DataType dataType() {
return DataTypeConversion.commonType(left().dataType(), right().dataType());
}
}

View File

@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Mod extends ArithmeticFunction {
public Mod(Location location, Expression left, Expression right) {
super(location, left, right, BinaryArithmeticOperation.MOD);
}
@Override
public Object fold() {
return Arithmetics.mod((Number) left().fold(), (Number) right().fold());
}
}

View File

@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Mul extends ArithmeticFunction {
public Mul(Location location, Expression left, Expression right) {
super(location, left, right, BinaryArithmeticOperation.MUL);
}
@Override
public Object fold() {
return Arithmetics.mul((Number) left().fold(), (Number) right().fold());
}
}

View File

@ -0,0 +1,68 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.UnaryArithmeticProcessor.UnaryArithmeticOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public class Neg extends UnaryScalarFunction {
public Neg(Location location, Expression field) {
super(location, field);
}
@Override
protected TypeResolution resolveType() {
return Expressions.typeMustBeNumeric(field());
}
@Override
public Object fold() {
return Arithmetics.negate((Number) field().fold());
}
@Override
public DataType dataType() {
return field().dataType();
}
@Override
protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
return new ScriptTemplate(formatTemplate("{}"),
paramsBuilder().agg(aggregate.functionId(), aggregate.propertyPath()).build(),
dataType());
}
@Override
protected ScriptTemplate asScriptFrom(FieldAttribute field) {
return new ScriptTemplate(formatTemplate("doc[{}].value"),
paramsBuilder().variable(field.name()).build(),
dataType());
}
@Override
protected String chainScalarTemplate(String template) {
return template;
}
@Override
protected ProcessorDefinition makeProcessor() {
return new UnaryProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(field()), new UnaryArithmeticProcessor(UnaryArithmeticOperation.NEGATE));
}
}

View File

@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Sub extends ArithmeticFunction {
public Sub(Location location, Expression left, Expression right) {
super(location, left, right, BinaryArithmeticOperation.SUB);
}
@Override
public Object fold() {
return Arithmetics.sub((Number) left().fold(), (Number) right().fold());
}
}

View File

@ -0,0 +1,72 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
import java.io.IOException;
import java.util.function.Function;
public class UnaryArithmeticProcessor implements Processor {
public enum UnaryArithmeticOperation {
NEGATE(Arithmetics::negate);
private final Function<Number, Number> process;
UnaryArithmeticOperation(Function<Number, Number> process) {
this.process = process;
}
public final Number apply(Number number) {
return process.apply(number);
}
public String symbol() {
return "-";
}
}
public static final String NAME = "au";
private final UnaryArithmeticOperation operation;
public UnaryArithmeticProcessor(UnaryArithmeticOperation operation) {
this.operation = operation;
}
public UnaryArithmeticProcessor(StreamInput in) throws IOException {
operation = in.readEnum(UnaryArithmeticOperation.class);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeEnum(operation);
}
@Override
public Object process(Object input) {
if (input instanceof Number) {
return operation.apply((Number) input);
}
throw new SqlIllegalArgumentException("A number is required; received %s", input);
}
@Override
public String toString() {
return operation.symbol() + super.toString();
}
}
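For illustration, the runtime side can be exercised directly; a sketch using only the constructor and process method shown above:

UnaryArithmeticProcessor neg = new UnaryArithmeticProcessor(UnaryArithmeticOperation.NEGATE);
neg.process(42);      // -> -42
neg.process(2.5d);    // -> -2.5
neg.process("abc");   // throws SqlIllegalArgumentException: a number is required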

View File

@ -1,35 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.joda.time.DateTimeFieldType;
import org.joda.time.ReadableDateTime;
/**
* Extracts portions of {@link ReadableDateTime}s. Note that the position in the enum is used for serialization.
*/
public enum DateTimeExtractor {
DAY_OF_MONTH(DateTimeFieldType.dayOfMonth()),
DAY_OF_WEEK(DateTimeFieldType.dayOfWeek()),
DAY_OF_YEAR(DateTimeFieldType.dayOfYear()),
HOUR_OF_DAY(DateTimeFieldType.hourOfDay()),
MINUTE_OF_DAY(DateTimeFieldType.minuteOfDay()),
MINUTE_OF_HOUR(DateTimeFieldType.minuteOfHour()),
MONTH_OF_YEAR(DateTimeFieldType.monthOfYear()),
SECOND_OF_MINUTE(DateTimeFieldType.secondOfMinute()),
WEEK_OF_YEAR(DateTimeFieldType.weekOfWeekyear()),
YEAR(DateTimeFieldType.year());
private final DateTimeFieldType field;
DateTimeExtractor(DateTimeFieldType field) {
this.field = field;
}
public int extract(ReadableDateTime dt) {
return dt.get(field);
}
}

View File

@ -10,9 +10,11 @@ import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.aware.TimeZoneAware;
import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.DateTimeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
@ -26,12 +28,12 @@ import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public abstract class DateTimeFunction extends ScalarFunction implements TimeZoneAware {
public abstract class DateTimeFunction extends UnaryScalarFunction implements TimeZoneAware {
private final DateTimeZone timeZone;
public DateTimeFunction(Location location, Expression argument, DateTimeZone timeZone) {
super(location, argument);
public DateTimeFunction(Location location, Expression field, DateTimeZone timeZone) {
super(location, field);
this.timeZone = timeZone;
}
@ -39,11 +41,15 @@ public abstract class DateTimeFunction extends ScalarFunction implements TimeZon
return timeZone;
}
public boolean foldable() {
return field().foldable();
}
@Override
protected TypeResolution resolveType() {
return argument().dataType().same(DataTypes.DATE) ?
return field().dataType().same(DataTypes.DATE) ?
TypeResolution.TYPE_RESOLVED :
new TypeResolution("Function '%s' cannot be applied on a non-date expression ('%s' of type '%s')", functionName(), Expressions.name(argument()), argument().dataType().esName());
new TypeResolution("Function '%s' cannot be applied on a non-date expression ('%s' of type '%s')", functionName(), Expressions.name(field()), field().dataType().esName());
}
@Override
@ -84,9 +90,8 @@ public abstract class DateTimeFunction extends ScalarFunction implements TimeZon
return getClass().getSimpleName();
}
@Override
public final ColumnProcessor asProcessor() {
return new DateTimeProcessor(extractor(), timeZone);
protected final ProcessorDefinition makeProcessor() {
return new UnaryProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(field()), new DateTimeProcessor(extractor(), timeZone));
}
protected abstract DateTimeExtractor extractor();

View File

@ -3,19 +3,45 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeExtractor;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
import org.joda.time.DateTime;
import org.joda.time.DateTimeFieldType;
import org.joda.time.DateTimeZone;
import org.joda.time.ReadableDateTime;
import java.io.IOException;
import java.util.Objects;
public class DateTimeProcessor implements ColumnProcessor {
public static final String NAME = "d";
public class DateTimeProcessor implements Processor {
public enum DateTimeExtractor {
DAY_OF_MONTH(DateTimeFieldType.dayOfMonth()),
DAY_OF_WEEK(DateTimeFieldType.dayOfWeek()),
DAY_OF_YEAR(DateTimeFieldType.dayOfYear()),
HOUR_OF_DAY(DateTimeFieldType.hourOfDay()),
MINUTE_OF_DAY(DateTimeFieldType.minuteOfDay()),
MINUTE_OF_HOUR(DateTimeFieldType.minuteOfHour()),
MONTH_OF_YEAR(DateTimeFieldType.monthOfYear()),
SECOND_OF_MINUTE(DateTimeFieldType.secondOfMinute()),
WEEK_OF_YEAR(DateTimeFieldType.weekOfWeekyear()),
YEAR(DateTimeFieldType.year());
private final DateTimeFieldType field;
DateTimeExtractor(DateTimeFieldType field) {
this.field = field;
}
public int extract(ReadableDateTime dt) {
return dt.get(field);
}
}
public static final String NAME = "dt";
private final DateTimeExtractor extractor;
private final DateTimeZone timeZone;
@ -25,7 +51,7 @@ public class DateTimeProcessor implements ColumnProcessor {
this.timeZone = timeZone;
}
DateTimeProcessor(StreamInput in) throws IOException {
public DateTimeProcessor(StreamInput in) throws IOException {
extractor = in.readEnum(DateTimeExtractor.class);
timeZone = DateTimeZone.forID(in.readString());
}
@ -46,7 +72,11 @@ public class DateTimeProcessor implements ColumnProcessor {
}
@Override
public Object apply(Object l) {
public Object process(Object l) {
if (l == null) {
return null;
}
ReadableDateTime dt;
// most dates are returned as long
if (l instanceof Long) {
@ -55,28 +85,29 @@ public class DateTimeProcessor implements ColumnProcessor {
else {
dt = (ReadableDateTime) l;
}
if (!timeZone.getID().equals("UTC")) {
if (!DateTimeZone.UTC.equals(timeZone)) {
dt = dt.toDateTime().withZone(timeZone);
}
return extractor.extract(dt);
}
@Override
public int hashCode() {
return Objects.hash(extractor, timeZone);
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
DateTimeProcessor other = (DateTimeProcessor) obj;
return extractor == other.extractor;
}
@Override
public int hashCode() {
return extractor.hashCode();
return Objects.equals(extractor, other.extractor)
&& Objects.equals(timeZone, other.timeZone);
}
@Override
public String toString() {
return extractor.toString();
}
}
}
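Not part of the diff — a minimal usage sketch of the relocated DateTimeExtractor enum and the now-public DateTimeProcessor constructor; the date, hour values, and time zone below are illustrative only, and only API shown in this file is used.

import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

class DateTimeProcessorSketch {
    public static void main(String[] args) {
        DateTime dt = new DateTime(2017, 9, 12, 19, 13, DateTimeZone.UTC);
        // extract directly through the enum
        int hourUtc = DateTimeExtractor.HOUR_OF_DAY.extract(dt);                  // 19
        // or through the processor, which also shifts non-UTC time zones
        Object shifted = new DateTimeProcessor(DateTimeExtractor.HOUR_OF_DAY,
                DateTimeZone.forID("Europe/Athens")).process(dt);                 // 22 while DST is in effect
        System.out.println(hourUtc + " / " + shifted);
    }
}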

View File

@ -6,14 +6,15 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.DateTimeZone;
import java.time.temporal.ChronoField;
public class DayOfMonth extends DateTimeFunction {
public DayOfMonth(Location location, Expression argument, DateTimeZone timeZone) {
super(location, argument, timeZone);
public DayOfMonth(Location location, Expression field, DateTimeZone timeZone) {
super(location, field, timeZone);
}
@Override

View File

@ -6,14 +6,15 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.DateTimeZone;
import java.time.temporal.ChronoField;
public class DayOfWeek extends DateTimeFunction {
public DayOfWeek(Location location, Expression argument, DateTimeZone timeZone) {
super(location, argument, timeZone);
public DayOfWeek(Location location, Expression field, DateTimeZone timeZone) {
super(location, field, timeZone);
}
@Override

View File

@ -6,14 +6,15 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.DateTimeZone;
import java.time.temporal.ChronoField;
public class DayOfYear extends DateTimeFunction {
public DayOfYear(Location location, Expression argument, DateTimeZone timeZone) {
super(location, argument, timeZone);
public DayOfYear(Location location, Expression field, DateTimeZone timeZone) {
super(location, field, timeZone);
}
@Override

View File

@ -6,14 +6,15 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.DateTimeZone;
import java.time.temporal.ChronoField;
public class HourOfDay extends DateTimeFunction {
public HourOfDay(Location location, Expression argument, DateTimeZone timeZone) {
super(location, argument, timeZone);
public HourOfDay(Location location, Expression field, DateTimeZone timeZone) {
super(location, field, timeZone);
}
@Override

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.DateTimeZone;
@ -13,8 +14,8 @@ import java.time.temporal.ChronoField;
public class MinuteOfDay extends DateTimeFunction {
public MinuteOfDay(Location location, Expression argument, DateTimeZone timeZone) {
super(location, argument, timeZone);
public MinuteOfDay(Location location, Expression field, DateTimeZone timeZone) {
super(location, field, timeZone);
}
@Override

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.DateTimeZone;

View File

@ -6,14 +6,15 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.DateTimeZone;
import java.time.temporal.ChronoField;
public class MonthOfYear extends DateTimeFunction {
public MonthOfYear(Location location, Expression argument, DateTimeZone timeZone) {
super(location, argument, timeZone);
public MonthOfYear(Location location, Expression field, DateTimeZone timeZone) {
super(location, field, timeZone);
}
@Override

View File

@ -6,14 +6,15 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.DateTimeZone;
import java.time.temporal.ChronoField;
public class SecondOfMinute extends DateTimeFunction {
public SecondOfMinute(Location location, Expression argument, DateTimeZone timeZone) {
super(location, argument, timeZone);
public SecondOfMinute(Location location, Expression field, DateTimeZone timeZone) {
super(location, field, timeZone);
}
@Override

View File

@ -6,14 +6,15 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.DateTimeZone;
import java.time.temporal.ChronoField;
public class WeekOfWeekYear extends DateTimeFunction {
public WeekOfWeekYear(Location location, Expression argument, DateTimeZone timeZone) {
super(location, argument, timeZone);
public WeekOfWeekYear(Location location, Expression field, DateTimeZone timeZone) {
super(location, field, timeZone);
}
@Override

View File

@ -6,14 +6,15 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.tree.Location;
import org.joda.time.DateTimeZone;
import java.time.temporal.ChronoField;
public class Year extends DateTimeFunction {
public Year(Location location, Expression argument, DateTimeZone timeZone) {
super(location, argument, timeZone);
public Year(Location location, Expression field, DateTimeZone timeZone) {
super(location, field, timeZone);
}
@Override
@ -28,7 +29,7 @@ public class Year extends DateTimeFunction {
@Override
public Expression orderBy() {
return argument();
return field();
}
@Override
@ -39,5 +40,5 @@ public class Year extends DateTimeFunction {
@Override
protected DateTimeExtractor extractor() {
return DateTimeExtractor.YEAR;
}
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class ACos extends MathFunction {
public ACos(Location location, Expression argument) {
super(location, argument);
public ACos(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.ACOS;
protected MathOperation operation() {
return MathOperation.ACOS;
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class ASin extends MathFunction {
public ASin(Location location, Expression argument) {
super(location, argument);
public ASin(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.ASIN;
protected MathOperation operation() {
return MathOperation.ASIN;
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class ATan extends MathFunction {
public ATan(Location location, Expression argument) {
super(location, argument);
public ATan(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.ATAN;
protected MathOperation operation() {
return MathOperation.ATAN;
}
}

View File

@ -6,21 +6,22 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
public class Abs extends MathFunction {
public Abs(Location location, Expression argument) {
super(location, argument);
public Abs(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.ABS;
protected MathOperation operation() {
return MathOperation.ABS;
}
@Override
public DataType dataType() {
return argument().dataType();
return field().dataType();
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Cbrt extends MathFunction {
public Cbrt(Location location, Expression argument) {
super(location, argument);
public Cbrt(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.CBRT;
protected MathOperation operation() {
return MathOperation.CBRT;
}
}

View File

@ -6,15 +6,23 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypeConversion;
public class Ceil extends MathFunction {
public Ceil(Location location, Expression argument) {
super(location, argument);
public Ceil(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.CEIL;
protected MathOperation operation() {
return MathOperation.CEIL;
}
@Override
public DataType dataType() {
return DataTypeConversion.asInteger(field().dataType());
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Cos extends MathFunction {
public Cos(Location location, Expression argument) {
super(location, argument);
public Cos(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.COS;
protected MathOperation operation() {
return MathOperation.COS;
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Cosh extends MathFunction {
public Cosh(Location location, Expression argument) {
super(location, argument);
public Cosh(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.COSH;
protected MathOperation operation() {
return MathOperation.COSH;
}
}

View File

@ -6,11 +6,12 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Degrees extends MathFunction {
public Degrees(Location location, Expression argument) {
super(location, argument);
public Degrees(Location location, Expression field) {
super(location, field);
}
@Override
@ -19,7 +20,7 @@ public class Degrees extends MathFunction {
}
@Override
protected MathProcessor processor() {
return MathProcessor.DEGREES;
protected MathOperation operation() {
return MathOperation.DEGREES;
}
}

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.util.StringUtils;
@ -30,7 +31,7 @@ public class E extends MathFunction {
}
@Override
protected MathProcessor processor() {
return MathProcessor.E;
protected MathOperation operation() {
return MathOperation.E;
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Exp extends MathFunction {
public Exp(Location location, Expression argument) {
super(location, argument);
public Exp(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.EXP;
protected MathOperation operation() {
return MathOperation.EXP;
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Expm1 extends MathFunction {
public Expm1(Location location, Expression argument) {
super(location, argument);
public Expm1(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.EXPM1;
protected MathOperation operation() {
return MathOperation.EXPM1;
}
}

View File

@ -6,15 +6,23 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypeConversion;
public class Floor extends MathFunction {
public Floor(Location location, Expression argument) {
super(location, argument);
public Floor(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.FLOOR;
protected MathOperation operation() {
return MathOperation.FLOOR;
}
@Override
public DataType dataType() {
return DataTypeConversion.asInteger(field().dataType());
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Log extends MathFunction {
public Log(Location location, Expression argument) {
super(location, argument);
public Log(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.LOG;
protected MathOperation operation() {
return MathOperation.LOG;
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Log10 extends MathFunction {
public Log10(Location location, Expression argument) {
super(location, argument);
public Log10(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.LOG10;
protected MathOperation operation() {
return MathOperation.LOG10;
}
}

View File

@ -8,10 +8,12 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.ColumnProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.MathFunctionProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
@ -23,27 +25,27 @@ import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public abstract class MathFunction extends ScalarFunction {
public abstract class MathFunction extends UnaryScalarFunction {
protected MathFunction(Location location) {
super(location);
}
protected MathFunction(Location location, Expression argument) {
super(location, argument);
protected MathFunction(Location location, Expression field) {
super(location, field);
}
public boolean foldable() {
return argument().foldable();
return field().foldable();
}
@Override
protected String chainScalarTemplate(String template) {
return createTemplate(template);
}
@Override
// TODO: isn't chain Scalar Template enough?
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
ScriptTemplate nested = scalar.script();
return new ScriptTemplate(createTemplate(nested.template()),
@ -79,9 +81,9 @@ public abstract class MathFunction extends ScalarFunction {
}
@Override
public final ColumnProcessor asProcessor() {
return new MathFunctionProcessor(processor());
protected final ProcessorDefinition makeProcessor() {
return new UnaryProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(field()), new MathProcessor(operation()));
}
protected abstract MathProcessor processor();
protected abstract MathOperation operation();
}

View File

@ -5,56 +5,112 @@
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
import java.io.IOException;
import java.util.function.DoubleFunction;
import java.util.function.Function;
/**
* Applies a math function. Note that the order of the enum constants is used for serialization.
*/
public enum MathProcessor {
ABS((Object l) -> {
if (l instanceof Float) {
return Math.abs(((Float) l).floatValue());
public class MathProcessor implements Processor {
public enum MathOperation {
ABS((Object l) -> {
if (l instanceof Float) {
return Math.abs(((Float) l).floatValue());
}
if (l instanceof Double) {
return Math.abs(((Double) l).doubleValue());
}
long lo = ((Number) l).longValue();
return lo >= 0 ? lo : lo == Long.MIN_VALUE ? Long.MAX_VALUE : -lo;
}),
ACOS(Math::acos),
ASIN(Math::asin),
ATAN(Math::atan),
CBRT(Math::cbrt),
CEIL(Math::ceil),
COS(Math::cos),
COSH(Math::cosh),
DEGREES(Math::toDegrees),
E((Object l) -> Math.E),
EXP(Math::exp),
EXPM1(Math::expm1),
FLOOR(Math::floor),
LOG(Math::log),
LOG10(Math::log10),
PI((Object l) -> Math.PI),
RADIANS(Math::toRadians),
ROUND((DoubleFunction<Object>) Math::round),
SIN(Math::sin),
SINH(Math::sinh),
SQRT(Math::sqrt),
TAN(Math::tan);
private final Function<Object, Object> apply;
MathOperation(Function<Object, Object> apply) {
this.apply = apply;
}
if (l instanceof Double) {
return Math.abs(((Double) l).doubleValue());
MathOperation(DoubleFunction<Object> apply) {
this.apply = (Object l) -> apply.apply(((Number) l).doubleValue());
}
long lo = ((Number) l).longValue();
return lo >= 0 ? lo : lo == Long.MIN_VALUE ? Long.MAX_VALUE : -lo;
}),
ACOS(fromDouble(Math::acos)),
ASIN(fromDouble(Math::asin)),
ATAN(fromDouble(Math::atan)),
CBRT(fromDouble(Math::cbrt)),
CEIL(fromDouble(Math::ceil)),
COS(fromDouble(Math::cos)),
COSH(fromDouble(Math::cosh)),
DEGREES(fromDouble(Math::toDegrees)),
E((Object l) -> Math.E),
EXP(fromDouble(Math::exp)),
EXPM1(fromDouble(Math::expm1)),
FLOOR(fromDouble(Math::floor)),
LOG(fromDouble(Math::log)),
LOG10(fromDouble(Math::log10)),
PI((Object l) -> Math.PI),
RADIANS(fromDouble(Math::toRadians)),
ROUND(fromDouble(Math::round)),
SIN(fromDouble(Math::sin)),
SINH(fromDouble(Math::sinh)),
SQRT(fromDouble(Math::sqrt)),
TAN(fromDouble(Math::tan));
private final Function<Object, Object> apply;
public final Object apply(Object l) {
return apply.apply(l);
}
}
public static final String NAME = "m";
MathProcessor(Function<Object, Object> apply) {
this.apply = apply;
private final MathOperation processor;
public MathProcessor(MathOperation processor) {
this.processor = processor;
}
private static Function<Object, Object> fromDouble(DoubleFunction<Object> apply) {
return (Object l) -> apply.apply(((Number) l).doubleValue());
public MathProcessor(StreamInput in) throws IOException {
processor = in.readEnum(MathOperation.class);
}
public final Object apply(Object l) {
return apply.apply(l);
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeEnum(processor);
}
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public Object process(Object input) {
return processor.apply(input);
}
MathOperation processor() {
return processor;
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
MathProcessor other = (MathProcessor) obj;
return processor == other.processor;
}
@Override
public int hashCode() {
return processor.hashCode();
}
@Override
public String toString() {
return processor.toString();
}
}
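Not part of the diff — a hedged sketch of the new split, with MathOperation carrying the arithmetic and MathProcessor acting as the wrapper that process() delegates to and that is written over the wire via writeTo/readEnum; the input values are illustrative only.

import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;

class MathProcessorSketch {
    public static void main(String[] args) {
        Object abs  = MathOperation.ABS.apply(-5);                        // 5 as a long: non-floating input goes through longValue()
        Object ceil = MathOperation.CEIL.apply(1.2d);                     // 2.0: the operation itself still yields a double
        Object sqrt = new MathProcessor(MathOperation.SQRT).process(9);   // 3.0: same operation, run through the processor
        System.out.println(abs + " " + ceil + " " + sqrt);
    }
}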

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.util.StringUtils;
@ -30,7 +31,7 @@ public class Pi extends MathFunction {
}
@Override
protected MathProcessor processor() {
return MathProcessor.PI;
protected MathOperation operation() {
return MathOperation.PI;
}
}

View File

@ -6,11 +6,12 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Radians extends MathFunction {
public Radians(Location location, Expression argument) {
super(location, argument);
public Radians(Location location, Expression field) {
super(location, field);
}
@Override
@ -19,7 +20,7 @@ public class Radians extends MathFunction {
}
@Override
protected MathProcessor processor() {
return MathProcessor.RADIANS;
protected MathOperation operation() {
return MathOperation.RADIANS;
}
}

View File

@ -6,22 +6,23 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;
import org.elasticsearch.xpack.sql.type.DataTypeConversion;
public class Round extends MathFunction {
public Round(Location location, Expression argument) {
super(location, argument);
public Round(Location location, Expression field) {
super(location, field);
}
@Override
protected MathOperation operation() {
return MathOperation.ROUND;
}
@Override
public DataType dataType() {
return DataTypes.LONG;
}
@Override
protected MathProcessor processor() {
return MathProcessor.ROUND;
return DataTypeConversion.asInteger(field().dataType());
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Sin extends MathFunction {
public Sin(Location location, Expression argument) {
super(location, argument);
public Sin(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.SIN;
protected MathOperation operation() {
return MathOperation.SIN;
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Sinh extends MathFunction {
public Sinh(Location location, Expression argument) {
super(location, argument);
public Sinh(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.SINH;
protected MathOperation operation() {
return MathOperation.SINH;
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Sqrt extends MathFunction {
public Sqrt(Location location, Expression argument) {
super(location, argument);
public Sqrt(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.SQRT;
protected MathOperation operation() {
return MathOperation.SQRT;
}
}

View File

@ -6,15 +6,16 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.tree.Location;
public class Tan extends MathFunction {
public Tan(Location location, Expression argument) {
super(location, argument);
public Tan(Location location, Expression field) {
super(location, field);
}
@Override
protected MathProcessor processor() {
return MathProcessor.TAN;
protected MathOperation operation() {
return MathOperation.TAN;
}
}

View File

@ -0,0 +1,48 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition;
import org.elasticsearch.xpack.sql.expression.Expression;
import java.util.Objects;
public class AggPathInput extends UnresolvedInput<String> {
private final String innerKey;
public AggPathInput(Expression expression, String context) {
this(expression, context, null);
}
public AggPathInput(Expression expression, String context, String innerKey) {
super(expression, context);
this.innerKey = innerKey;
}
public String innerKey() {
return innerKey;
}
@Override
public int hashCode() {
return Objects.hash(context(), innerKey);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
AggPathInput other = (AggPathInput) obj;
return Objects.equals(context(), other.context())
&& Objects.equals(innerKey, other.innerKey);
}
}
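Not part of the diff — a small sketch of AggPathInput's equality contract as written above: the originating expression is not consulted by equals/hashCode, so two inputs that resolve to the same aggregation path and inner key compare equal; the path string, inner key, and the Expression parameters are placeholders.

import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggPathInput;

class AggPathInputSketch {
    // e and f stand for two different expressions that end up reading the same aggregation value
    static boolean sameInput(Expression e, Expression f) {
        AggPathInput a = new AggPathInput(e, "stats.avg", "value");
        AggPathInput b = new AggPathInput(f, "stats.avg", "value");
        return a.equals(b) && a.hashCode() == b.hashCode();   // true
    }
}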

Some files were not shown because too many files have changed in this diff.