SQL: Failing Group By queries due to different ExpressionIds (#43072)

Fix an issue that arises from using ExpressionIds as keys in a lookup map
that helps the QueryTranslator identify the grouping columns. The problem is
that the same expression appearing in different parts of the query (the SELECT
clause and the GROUP BY clause) ends up with different ExpressionIds, so the
lookup fails. Therefore, instead of ExpressionIds, use the hashCode() of the
NamedExpression as the lookup key.

Fixes: #41159
Fixes: #40001
Fixes: #40240
Fixes: #33361
Fixes: #46316
Fixes: #36074
Fixes: #34543
Fixes: #37044

Fixes: #42041
(cherry picked from commit 3c38ea555984fcd2c6bf9e39d0f47a01b09e7c48)
This commit is contained in:
emasab 2019-10-31 14:15:04 +01:00 committed by Marios Trivyzas
parent 7ea74918e1
commit 185e067442
26 changed files with 627 additions and 99 deletions

View File

@ -246,6 +246,194 @@ TRUNCATE(YEAR("birth_date"), -2)
null null
1900 1900
; ;
// Fails for H2
// Groups by ordinal position (GROUP BY 1) over a CAST of nested scalar calls;
// regression test for the SELECT-clause and GROUP-BY-clause copies of the same
// expression getting different ExpressionIds and breaking the grouping lookup.
groupByCastScalarWithNumericRef
SELECT CAST(ABS(EXTRACT(YEAR FROM "birth_date")) AS BIGINT) FROM test_emp GROUP BY 1 ORDER BY 1 NULLS FIRST;
CAST(ABS(EXTRACT(YEAR FROM "birth_date")) AS BIGINT):l
------------------------------------------------------
null
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
;
// Same scalar chain written with ODBC-style CONVERT, repeated verbatim in
// SELECT, GROUP BY and ORDER BY; all three occurrences must resolve to a
// single grouping key.
groupByConvertScalar
SELECT CONVERT(ABS(EXTRACT(YEAR FROM "birth_date")), SQL_BIGINT) FROM test_emp GROUP BY CONVERT(ABS(EXTRACT(YEAR FROM "birth_date")), SQL_BIGINT) ORDER BY CONVERT(ABS(EXTRACT(YEAR FROM "birth_date")), SQL_BIGINT) NULLS FIRST;
CONVERT(ABS(EXTRACT(YEAR FROM "birth_date")), SQL_BIGINT):l
-----------------------------------------------------------
null
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
;
// CONVERT projected under an alias that is then referenced (quoted, since
// "convert" is a keyword) in both GROUP BY and ORDER BY.
groupByConvertScalarWithAlias
SELECT CONVERT(ABS(EXTRACT(YEAR FROM "birth_date")), SQL_BIGINT) as "convert" FROM test_emp GROUP BY "convert" ORDER BY "convert" NULLS FIRST;
convert:l
---------
null
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
;
// CONVERT variant of the ordinal-reference case: GROUP BY 1 / ORDER BY 1 must
// bind back to the CONVERT expression in the SELECT list.
groupByConvertScalarWithNumericRef
SELECT CONVERT(ABS(EXTRACT(YEAR FROM "birth_date")), SQL_BIGINT) FROM test_emp GROUP BY 1 ORDER BY 1 NULLS FIRST;
CONVERT(ABS(EXTRACT(YEAR FROM "birth_date")), SQL_BIGINT):l
-----------------------------------------------------------
null
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
;
// Grouping on an expression mixing a constant scalar (PI()) with a field;
// the full expression is spelled out in SELECT, GROUP BY and ORDER BY.
groupByConstantScalar
SELECT PI() * emp_no FROM test_emp GROUP BY PI() * emp_no ORDER BY PI() * emp_no LIMIT 10;
PI() * emp_no:d
---------------
31419.0681285515
31422.2097212051
31425.3513138587
31428.4929065123
31431.6344991659
31434.7760918195
31437.9176844731
31441.0592771266
31444.2008697802
31447.3424624338
;
// As above but ordered DESC, so the grouping key must also drive a descending
// sort on the composite group.
groupByConstantScalarWithOrderByDesc
SELECT PI() * emp_no FROM test_emp GROUP BY PI() * emp_no ORDER BY PI() * emp_no DESC LIMIT 10;
PI() * emp_no:d
-------
31730.0858012569
31726.9442086033
31723.8026159497
31720.6610232961
31717.5194306425
31714.3778379889
31711.2362453353
31708.0946526817
31704.9530600281
31701.8114673746
;
// Constant-scalar expression aliased as "value"; GROUP BY and ORDER BY refer
// to it through the (unquoted) alias.
groupByConstantScalarWithAlias
SELECT PI() * emp_no AS "value" FROM test_emp GROUP BY value ORDER BY value LIMIT 10;
value:d
-------
31419.0681285515
31422.2097212051
31425.3513138587
31428.4929065123
31431.6344991659
31434.7760918195
31437.9176844731
31441.0592771266
31444.2008697802
31447.3424624338
;
// Ordinal references (GROUP BY 1 / ORDER BY 1 DESC) to the constant-scalar
// expression; expected rows mirror groupByConstantScalarWithOrderByDesc.
groupByConstantScalarWithNumericRef
SELECT PI() * emp_no FROM test_emp GROUP BY 1 ORDER BY 1 DESC LIMIT 10;
PI() * emp_no:d
-------
31730.0858012569
31726.9442086033
31723.8026159497
31720.6610232961
31717.5194306425
31714.3778379889
31711.2362453353
31708.0946526817
31704.9530600281
31701.8114673746
;
// Mixed grouping: a plain field plus a scalar expression, with a multi-key
// ORDER BY (gender ASC, expression DESC).
groupByFieldAndConstantScalarWithMultipleOrderBy
SELECT gender, emp_no % 3 + PI() FROM test_emp GROUP BY gender, emp_no % 3 + PI() ORDER BY gender, emp_no % 3 + PI() DESC LIMIT 8;
gender:s |emp_no % 3 + PI():d
------------+------------------
null |5.1415926535
null |4.1415926535
null |3.1415926535
F |5.1415926535
F |4.1415926535
F |3.1415926535
M |5.1415926535
M |4.1415926535
;
// Same mixed grouping, but the scalar is aliased (p) and ORDER BY mixes the
// raw field (gender DESC) with the alias (p DESC).
groupByFieldAndConstantScalarWithAliasWithOrderByDesc
SELECT gender, emp_no % 3 + PI() as p FROM test_emp GROUP BY gender, emp_no % 3 + PI() ORDER BY gender DESC, p DESC LIMIT 8;
gender:s |p:d
------------+------------------
M |5.1415926535
M |4.1415926535
M |3.1415926535
F |5.1415926535
F |4.1415926535
F |3.1415926535
null |5.1415926535
null |4.1415926535
;
// //
// Grouping functions // Grouping functions

View File

@ -51,6 +51,10 @@ groupByMulScalar
SELECT emp_no * 2 AS e FROM test_emp GROUP BY e ORDER BY e; SELECT emp_no * 2 AS e FROM test_emp GROUP BY e ORDER BY e;
groupByModScalar groupByModScalar
SELECT (emp_no % 3) + 1 AS e FROM test_emp GROUP BY e ORDER BY e; SELECT (emp_no % 3) + 1 AS e FROM test_emp GROUP BY e ORDER BY e;
// Comparison spec (no inline results — presumably validated against H2;
// TODO confirm against the surrounding file's harness): CAST over nested
// scalars repeated verbatim in SELECT, GROUP BY and ORDER BY.
groupByCastScalar
SELECT CAST(ABS(EXTRACT(YEAR FROM "birth_date")) AS BIGINT) FROM test_emp GROUP BY CAST(ABS(EXTRACT(YEAR FROM "birth_date")) AS BIGINT) ORDER BY CAST(ABS(EXTRACT(YEAR FROM "birth_date")) AS BIGINT) NULLS FIRST;
// Same CAST, but referenced through its quoted alias ("cast" is a keyword).
groupByCastScalarWithAlias
SELECT CAST(ABS(EXTRACT(YEAR FROM "birth_date")) AS BIGINT) as "cast" FROM test_emp GROUP BY "cast" ORDER BY "cast" NULLS FIRST;
// group by nested functions with no alias // group by nested functions with no alias
groupByTruncate groupByTruncate

View File

@ -66,6 +66,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList; import static java.util.Collections.singletonList;
@ -609,12 +610,15 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
.map(or -> tryResolveExpression(or, o.child())) .map(or -> tryResolveExpression(or, o.child()))
.collect(toList()); .collect(toList());
AttributeSet resolvedRefs = Expressions.references(maybeResolved.stream()
.filter(Expression::resolved)
.collect(toList()));
Set<Expression> resolvedRefs = maybeResolved.stream()
.filter(Expression::resolved)
.collect(Collectors.toSet());
AttributeSet missing = resolvedRefs.subtract(o.child().outputSet()); AttributeSet missing = Expressions.filterReferences(
resolvedRefs,
o.child().outputSet()
);
if (!missing.isEmpty()) { if (!missing.isEmpty()) {
// Add missing attributes but project them away afterwards // Add missing attributes but project them away afterwards

View File

@ -27,7 +27,7 @@ public class VerificationException extends AnalysisException {
public String getMessage() { public String getMessage() {
return failures.stream() return failures.stream()
.map(f -> { .map(f -> {
Location l = f.source().source().source(); Location l = f.node().source().source();
return "line " + l.getLineNumber() + ":" + l.getColumnNumber() + ": " + f.message(); return "line " + l.getLineNumber() + ":" + l.getColumnNumber() + ": " + f.message();
}) })
.collect(Collectors.joining(StringUtils.NEW_LINE, "Found " + failures.size() + " problem(s)\n", StringUtils.EMPTY)); .collect(Collectors.joining(StringUtils.NEW_LINE, "Found " + failures.size() + " problem(s)\n", StringUtils.EMPTY));

View File

@ -84,16 +84,16 @@ public final class Verifier {
} }
static class Failure { static class Failure {
private final Node<?> source; private final Node<?> node;
private final String message; private final String message;
Failure(Node<?> source, String message) { Failure(Node<?> node, String message) {
this.source = source; this.node = node;
this.message = message; this.message = message;
} }
Node<?> source() { Node<?> node() {
return source; return node;
} }
String message() { String message() {
@ -102,7 +102,7 @@ public final class Verifier {
@Override @Override
public int hashCode() { public int hashCode() {
return source.hashCode(); return Objects.hash(node);
} }
@Override @Override
@ -116,7 +116,7 @@ public final class Verifier {
} }
Verifier.Failure other = (Verifier.Failure) obj; Verifier.Failure other = (Verifier.Failure) obj;
return Objects.equals(source, other.source); return Objects.equals(node, other.node);
} }
@Override @Override
@ -131,7 +131,7 @@ public final class Verifier {
public Map<Node<?>, String> verifyFailures(LogicalPlan plan) { public Map<Node<?>, String> verifyFailures(LogicalPlan plan) {
Collection<Failure> failures = verify(plan); Collection<Failure> failures = verify(plan);
return failures.stream().collect(toMap(Failure::source, Failure::message)); return failures.stream().collect(toMap(Failure::node, Failure::message));
} }
Collection<Failure> verify(LogicalPlan plan) { Collection<Failure> verify(LogicalPlan plan) {

View File

@ -32,14 +32,14 @@ public class AttributeMap<E> implements Map<Attribute, E> {
@Override @Override
public int hashCode() { public int hashCode() {
return attr.semanticHash(); return attr.hashCode();
} }
@Override @Override
public boolean equals(Object obj) { public boolean equals(Object obj) {
if (obj instanceof AttributeWrapper) { if (obj instanceof AttributeWrapper) {
AttributeWrapper aw = (AttributeWrapper) obj; AttributeWrapper aw = (AttributeWrapper) obj;
return attr.semanticEquals(aw.attr); return attr.equals(aw.attr);
} }
return false; return false;
@ -368,4 +368,4 @@ public class AttributeMap<E> implements Map<Attribute, E> {
public String toString() { public String toString() {
return delegate.toString(); return delegate.toString();
} }
} }

View File

@ -126,9 +126,6 @@ public abstract class Expression extends Node<Expression> implements Resolvable
public abstract DataType dataType(); public abstract DataType dataType();
@Override
public abstract int hashCode();
@Override @Override
public String toString() { public String toString() {
return nodeName() + "[" + propertiesToString(false) + "]"; return nodeName() + "[" + propertiesToString(false) + "]";

View File

@ -16,6 +16,7 @@ import java.util.LinkedHashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.function.Predicate; import java.util.function.Predicate;
import java.util.stream.Collectors;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap; import static java.util.Collections.emptyMap;
@ -102,6 +103,31 @@ public final class Expressions {
return set; return set;
} }
public static AttributeSet filterReferences(Set<? extends Expression> exps, AttributeSet excluded) {
AttributeSet ret = new AttributeSet();
while (exps.size() > 0) {
Set<Expression> filteredExps = new LinkedHashSet<>();
for (Expression exp : exps) {
Expression attr = Expressions.attribute(exp);
if (attr == null || (excluded.contains(attr) == false)) {
filteredExps.add(exp);
}
}
ret.addAll(new AttributeSet(
filteredExps.stream().filter(c->c.children().isEmpty())
.flatMap(exp->exp.references().stream())
.collect(Collectors.toSet())
));
exps = filteredExps.stream()
.flatMap((Expression exp)->exp.children().stream())
.collect(Collectors.toSet());
}
return ret;
}
public static String name(Expression e) { public static String name(Expression e) {
return e instanceof NamedExpression ? ((NamedExpression) e).name() : e.nodeName(); return e instanceof NamedExpression ? ((NamedExpression) e).name() : e.nodeName();
} }

View File

@ -102,11 +102,6 @@ public class FieldAttribute extends TypedAttribute {
return new FieldAttribute(source(), this, name() + "." + type.getName(), type, qualifier(), nullable(), id(), synthetic()); return new FieldAttribute(source(), this, name() + "." + type.getName(), type, qualifier(), nullable(), id(), synthetic());
} }
@Override
protected Expression canonicalize() {
return new FieldAttribute(source(), null, "<none>", field, null, Nullability.TRUE, id(), false);
}
@Override @Override
protected Attribute clone(Source source, String name, DataType type, String qualifier, protected Attribute clone(Source source, String name, DataType type, String qualifier,
Nullability nullability, ExpressionId id, boolean synthetic) { Nullability nullability, ExpressionId id, boolean synthetic) {

View File

@ -67,7 +67,7 @@ public abstract class NamedExpression extends Expression {
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(id, name, synthetic); return Objects.hash(super.hashCode(), name, synthetic);
} }
@Override @Override
@ -81,7 +81,6 @@ public abstract class NamedExpression extends Expression {
NamedExpression other = (NamedExpression) obj; NamedExpression other = (NamedExpression) obj;
return Objects.equals(synthetic, other.synthetic) return Objects.equals(synthetic, other.synthetic)
&& Objects.equals(id, other.id)
/* /*
* It is important that the line below be `name` * It is important that the line below be `name`
* and not `name()` because subclasses might override * and not `name()` because subclasses might override
@ -96,4 +95,4 @@ public abstract class NamedExpression extends Expression {
public String toString() { public String toString() {
return super.toString() + "#" + id(); return super.toString() + "#" + id();
} }
} }

View File

@ -29,11 +29,11 @@ public abstract class FunctionAttribute extends TypedAttribute {
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(super.hashCode(), functionId); return Objects.hash(super.hashCode());
} }
@Override @Override
public boolean equals(Object obj) { public boolean equals(Object obj) {
return super.equals(obj) && Objects.equals(functionId, ((FunctionAttribute) obj).functionId()); return super.equals(obj);
} }
} }

View File

@ -75,14 +75,14 @@ public class AggregateFunctionAttribute extends FunctionAttribute {
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(super.hashCode(), innerId, propertyPath); return Objects.hash(super.hashCode(), propertyPath);
} }
@Override @Override
public boolean equals(Object obj) { public boolean equals(Object obj) {
if (super.equals(obj)) { if (super.equals(obj)) {
AggregateFunctionAttribute other = (AggregateFunctionAttribute) obj; AggregateFunctionAttribute other = (AggregateFunctionAttribute) obj;
return Objects.equals(innerId, other.innerId) && Objects.equals(propertyPath, other.propertyPath); return Objects.equals(propertyPath, other.propertyPath);
} }
return false; return false;
} }
@ -91,4 +91,4 @@ public class AggregateFunctionAttribute extends FunctionAttribute {
protected String label() { protected String label() {
return "a->" + innerId(); return "a->" + innerId();
} }
} }

View File

@ -78,11 +78,15 @@ public class Count extends AggregateFunction {
@Override @Override
public boolean equals(Object obj) { public boolean equals(Object obj) {
if (false == super.equals(obj)) { if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != getClass()) {
return false; return false;
} }
Count other = (Count) obj; Count other = (Count) obj;
return Objects.equals(other.distinct(), distinct()); return Objects.equals(other.distinct(), distinct())
&& Objects.equals(field(), other.field());
} }
@Override @Override

View File

@ -5,6 +5,8 @@
*/ */
package org.elasticsearch.xpack.sql.expression.gen.script; package org.elasticsearch.xpack.sql.expression.gen.script;
import java.util.Objects;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
abstract class Param<T> { abstract class Param<T> {
@ -24,4 +26,24 @@ abstract class Param<T> {
public String toString() { public String toString() {
return format(null, "{{}={}}", prefix(), value); return format(null, "{{}={}}", prefix(), value);
} }
@Override
public int hashCode() {
if (this.value == null) {
return Objects.hashCode(null);
}
return this.value.hashCode();
}
@Override
public boolean equals(Object obj) {
if ((obj instanceof Param) == false) {
return false;
}
if (this.value == null) {
return ((Param)obj).value == null;
}
return this.value.equals(((Param)obj).value);
}
} }

View File

@ -124,4 +124,17 @@ public class Params {
public String toString() { public String toString() {
return params.toString(); return params.toString();
} }
}
@Override
public int hashCode() {
return this.params.hashCode();
}
@Override
public boolean equals(Object obj) {
if ((obj instanceof Params) == false) {
return false;
}
return this.params.equals(((Params)obj).params);
}
}

View File

@ -20,6 +20,7 @@ import org.elasticsearch.xpack.sql.tree.Source;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors;
import static java.util.Collections.singletonList; import static java.util.Collections.singletonList;
@ -40,6 +41,14 @@ public class Pivot extends UnaryPlan {
this.aggregates = aggregates; this.aggregates = aggregates;
} }
private static Expression withQualifierNull(Expression e) {
if (e instanceof Attribute) {
Attribute fa = (Attribute) e;
return fa.withQualifier(null);
}
return e;
}
@Override @Override
protected NodeInfo<Pivot> info() { protected NodeInfo<Pivot> info() {
return NodeInfo.create(this, Pivot::new, child(), column, values, aggregates); return NodeInfo.create(this, Pivot::new, child(), column, values, aggregates);
@ -47,7 +56,22 @@ public class Pivot extends UnaryPlan {
@Override @Override
protected Pivot replaceChild(LogicalPlan newChild) { protected Pivot replaceChild(LogicalPlan newChild) {
return new Pivot(source(), newChild, column, values, aggregates); Expression newColumn = column;
List<NamedExpression> newAggregates = aggregates;
if (newChild instanceof EsRelation) {
// when changing from a SubQueryAlias to EsRelation
// the qualifier of the column and aggregates needs
// to be changed to null like the attributes of EsRelation
// otherwise they don't equal and aren't removed
// when calculating the groupingSet
newColumn = column.transformUp(Pivot::withQualifierNull);
newAggregates = aggregates.stream().map((NamedExpression aggregate) ->
(NamedExpression) aggregate.transformUp(Pivot::withQualifierNull)
).collect(Collectors.toList());
}
return new Pivot(source(), newChild, newColumn, values, newAggregates);
} }
public Expression column() { public Expression column() {
@ -61,7 +85,7 @@ public class Pivot extends UnaryPlan {
public List<NamedExpression> aggregates() { public List<NamedExpression> aggregates() {
return aggregates; return aggregates;
} }
public AttributeSet groupingSet() { public AttributeSet groupingSet() {
if (groupingSet == null) { if (groupingSet == null) {
AttributeSet columnSet = Expressions.references(singletonList(column)); AttributeSet columnSet = Expressions.references(singletonList(column));
@ -83,7 +107,7 @@ public class Pivot extends UnaryPlan {
if (aggregates.size() == 1) { if (aggregates.size() == 1) {
NamedExpression agg = aggregates.get(0); NamedExpression agg = aggregates.get(0);
for (NamedExpression value : values) { for (NamedExpression value : values) {
ExpressionId id = new ExpressionId(agg.id().hashCode() + value.id().hashCode()); ExpressionId id = value.id();
out.add(value.toAttribute().withDataType(agg.dataType()).withId(id)); out.add(value.toAttribute().withDataType(agg.dataType()).withId(id));
} }
} }
@ -92,7 +116,7 @@ public class Pivot extends UnaryPlan {
for (NamedExpression agg : aggregates) { for (NamedExpression agg : aggregates) {
String name = agg instanceof Function ? ((Function) agg).functionName() : agg.name(); String name = agg instanceof Function ? ((Function) agg).functionName() : agg.name();
for (NamedExpression value : values) { for (NamedExpression value : values) {
ExpressionId id = new ExpressionId(agg.id().hashCode() + value.id().hashCode()); ExpressionId id = value.id();
out.add(value.toAttribute().withName(value.name() + "_" + name).withDataType(agg.dataType()).withId(id)); out.add(value.toAttribute().withName(value.name() + "_" + name).withDataType(agg.dataType()).withId(id));
} }
} }
@ -101,7 +125,7 @@ public class Pivot extends UnaryPlan {
} }
return valueOutput; return valueOutput;
} }
@Override @Override
public List<Attribute> output() { public List<Attribute> output() {
if (output == null) { if (output == null) {
@ -122,21 +146,21 @@ public class Pivot extends UnaryPlan {
public int hashCode() { public int hashCode() {
return Objects.hash(column, values, aggregates, child()); return Objects.hash(column, values, aggregates, child());
} }
@Override @Override
public boolean equals(Object obj) { public boolean equals(Object obj) {
if (this == obj) { if (this == obj) {
return true; return true;
} }
if (obj == null || getClass() != obj.getClass()) { if (obj == null || getClass() != obj.getClass()) {
return false; return false;
} }
Pivot other = (Pivot) obj; Pivot other = (Pivot) obj;
return Objects.equals(column, other.column) return Objects.equals(column, other.column)
&& Objects.equals(values, other.values) && Objects.equals(values, other.values)
&& Objects.equals(aggregates, other.aggregates) && Objects.equals(aggregates, other.aggregates)
&& Objects.equals(child(), other.child()); && Objects.equals(child(), other.child());
} }
} }

View File

@ -72,6 +72,7 @@ import org.elasticsearch.xpack.sql.util.Check;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -122,7 +123,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
EsQueryExec exec = (EsQueryExec) project.child(); EsQueryExec exec = (EsQueryExec) project.child();
QueryContainer queryC = exec.queryContainer(); QueryContainer queryC = exec.queryContainer();
Map<Attribute, Attribute> aliases = new LinkedHashMap<>(queryC.aliases()); Map<ExpressionId, Attribute> aliases = new LinkedHashMap<>(queryC.aliases());
Map<Attribute, Pipe> processors = new LinkedHashMap<>(queryC.scalarFunctions()); Map<Attribute, Pipe> processors = new LinkedHashMap<>(queryC.scalarFunctions());
for (NamedExpression pj : project.projections()) { for (NamedExpression pj : project.projections()) {
@ -132,7 +133,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
if (e instanceof NamedExpression) { if (e instanceof NamedExpression) {
Attribute attr = ((NamedExpression) e).toAttribute(); Attribute attr = ((NamedExpression) e).toAttribute();
aliases.put(aliasAttr, attr); aliases.put(aliasAttr.id(), attr);
// add placeholder for each scalar function // add placeholder for each scalar function
if (e instanceof ScalarFunction) { if (e instanceof ScalarFunction) {
processors.put(attr, Expressions.pipe(e)); processors.put(attr, Expressions.pipe(e));
@ -153,7 +154,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
} }
QueryContainer clone = new QueryContainer(queryC.query(), queryC.aggs(), queryC.fields(), QueryContainer clone = new QueryContainer(queryC.query(), queryC.aggs(), queryC.fields(),
new AttributeMap<>(aliases), new HashMap<>(aliases),
queryC.pseudoFunctions(), queryC.pseudoFunctions(),
new AttributeMap<>(processors), new AttributeMap<>(processors),
queryC.sort(), queryC.sort(),
@ -234,7 +235,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
queryC = queryC.addGroups(groupingContext.groupMap.values()); queryC = queryC.addGroups(groupingContext.groupMap.values());
} }
Map<Attribute, Attribute> aliases = new LinkedHashMap<>(); Map<ExpressionId, Attribute> aliases = new LinkedHashMap<>();
// tracker for compound aggs seen in a group // tracker for compound aggs seen in a group
Map<CompoundNumericAggregate, String> compoundAggMap = new LinkedHashMap<>(); Map<CompoundNumericAggregate, String> compoundAggMap = new LinkedHashMap<>();
@ -262,7 +263,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
// record aliases in case they are later referred in the tree // record aliases in case they are later referred in the tree
if (as != null && as.child() instanceof NamedExpression) { if (as != null && as.child() instanceof NamedExpression) {
aliases.put(as.toAttribute(), ((NamedExpression) as.child()).toAttribute()); aliases.put(as.toAttribute().id(), ((NamedExpression) as.child()).toAttribute());
} }
// //
@ -392,9 +393,9 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
} }
if (!aliases.isEmpty()) { if (!aliases.isEmpty()) {
Map<Attribute, Attribute> newAliases = new LinkedHashMap<>(queryC.aliases()); Map<ExpressionId, Attribute> newAliases = new LinkedHashMap<>(queryC.aliases());
newAliases.putAll(aliases); newAliases.putAll(aliases);
queryC = queryC.withAliases(new AttributeMap<>(newAliases)); queryC = queryC.withAliases(new HashMap<>(newAliases));
} }
return new EsQueryExec(exec.source(), exec.index(), a.output(), queryC); return new EsQueryExec(exec.source(), exec.index(), a.output(), queryC);
} }
@ -481,20 +482,12 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
// check whether sorting is on an group (and thus nested agg) or field // check whether sorting is on an group (and thus nested agg) or field
Attribute attr = ((NamedExpression) order.child()).toAttribute(); Attribute attr = ((NamedExpression) order.child()).toAttribute();
// check whether there's an alias (occurs with scalar functions which are not named) // check whether there's an alias (occurs with scalar functions which are not named)
attr = qContainer.aliases().getOrDefault(attr, attr); attr = qContainer.aliases().getOrDefault(attr.id(), attr);
String lookup = attr.id().toString(); GroupByKey group = qContainer.findGroupForAgg(attr);
GroupByKey group = qContainer.findGroupForAgg(lookup);
// TODO: might need to validate whether the target field or group actually exist // TODO: might need to validate whether the target field or group actually exist
if (group != null && group != Aggs.IMPLICIT_GROUP_KEY) { if (group != null && group != Aggs.IMPLICIT_GROUP_KEY) {
// check whether the lookup matches a group qContainer = qContainer.updateGroup(group.with(direction));
if (group.id().equals(lookup)) {
qContainer = qContainer.updateGroup(group.with(direction));
}
// else it's a leafAgg
else {
qContainer = qContainer.updateGroup(group.with(direction));
}
} }
else { else {
// scalar functions typically require script ordering // scalar functions typically require script ordering
@ -504,7 +497,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
if (sfa.orderBy() != null) { if (sfa.orderBy() != null) {
if (sfa.orderBy() instanceof NamedExpression) { if (sfa.orderBy() instanceof NamedExpression) {
Attribute at = ((NamedExpression) sfa.orderBy()).toAttribute(); Attribute at = ((NamedExpression) sfa.orderBy()).toAttribute();
at = qContainer.aliases().getOrDefault(at, at); at = qContainer.aliases().getOrDefault(at.id(), at);
qContainer = qContainer.addSort(new AttributeSort(at, direction, missing)); qContainer = qContainer.addSort(new AttributeSort(at, direction, missing));
} else if (!sfa.orderBy().foldable()) { } else if (!sfa.orderBy().foldable()) {
// ignore constant // ignore constant

View File

@ -12,7 +12,6 @@ import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.ExpressionId;
import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.Foldables; import org.elasticsearch.xpack.sql.expression.Foldables;
@ -210,14 +209,14 @@ final class QueryTranslator {
} }
static class GroupingContext { static class GroupingContext {
final Map<ExpressionId, GroupByKey> groupMap; final Map<NamedExpression, GroupByKey> groupMap;
final GroupByKey tail; final GroupByKey tail;
GroupingContext(Map<ExpressionId, GroupByKey> groupMap) { GroupingContext(Map<NamedExpression, GroupByKey> groupMap) {
this.groupMap = groupMap; this.groupMap = groupMap;
GroupByKey lastAgg = null; GroupByKey lastAgg = null;
for (Entry<ExpressionId, GroupByKey> entry : groupMap.entrySet()) { for (Entry<NamedExpression, GroupByKey> entry : groupMap.entrySet()) {
lastAgg = entry.getValue(); lastAgg = entry.getValue();
} }
@ -232,7 +231,7 @@ final class QueryTranslator {
GroupByKey matchingGroup = null; GroupByKey matchingGroup = null;
// group found - finding the dedicated agg // group found - finding the dedicated agg
if (f.field() instanceof NamedExpression) { if (f.field() instanceof NamedExpression) {
matchingGroup = groupMap.get(((NamedExpression) f.field()).id()); matchingGroup = groupMap.get(f.field());
} }
// return matching group or the tail (last group) // return matching group or the tail (last group)
return matchingGroup != null ? matchingGroup : tail; return matchingGroup != null ? matchingGroup : tail;
@ -242,7 +241,7 @@ final class QueryTranslator {
} }
} }
if (exp instanceof NamedExpression) { if (exp instanceof NamedExpression) {
return groupMap.get(((NamedExpression) exp).id()); return groupMap.get(exp);
} }
throw new SqlIllegalArgumentException("Don't know how to find group for expression {}", exp); throw new SqlIllegalArgumentException("Don't know how to find group for expression {}", exp);
} }
@ -261,18 +260,18 @@ final class QueryTranslator {
return null; return null;
} }
Map<ExpressionId, GroupByKey> aggMap = new LinkedHashMap<>(); Map<NamedExpression, GroupByKey> aggMap = new LinkedHashMap<>();
for (Expression exp : groupings) { for (Expression exp : groupings) {
GroupByKey key = null; GroupByKey key = null;
ExpressionId id; NamedExpression id;
String aggId; String aggId;
if (exp instanceof NamedExpression) { if (exp instanceof NamedExpression) {
NamedExpression ne = (NamedExpression) exp; NamedExpression ne = (NamedExpression) exp;
id = ne.id(); id = ne;
aggId = id.toString(); aggId = ne.id().toString();
// change analyzed to non non-analyzed attributes // change analyzed to non non-analyzed attributes
if (exp instanceof FieldAttribute) { if (exp instanceof FieldAttribute) {

View File

@ -10,6 +10,8 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregati
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction;
import org.elasticsearch.xpack.sql.util.StringUtils; import org.elasticsearch.xpack.sql.util.StringUtils;
@ -121,16 +123,23 @@ public class Aggs {
return new Aggs(groups, simpleAggs, combine(pipelineAggs, pipelineAgg)); return new Aggs(groups, simpleAggs, combine(pipelineAggs, pipelineAgg));
} }
public GroupByKey findGroupForAgg(String groupOrAggId) { public GroupByKey findGroupForAgg(Attribute attr) {
String id = attr.id().toString();
for (GroupByKey group : this.groups) { for (GroupByKey group : this.groups) {
if (groupOrAggId.equals(group.id())) { if (id.equals(group.id())) {
return group; return group;
} }
if (attr instanceof ScalarFunctionAttribute) {
ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) attr;
if (group.script() != null && group.script().equals(sfa.script())) {
return group;
}
}
} }
// maybe it's the default group agg ? // maybe it's the default group agg ?
for (Agg agg : simpleAggs) { for (Agg agg : simpleAggs) {
if (groupOrAggId.equals(agg.id())) { if (id.equals(agg.id())) {
return IMPLICIT_GROUP_KEY; return IMPLICIT_GROUP_KEY;
} }
} }

View File

@ -38,6 +38,7 @@ import java.util.AbstractMap;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.BitSet; import java.util.BitSet;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
@ -70,7 +71,7 @@ public class QueryContainer {
private final List<Tuple<FieldExtraction, ExpressionId>> fields; private final List<Tuple<FieldExtraction, ExpressionId>> fields;
// aliases (maps an alias to its actual resolved attribute) // aliases (maps an alias to its actual resolved attribute)
private final AttributeMap<Attribute> aliases; private final Map<ExpressionId, Attribute> aliases;
// pseudo functions (like count) - that are 'extracted' from other aggs // pseudo functions (like count) - that are 'extracted' from other aggs
private final Map<String, GroupByKey> pseudoFunctions; private final Map<String, GroupByKey> pseudoFunctions;
@ -98,7 +99,7 @@ public class QueryContainer {
Aggs aggs, Aggs aggs,
List<Tuple<FieldExtraction, List<Tuple<FieldExtraction,
ExpressionId>> fields, ExpressionId>> fields,
AttributeMap<Attribute> aliases, Map<ExpressionId, Attribute> aliases,
Map<String, GroupByKey> pseudoFunctions, Map<String, GroupByKey> pseudoFunctions,
AttributeMap<Pipe> scalarFunctions, AttributeMap<Pipe> scalarFunctions,
Set<Sort> sort, Set<Sort> sort,
@ -109,7 +110,7 @@ public class QueryContainer {
this.query = query; this.query = query;
this.aggs = aggs == null ? Aggs.EMPTY : aggs; this.aggs = aggs == null ? Aggs.EMPTY : aggs;
this.fields = fields == null || fields.isEmpty() ? emptyList() : fields; this.fields = fields == null || fields.isEmpty() ? emptyList() : fields;
this.aliases = aliases == null || aliases.isEmpty() ? AttributeMap.emptyAttributeMap() : aliases; this.aliases = aliases == null || aliases.isEmpty() ? Collections.emptyMap() : aliases;
this.pseudoFunctions = pseudoFunctions == null || pseudoFunctions.isEmpty() ? emptyMap() : pseudoFunctions; this.pseudoFunctions = pseudoFunctions == null || pseudoFunctions.isEmpty() ? emptyMap() : pseudoFunctions;
this.scalarFunctions = scalarFunctions == null || scalarFunctions.isEmpty() ? AttributeMap.emptyAttributeMap() : scalarFunctions; this.scalarFunctions = scalarFunctions == null || scalarFunctions.isEmpty() ? AttributeMap.emptyAttributeMap() : scalarFunctions;
this.sort = sort == null || sort.isEmpty() ? emptySet() : sort; this.sort = sort == null || sort.isEmpty() ? emptySet() : sort;
@ -141,7 +142,7 @@ public class QueryContainer {
if (as.attribute() instanceof AggregateFunctionAttribute) { if (as.attribute() instanceof AggregateFunctionAttribute) {
aggSort = true; aggSort = true;
AggregateFunctionAttribute afa = (AggregateFunctionAttribute) as.attribute(); AggregateFunctionAttribute afa = (AggregateFunctionAttribute) as.attribute();
afa = (AggregateFunctionAttribute) aliases.getOrDefault(afa, afa); afa = (AggregateFunctionAttribute) aliases.getOrDefault(afa.innerId(), afa);
int atIndex = -1; int atIndex = -1;
for (int i = 0; i < fields.size(); i++) { for (int i = 0; i < fields.size(); i++) {
Tuple<FieldExtraction, ExpressionId> field = fields.get(i); Tuple<FieldExtraction, ExpressionId> field = fields.get(i);
@ -179,7 +180,7 @@ public class QueryContainer {
public BitSet columnMask(List<Attribute> columns) { public BitSet columnMask(List<Attribute> columns) {
BitSet mask = new BitSet(fields.size()); BitSet mask = new BitSet(fields.size());
for (Attribute column : columns) { for (Attribute column : columns) {
Attribute alias = aliases.get(column); Attribute alias = aliases.get(column.id());
// find the column index // find the column index
int index = -1; int index = -1;
@ -217,7 +218,7 @@ public class QueryContainer {
return fields; return fields;
} }
public AttributeMap<Attribute> aliases() { public Map<ExpressionId, Attribute> aliases() {
return aliases; return aliases;
} }
@ -271,7 +272,7 @@ public class QueryContainer {
minPageSize); minPageSize);
} }
public QueryContainer withAliases(AttributeMap<Attribute> a) { public QueryContainer withAliases(Map<ExpressionId, Attribute> a) {
return new QueryContainer(query, aggs, fields, a, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen, return new QueryContainer(query, aggs, fields, a, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen,
minPageSize); minPageSize);
} }
@ -312,7 +313,7 @@ public class QueryContainer {
} }
private String aliasName(Attribute attr) { private String aliasName(Attribute attr) {
return aliases.getOrDefault(attr, attr).name(); return aliases.getOrDefault(attr.id(), attr).name();
} }
// //
@ -397,7 +398,7 @@ public class QueryContainer {
// replace function/operators's input with references // replace function/operators's input with references
private Tuple<QueryContainer, FieldExtraction> resolvedTreeComputingRef(ScalarFunctionAttribute ta) { private Tuple<QueryContainer, FieldExtraction> resolvedTreeComputingRef(ScalarFunctionAttribute ta) {
Attribute attribute = aliases.getOrDefault(ta, ta); Attribute attribute = aliases.getOrDefault(ta.id(), ta);
Pipe proc = scalarFunctions.get(attribute); Pipe proc = scalarFunctions.get(attribute);
// check the attribute itself // check the attribute itself
@ -419,7 +420,7 @@ public class QueryContainer {
@Override @Override
public FieldExtraction resolve(Attribute attribute) { public FieldExtraction resolve(Attribute attribute) {
Attribute attr = aliases.getOrDefault(attribute, attribute); Attribute attr = aliases.getOrDefault(attribute.id(), attribute);
Tuple<QueryContainer, FieldExtraction> ref = container.toReference(attr); Tuple<QueryContainer, FieldExtraction> ref = container.toReference(attr);
container = ref.v1(); container = ref.v1();
return ref.v2(); return ref.v2();
@ -486,8 +487,8 @@ public class QueryContainer {
return with(aggs.addGroups(values)); return with(aggs.addGroups(values));
} }
public GroupByKey findGroupForAgg(String aggId) { public GroupByKey findGroupForAgg(Attribute attr) {
return aggs.findGroupForAgg(aggId); return aggs.findGroupForAgg(attr);
} }
public QueryContainer updateGroup(GroupByKey group) { public QueryContainer updateGroup(GroupByKey group) {

View File

@ -377,7 +377,8 @@ public class VerifierErrorMessagesTests extends ESTestCase {
} }
public void testMultipleColumns() { public void testMultipleColumns() {
assertEquals("1:43: Unknown column [xxx]\nline 1:8: Unknown column [xxx]", // We get only one message back because the messages are grouped by the node that caused the issue
assertEquals("1:43: Unknown column [xxx]",
error("SELECT xxx FROM test GROUP BY DAY_oF_YEAR(xxx)")); error("SELECT xxx FROM test GROUP BY DAY_oF_YEAR(xxx)"));
} }

View File

@ -55,7 +55,7 @@ public class AttributeMapTests extends ESTestCase {
Attribute one = m.keySet().iterator().next(); Attribute one = m.keySet().iterator().next();
assertThat(m.containsKey(one), is(true)); assertThat(m.containsKey(one), is(true));
assertThat(m.containsKey(a("one")), is(false)); assertThat(m.containsKey(a("one")), is(true));
assertThat(m.containsValue("one"), is(true)); assertThat(m.containsValue("one"), is(true));
assertThat(m.containsValue("on"), is(false)); assertThat(m.containsValue("on"), is(false));
assertThat(m.attributeNames(), contains("one", "two", "three")); assertThat(m.attributeNames(), contains("one", "two", "three"));
@ -74,7 +74,7 @@ public class AttributeMapTests extends ESTestCase {
assertThat(m.isEmpty(), is(false)); assertThat(m.isEmpty(), is(false));
assertThat(m.containsKey(one), is(true)); assertThat(m.containsKey(one), is(true));
assertThat(m.containsKey(a("one")), is(false)); assertThat(m.containsKey(a("one")), is(true));
assertThat(m.containsValue("one"), is(true)); assertThat(m.containsValue("one"), is(true));
assertThat(m.containsValue("on"), is(false)); assertThat(m.containsValue("on"), is(false));
} }
@ -178,4 +178,4 @@ public class AttributeMapTests extends ESTestCase {
assertThat(m, is(copy)); assertThat(m, is(copy));
} }
} }

View File

@ -54,8 +54,6 @@ public class UnresolvedAttributeTests extends AbstractNodeTestCase<UnresolvedAtt
() -> new UnresolvedAttribute(a.source(), a.name(), () -> new UnresolvedAttribute(a.source(), a.name(),
randomValueOtherThan(a.qualifier(), UnresolvedAttributeTests::randomQualifier), randomValueOtherThan(a.qualifier(), UnresolvedAttributeTests::randomQualifier),
a.id(), a.unresolvedMessage(), a.resolutionMetadata()), a.id(), a.unresolvedMessage(), a.resolutionMetadata()),
() -> new UnresolvedAttribute(a.source(), a.name(), a.qualifier(),
new ExpressionId(), a.unresolvedMessage(), a.resolutionMetadata()),
() -> new UnresolvedAttribute(a.source(), a.name(), a.qualifier(), a.id(), () -> new UnresolvedAttribute(a.source(), a.name(), a.qualifier(), a.id(),
randomValueOtherThan(a.unresolvedMessage(), () -> randomUnresolvedMessage()), randomValueOtherThan(a.unresolvedMessage(), () -> randomUnresolvedMessage()),
a.resolutionMetadata()), a.resolutionMetadata()),

View File

@ -236,7 +236,7 @@ public class OptimizerTests extends ESTestCase {
assertTrue(result instanceof Project); assertTrue(result instanceof Project);
List<? extends NamedExpression> projections = ((Project) result).projections(); List<? extends NamedExpression> projections = ((Project) result).projections();
assertEquals(2, projections.size()); assertEquals(2, projections.size());
assertSame(projections.get(0), projections.get(1)); assertEquals(projections.get(0), projections.get(1));
} }
public void testCombineProjections() { public void testCombineProjections() {

View File

@ -1132,6 +1132,257 @@ public class QueryTranslatorTests extends ESTestCase {
+ "\"gap_policy\":\"skip\"}}}}}")); + "\"gap_policy\":\"skip\"}}}}}"));
} }
public void testGroupByCastScalar() {
PhysicalPlan p = optimizeAndPlan("SELECT CAST(ABS(EXTRACT(YEAR FROM date)) AS BIGINT) FROM test " +
"GROUP BY CAST(ABS(EXTRACT(YEAR FROM date)) AS BIGINT) ORDER BY CAST(ABS(EXTRACT(YEAR FROM date)) AS BIGINT) NULLS FIRST");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("CAST(ABS(EXTRACT(YEAR FROM date)) AS BIGINT)", p.output().get(0).qualifiedName());
assertEquals(DataType.LONG, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.cast(InternalSqlScriptUtils.abs(InternalSqlScriptUtils.dateTimeChrono" +
"(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1,params.v2)),params.v3)\",\"lang\":\"painless\"," +
"\"params\":{\"v0\":\"date\",\"v1\":\"Z\",\"v2\":\"YEAR\",\"v3\":\"LONG\"}},\"missing_bucket\":true," +
"\"value_type\":\"long\",\"order\":\"asc\"}}}]}}}")
);
}
public void testGroupByCastScalarWithAlias() {
PhysicalPlan p = optimizeAndPlan("SELECT CAST(ABS(EXTRACT(YEAR FROM date)) AS BIGINT) as \"cast\" FROM test " +
"GROUP BY \"cast\" ORDER BY \"cast\" NULLS FIRST");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("cast", p.output().get(0).qualifiedName());
assertEquals(DataType.LONG, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.cast(InternalSqlScriptUtils.abs(InternalSqlScriptUtils.dateTimeChrono" +
"(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1,params.v2)),params.v3)\",\"lang\":\"painless\"," +
"\"params\":{\"v0\":\"date\",\"v1\":\"Z\",\"v2\":\"YEAR\",\"v3\":\"LONG\"}},\"missing_bucket\":true," +
"\"value_type\":\"long\",\"order\":\"asc\"}}}]}}}")
);
}
public void testGroupByCastScalarWithNumericRef() {
PhysicalPlan p = optimizeAndPlan("SELECT CAST(ABS(EXTRACT(YEAR FROM date)) AS BIGINT) FROM test " +
"GROUP BY 1 ORDER BY 1 NULLS FIRST");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("CAST(ABS(EXTRACT(YEAR FROM date)) AS BIGINT)", p.output().get(0).qualifiedName());
assertEquals(DataType.LONG, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.cast(InternalSqlScriptUtils.abs(InternalSqlScriptUtils.dateTimeChrono" +
"(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1,params.v2)),params.v3)\",\"lang\":\"painless\"," +
"\"params\":{\"v0\":\"date\",\"v1\":\"Z\",\"v2\":\"YEAR\",\"v3\":\"LONG\"}},\"missing_bucket\":true," +
"\"value_type\":\"long\",\"order\":\"asc\"}}}]}}}")
);
}
public void testGroupByConvertScalar() {
{
PhysicalPlan p = optimizeAndPlan("SELECT CONVERT(ABS(EXTRACT(YEAR FROM date)), SQL_BIGINT) FROM test " +
"GROUP BY CONVERT(ABS(EXTRACT(YEAR FROM date)), SQL_BIGINT) ORDER BY CONVERT(ABS(EXTRACT(YEAR FROM date)), SQL_BIGINT) " +
"NULLS FIRST");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("CONVERT(ABS(EXTRACT(YEAR FROM date)), SQL_BIGINT)", p.output().get(0).qualifiedName());
assertEquals(DataType.LONG, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.cast(InternalSqlScriptUtils.abs(InternalSqlScriptUtils.dateTimeChrono" +
"(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1,params.v2)),params.v3)\",\"lang\":\"painless\"," +
"\"params\":{\"v0\":\"date\",\"v1\":\"Z\",\"v2\":\"YEAR\",\"v3\":\"LONG\"}},\"missing_bucket\":true," +
"\"value_type\":\"long\",\"order\":\"asc\"}}}]}}}")
);
}
{
PhysicalPlan p = optimizeAndPlan("SELECT EXTRACT(HOUR FROM CONVERT(date, SQL_TIMESTAMP)) FROM test GROUP BY " +
"EXTRACT(HOUR FROM CONVERT(date, SQL_TIMESTAMP))");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("EXTRACT(HOUR FROM CONVERT(date, SQL_TIMESTAMP))", p.output().get(0).qualifiedName());
assertEquals(DataType.INTEGER, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.dateTimeChrono(" +
"InternalSqlScriptUtils.docValue(doc,params.v0),params.v1,params.v2)\",\"lang\":\"painless\"," +
"\"params\":{\"v0\":\"date\",\"v1\":\"Z\",\"v2\":\"HOUR_OF_DAY\"}},\"missing_bucket\":true," +
"\"value_type\":\"long\",\"order\":\"asc\"}}}]}}}")
);
}
}
public void testGroupByConvertScalarWithAlias() {
{
PhysicalPlan p = optimizeAndPlan("SELECT CONVERT(ABS(EXTRACT(YEAR FROM date)), SQL_BIGINT) as \"convert\" FROM test " +
"GROUP BY \"convert\" ORDER BY \"convert\" NULLS FIRST");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("convert", p.output().get(0).qualifiedName());
assertEquals(DataType.LONG, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.cast(InternalSqlScriptUtils.abs(InternalSqlScriptUtils.dateTimeChrono" +
"(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1,params.v2)),params.v3)\",\"lang\":\"painless\"," +
"\"params\":{\"v0\":\"date\",\"v1\":\"Z\",\"v2\":\"YEAR\",\"v3\":\"LONG\"}},\"missing_bucket\":true," +
"\"value_type\":\"long\",\"order\":\"asc\"}}}]}}}")
);
}
{
PhysicalPlan p = optimizeAndPlan("SELECT EXTRACT(MINUTE FROM CONVERT(date, SQL_TIMESTAMP)) x FROM test GROUP BY x");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("x", p.output().get(0).qualifiedName());
assertEquals(DataType.INTEGER, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.dateTimeChrono(" +
"InternalSqlScriptUtils.docValue(doc,params.v0),params.v1,params.v2)\",\"lang\":\"painless\"," +
"\"params\":{\"v0\":\"date\",\"v1\":\"Z\",\"v2\":\"MINUTE_OF_HOUR\"}}," +
"\"missing_bucket\":true,\"value_type\":\"long\",\"order\":\"asc\"}}}]}}}")
);
}
}
public void testGroupByConvertScalarWithNumericRef() {
PhysicalPlan p = optimizeAndPlan("SELECT CONVERT(ABS(EXTRACT(YEAR FROM date)), SQL_BIGINT) FROM test " +
"GROUP BY 1 ORDER BY 1 NULLS FIRST");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("CONVERT(ABS(EXTRACT(YEAR FROM date)), SQL_BIGINT)", p.output().get(0).qualifiedName());
assertEquals(DataType.LONG, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.cast(InternalSqlScriptUtils.abs(InternalSqlScriptUtils.dateTimeChrono" +
"(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1,params.v2)),params.v3)\",\"lang\":\"painless\"," +
"\"params\":{\"v0\":\"date\",\"v1\":\"Z\",\"v2\":\"YEAR\",\"v3\":\"LONG\"}},\"missing_bucket\":true," +
"\"value_type\":\"long\",\"order\":\"asc\"}}}]}}}")
);
}
public void testGroupByConstantScalar() {
PhysicalPlan p = optimizeAndPlan("SELECT PI() * int FROM test WHERE PI() * int > 5.0 GROUP BY PI() * int " +
"ORDER BY PI() * int LIMIT 10");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("PI() * int", p.output().get(0).qualifiedName());
assertEquals(DataType.DOUBLE, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"script\":{\"source\":\"InternalSqlScriptUtils.mul(params.v0,InternalSqlScriptUtils.docValue(doc,params.v1))\"," +
"\"lang\":\"painless\",\"params\":{\"v0\":3.141592653589793,\"v1\":\"int\"}},\"missing_bucket\":true," +
"\"value_type\":\"double\",\"order\":\"asc\"}}}]}}}")
);
}
public void testGroupByConstantScalarWithAlias() {
{
PhysicalPlan p = optimizeAndPlan("SELECT PI() * int AS \"value\" FROM test GROUP BY \"value\" ORDER BY \"value\" LIMIT 10");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("value", p.output().get(0).qualifiedName());
assertEquals(DataType.DOUBLE, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"script\":{\"source\":\"InternalSqlScriptUtils.mul(params.v0,InternalSqlScriptUtils.docValue(doc,params.v1))" +
"\",\"lang\":\"painless\",\"params\":{\"v0\":3.141592653589793,\"v1\":\"int\"}},\"missing_bucket\":true," +
"\"value_type\":\"double\",\"order\":\"asc\"}}}]}}}")
);
}
{
PhysicalPlan p = optimizeAndPlan("select (3 < int) as multi_language, count(*) from test group by multi_language");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(2, p.output().size());
assertEquals("multi_language", p.output().get(0).qualifiedName());
assertEquals(DataType.BOOLEAN, p.output().get(0).dataType());
assertEquals("count(*)", p.output().get(1).qualifiedName());
assertEquals(DataType.LONG, p.output().get(1).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.gt(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1)\"," +
"\"lang\":\"painless\",\"params\":{\"v0\":\"int\",\"v1\":3}}," +
"\"missing_bucket\":true,\"value_type\":\"boolean\",\"order\":\"asc\"}}}]}}}")
);
}
}
public void testGroupByConstantScalarWithNumericRef() {
{
PhysicalPlan p = optimizeAndPlan("SELECT PI() * int FROM test GROUP BY 1 ORDER BY 1 LIMIT 10");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("PI() * int", p.output().get(0).qualifiedName());
assertEquals(DataType.DOUBLE, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"script\":{\"source\":\"InternalSqlScriptUtils.mul(params.v0,InternalSqlScriptUtils.docValue(doc,params.v1))" +
"\",\"lang\":\"painless\",\"params\":{\"v0\":3.141592653589793,\"v1\":\"int\"}},\"missing_bucket\":true," +
"\"value_type\":\"double\",\"order\":\"asc\"}}}]}}}")
);
}
{
PhysicalPlan p = optimizeAndPlan("SELECT PI() * int FROM test GROUP BY 1");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("PI() * int", p.output().get(0).qualifiedName());
assertEquals(DataType.DOUBLE, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.mul(params.v0,InternalSqlScriptUtils.docValue(doc,params.v1))\"," +
"\"lang\":\"painless\",\"params\":{\"v0\":3.141592653589793,\"v1\":\"int\"}}," +
"\"missing_bucket\":true,\"value_type\":\"double\",\"order\":\"asc\"}}}]}}}")
);
}
{
PhysicalPlan p = optimizeAndPlan("SELECT date + 1 * INTERVAL '1' DAY FROM test GROUP BY 1");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("date + 1 * INTERVAL '1' DAY", p.output().get(0).qualifiedName());
assertEquals(DataType.DATETIME, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().aggs().asAggBuilder().toString()
.replaceAll("\\s+", ""),
endsWith("{\"source\":\"InternalSqlScriptUtils.add(InternalSqlScriptUtils.docValue(doc,params.v0)," +
"InternalSqlScriptUtils.intervalDayTime(params.v1,params.v2))\"," +
"\"lang\":\"painless\",\"params\":{\"v0\":\"date\",\"v1\":\"PT24H\",\"v2\":\"INTERVAL_DAY\"}}," +
"\"missing_bucket\":true,\"value_type\":\"long\",\"order\":\"asc\"}}}]}}}")
);
}
}
public void testOrderByWithCastWithMissingRefs() {
PhysicalPlan p = optimizeAndPlan("SELECT keyword FROM test ORDER BY date::TIME, int LIMIT 5");
assertEquals(EsQueryExec.class, p.getClass());
assertEquals(1, p.output().size());
assertEquals("test.keyword", p.output().get(0).qualifiedName());
assertEquals(DataType.KEYWORD, p.output().get(0).dataType());
assertThat(
((EsQueryExec) p).queryContainer().toString()
.replaceAll("\\s+", ""),
endsWith("\"sort\":[{\"_script\":{\"script\":{\"source\":\"InternalSqlScriptUtils.nullSafeSortString(InternalSqlScriptUtils" +
".cast(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1))\",\"lang\":\"painless\"," +
"\"params\":{\"v0\":\"date\",\"v1\":\"TIME\"}},\"type\":\"string\",\"order\":\"asc\"}},{\"int\":{\"order\":\"asc\"," +
"\"missing\":\"_last\",\"unmapped_type\":\"integer\"}}]}")
);
}
public void testTopHitsAggregationWithOneArg() { public void testTopHitsAggregationWithOneArg() {
{ {
PhysicalPlan p = optimizeAndPlan("SELECT FIRST(keyword) FROM test"); PhysicalPlan p = optimizeAndPlan("SELECT FIRST(keyword) FROM test");

View File

@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.querydsl.container;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Alias;
import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.AttributeMap; import org.elasticsearch.xpack.sql.expression.ExpressionId;
import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.querydsl.query.BoolQuery; import org.elasticsearch.xpack.sql.querydsl.query.BoolQuery;
import org.elasticsearch.xpack.sql.querydsl.query.MatchAll; import org.elasticsearch.xpack.sql.querydsl.query.MatchAll;
@ -81,11 +81,11 @@ public class QueryContainerTests extends ESTestCase {
Attribute fourth = new FieldAttribute(Source.EMPTY, "fourth", esField); Attribute fourth = new FieldAttribute(Source.EMPTY, "fourth", esField);
Alias firstAliased = new Alias(Source.EMPTY, "firstAliased", first); Alias firstAliased = new Alias(Source.EMPTY, "firstAliased", first);
Map<Attribute,Attribute> aliasesMap = new LinkedHashMap<>(); Map<ExpressionId,Attribute> aliasesMap = new LinkedHashMap<>();
aliasesMap.put(firstAliased.toAttribute(), first); aliasesMap.put(firstAliased.id(), first);
QueryContainer queryContainer = new QueryContainer() QueryContainer queryContainer = new QueryContainer()
.withAliases(new AttributeMap<>(aliasesMap)) .withAliases(aliasesMap)
.addColumn(third) .addColumn(third)
.addColumn(first) .addColumn(first)
.addColumn(fourth) .addColumn(fourth)