Summary of the issue and the root cause: ``` (1) SELECT 100, 100 -> success (2) SELECT ?, ? (with params: 100, 100) -> success (3) SELECT 100, 100 FROM test -> Unknown output attribute exception for the second 100 (4) SELECT ?, ? FROM test (params: 100, 100) -> Unknown output attribute exception for the second ? (5) SELECT field1 as "x", field1 as "x" FROM test -> Unknown output attribute exception for the second "x" ``` There are two separate issues at play here: 1. Construction of `AttributeMap`s keeps only one of the `Attribute`s with the same name even if the `id`s are different (see the `AttributeMapTests` in this PR). This should be fixed no matter what; we should not overwrite attributes with one another during the construction of the `AttributeMap`. 2. The `id` on the `Alias`es is not the same in case the `Alias`es have the same `name` and same `child`. It was considered to simply fix the second issue by just reassigning the same `id`s to the `Alias`es with the same name and child, but it would not solve the `unknown output attribute exception` (see notes below). This PR covers the fix for the first issue. Relates to #56013
This commit is contained in:
parent
da0a969003
commit
3d8e17f3bd
|
@ -157,6 +157,10 @@ public class AttributeMap<E> implements Map<Attribute, E> {
|
||||||
delegate = new LinkedHashMap<>();
|
delegate = new LinkedHashMap<>();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Please use the {@link AttributeMap#builder()} instead.
|
||||||
|
*/
|
||||||
|
@Deprecated
|
||||||
public AttributeMap(Map<Attribute, E> attr) {
|
public AttributeMap(Map<Attribute, E> attr) {
|
||||||
if (attr.isEmpty()) {
|
if (attr.isEmpty()) {
|
||||||
delegate = emptyMap();
|
delegate = emptyMap();
|
||||||
|
@ -368,4 +372,28 @@ public class AttributeMap<E> implements Map<Attribute, E> {
|
||||||
public String toString() {
|
public String toString() {
|
||||||
return delegate.toString();
|
return delegate.toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static <E> Builder<E> builder() {
|
||||||
|
return new Builder<>();
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class Builder<E> {
|
||||||
|
private final AttributeMap<E> map = new AttributeMap<>();
|
||||||
|
|
||||||
|
private Builder() {}
|
||||||
|
|
||||||
|
public Builder<E> put(Attribute attr, E value) {
|
||||||
|
map.add(attr, value);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder<E> putAll(AttributeMap<E> m) {
|
||||||
|
map.addAll(m);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public AttributeMap<E> build() {
|
||||||
|
return new AttributeMap<>(map);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -7,7 +7,9 @@ package org.elasticsearch.xpack.ql.expression;
|
||||||
|
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
import org.elasticsearch.xpack.ql.tree.Source;
|
import org.elasticsearch.xpack.ql.tree.Source;
|
||||||
|
import org.elasticsearch.xpack.ql.type.DataTypes;
|
||||||
|
|
||||||
|
import java.util.Arrays;
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
import java.util.LinkedHashMap;
|
import java.util.LinkedHashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
@ -37,6 +39,47 @@ public class AttributeMapTests extends ESTestCase {
|
||||||
return new AttributeMap<>(map);
|
return new AttributeMap<>(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testAttributeMapWithSameAliasesCanResolveAttributes() {
|
||||||
|
Alias param1 = createIntParameterAlias(1, 100);
|
||||||
|
Alias param2 = createIntParameterAlias(2, 100);
|
||||||
|
assertTrue(param1.equals(param2));
|
||||||
|
assertTrue(param1.semanticEquals(param2));
|
||||||
|
// equality on literals
|
||||||
|
assertTrue(param1.child().equals(param2.child()));
|
||||||
|
assertTrue(param1.child().semanticEquals(param2.child()));
|
||||||
|
assertTrue(param1.toAttribute().equals(param2.toAttribute()));
|
||||||
|
assertFalse(param1.toAttribute().semanticEquals(param2.toAttribute()));
|
||||||
|
|
||||||
|
Map<Attribute, Expression> collectRefs = new LinkedHashMap<>();
|
||||||
|
for (Alias a : Arrays.asList(param1, param2)) {
|
||||||
|
collectRefs.put(a.toAttribute(), a.child());
|
||||||
|
}
|
||||||
|
// we can look up the same item by both attributes
|
||||||
|
assertNotNull(collectRefs.get(param1.toAttribute()));
|
||||||
|
assertNotNull(collectRefs.get(param2.toAttribute()));
|
||||||
|
AttributeMap<Expression> attributeMap = new AttributeMap<>(collectRefs);
|
||||||
|
|
||||||
|
// validate that all Alias can be e
|
||||||
|
assertTrue(attributeMap.containsKey(param1.toAttribute()));
|
||||||
|
assertFalse(attributeMap.containsKey(param2.toAttribute())); // results in unknown attribute exception
|
||||||
|
|
||||||
|
AttributeMap.Builder<Expression> mapBuilder = AttributeMap.builder();
|
||||||
|
for (Alias a : Arrays.asList(param1, param2)) {
|
||||||
|
mapBuilder.put(a.toAttribute(), a.child());
|
||||||
|
}
|
||||||
|
AttributeMap<Expression> newAttributeMap = mapBuilder.build();
|
||||||
|
|
||||||
|
assertTrue(newAttributeMap.containsKey(param1.toAttribute()));
|
||||||
|
assertTrue(newAttributeMap.containsKey(param2.toAttribute())); // no more unknown attribute exception
|
||||||
|
}
|
||||||
|
|
||||||
|
private Alias createIntParameterAlias(int index, int value) {
|
||||||
|
Source source = new Source(1, index * 5, "?");
|
||||||
|
Literal literal = new Literal(source, value, DataTypes.INTEGER);
|
||||||
|
Alias alias = new Alias(literal.source(), literal.source().text(), literal);
|
||||||
|
return alias;
|
||||||
|
}
|
||||||
|
|
||||||
public void testEmptyConstructor() {
|
public void testEmptyConstructor() {
|
||||||
AttributeMap<Object> m = new AttributeMap<>();
|
AttributeMap<Object> m = new AttributeMap<>();
|
||||||
assertThat(m.size(), is(0));
|
assertThat(m.size(), is(0));
|
||||||
|
|
|
@ -48,6 +48,12 @@ SELECT salary, first_name, salary AS x, salary y FROM test_emp ORDER BY y LIMIT
|
||||||
|
|
||||||
constantWithLimit
|
constantWithLimit
|
||||||
SELECT 3 FROM "test_emp" LIMIT 5;
|
SELECT 3 FROM "test_emp" LIMIT 5;
|
||||||
|
sameConstantsWithLimit
|
||||||
|
SELECT 3, 3, 5 FROM "test_emp" LIMIT 5;
|
||||||
|
sameConstantsWithLimitV2
|
||||||
|
SELECT 5, 3, 3 FROM "test_emp" LIMIT 5;
|
||||||
|
sameConstantsWithLimitV3
|
||||||
|
SELECT 3, 5, 3, 3 FROM "test_emp" LIMIT 5;
|
||||||
constantAndColumnWithLimit
|
constantAndColumnWithLimit
|
||||||
SELECT 3, first_name, last_name FROM "test_emp" ORDER BY emp_no LIMIT 5;
|
SELECT 3, first_name, last_name FROM "test_emp" ORDER BY emp_no LIMIT 5;
|
||||||
constantComparisonWithLimit
|
constantComparisonWithLimit
|
||||||
|
|
|
@ -179,7 +179,7 @@ public final class Verifier {
|
||||||
|
|
||||||
if (failures.isEmpty()) {
|
if (failures.isEmpty()) {
|
||||||
Set<Failure> localFailures = new LinkedHashSet<>();
|
Set<Failure> localFailures = new LinkedHashSet<>();
|
||||||
final Map<Attribute, Expression> collectRefs = new LinkedHashMap<>();
|
AttributeMap.Builder<Expression> collectRefs = AttributeMap.builder();
|
||||||
|
|
||||||
checkFullTextSearchInSelect(plan, localFailures);
|
checkFullTextSearchInSelect(plan, localFailures);
|
||||||
|
|
||||||
|
@ -193,7 +193,7 @@ public final class Verifier {
|
||||||
}
|
}
|
||||||
}));
|
}));
|
||||||
|
|
||||||
AttributeMap<Expression> attributeRefs = new AttributeMap<>(collectRefs);
|
AttributeMap<Expression> attributeRefs = collectRefs.build();
|
||||||
|
|
||||||
// for filtering out duplicated errors
|
// for filtering out duplicated errors
|
||||||
final Set<LogicalPlan> groupingFailures = new LinkedHashSet<>();
|
final Set<LogicalPlan> groupingFailures = new LinkedHashSet<>();
|
||||||
|
|
|
@ -142,8 +142,8 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
|
||||||
EsQueryExec exec = (EsQueryExec) project.child();
|
EsQueryExec exec = (EsQueryExec) project.child();
|
||||||
QueryContainer queryC = exec.queryContainer();
|
QueryContainer queryC = exec.queryContainer();
|
||||||
|
|
||||||
Map<Attribute, Expression> aliases = new LinkedHashMap<>(queryC.aliases());
|
AttributeMap.Builder<Expression> aliases = AttributeMap.<Expression>builder().putAll(queryC.aliases());
|
||||||
Map<Attribute, Pipe> processors = new LinkedHashMap<>(queryC.scalarFunctions());
|
AttributeMap.Builder<Pipe> processors = AttributeMap.<Pipe>builder().putAll(queryC.scalarFunctions());
|
||||||
|
|
||||||
for (NamedExpression pj : project.projections()) {
|
for (NamedExpression pj : project.projections()) {
|
||||||
if (pj instanceof Alias) {
|
if (pj instanceof Alias) {
|
||||||
|
@ -161,9 +161,9 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryContainer clone = new QueryContainer(queryC.query(), queryC.aggs(), queryC.fields(),
|
QueryContainer clone = new QueryContainer(queryC.query(), queryC.aggs(), queryC.fields(),
|
||||||
new AttributeMap<>(aliases),
|
aliases.build(),
|
||||||
queryC.pseudoFunctions(),
|
queryC.pseudoFunctions(),
|
||||||
new AttributeMap<>(processors),
|
processors.build(),
|
||||||
queryC.sort(),
|
queryC.sort(),
|
||||||
queryC.limit(),
|
queryC.limit(),
|
||||||
queryC.shouldTrackHits(),
|
queryC.shouldTrackHits(),
|
||||||
|
|
|
@ -0,0 +1,92 @@
|
||||||
|
/*
|
||||||
|
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||||
|
* or more contributor license agreements. Licensed under the Elastic License;
|
||||||
|
* you may not use this file except in compliance with the Elastic License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.elasticsearch.xpack.sql.parser;
|
||||||
|
|
||||||
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
import org.elasticsearch.xpack.ql.expression.Literal;
|
||||||
|
import org.elasticsearch.xpack.ql.expression.NamedExpression;
|
||||||
|
import org.elasticsearch.xpack.ql.expression.UnresolvedAlias;
|
||||||
|
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan;
|
||||||
|
import org.elasticsearch.xpack.ql.plan.logical.Filter;
|
||||||
|
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
|
||||||
|
import org.elasticsearch.xpack.ql.plan.logical.Project;
|
||||||
|
import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
|
||||||
|
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import static org.elasticsearch.xpack.ql.type.DateUtils.UTC;
|
||||||
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
|
import static org.hamcrest.Matchers.everyItem;
|
||||||
|
import static org.hamcrest.Matchers.instanceOf;
|
||||||
|
import static org.hamcrest.Matchers.startsWith;
|
||||||
|
|
||||||
|
public class ParamLiteralTests extends ESTestCase {
|
||||||
|
|
||||||
|
private final SqlParser parser = new SqlParser();
|
||||||
|
|
||||||
|
private LogicalPlan parse(String sql, SqlTypedParamValue... parameters) {
|
||||||
|
return parser.createStatement(sql, Arrays.asList(parameters), UTC);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testMultipleParamLiteralsWithUnresolvedAliases() {
|
||||||
|
LogicalPlan logicalPlan = parse("SELECT ?, ? FROM test",
|
||||||
|
new SqlTypedParamValue("integer", 100),
|
||||||
|
new SqlTypedParamValue("integer", 200)
|
||||||
|
);
|
||||||
|
List<? extends NamedExpression> projections = ((Project) logicalPlan.children().get(0)).projections();
|
||||||
|
assertThat(projections, everyItem(instanceOf(UnresolvedAlias.class)));
|
||||||
|
assertThat(projections.get(0).toString(), startsWith("100 AS ?"));
|
||||||
|
assertThat(projections.get(1).toString(), startsWith("200 AS ?"));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testMultipleParamLiteralsWithUnresolvedAliasesAndWhereClause() {
|
||||||
|
LogicalPlan logicalPlan = parse("SELECT ?, ?, (?) FROM test WHERE 1 < ?",
|
||||||
|
new SqlTypedParamValue("integer", 100),
|
||||||
|
new SqlTypedParamValue("integer", 100),
|
||||||
|
new SqlTypedParamValue("integer", 200),
|
||||||
|
new SqlTypedParamValue("integer", 300)
|
||||||
|
);
|
||||||
|
Project project = (Project) logicalPlan.children().get(0);
|
||||||
|
List<? extends NamedExpression> projections = project.projections();
|
||||||
|
assertThat(projections, everyItem(instanceOf(UnresolvedAlias.class)));
|
||||||
|
assertThat(projections.get(0).toString(), startsWith("100 AS ?"));
|
||||||
|
assertThat(projections.get(1).toString(), startsWith("100 AS ?"));
|
||||||
|
assertThat(projections.get(2).toString(), startsWith("200 AS ?"));
|
||||||
|
assertThat(project.children().get(0), instanceOf(Filter.class));
|
||||||
|
Filter filter = (Filter) project.children().get(0);
|
||||||
|
assertThat(filter.condition(), instanceOf(LessThan.class));
|
||||||
|
LessThan condition = (LessThan) filter.condition();
|
||||||
|
assertThat(condition.left(), instanceOf(Literal.class));
|
||||||
|
assertThat(condition.right(), instanceOf(Literal.class));
|
||||||
|
assertThat(((Literal)condition.right()).value(), equalTo(300));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testParamLiteralsWithUnresolvedAliasesAndMixedTypes() {
|
||||||
|
LogicalPlan logicalPlan = parse("SELECT ?, ? FROM test",
|
||||||
|
new SqlTypedParamValue("integer", 100),
|
||||||
|
new SqlTypedParamValue("text", "100")
|
||||||
|
);
|
||||||
|
List<? extends NamedExpression> projections = ((Project) logicalPlan.children().get(0)).projections();
|
||||||
|
assertThat(projections, everyItem(instanceOf(UnresolvedAlias.class)));
|
||||||
|
assertThat(projections.get(0).toString(), startsWith("100 AS ?"));
|
||||||
|
assertThat(projections.get(1).toString(), startsWith("100 AS ?"));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testParamLiteralsWithResolvedAndUnresolvedAliases() {
|
||||||
|
LogicalPlan logicalPlan = parse("SELECT ?, ? as x, ? FROM test",
|
||||||
|
new SqlTypedParamValue("integer", 100),
|
||||||
|
new SqlTypedParamValue("integer", 200),
|
||||||
|
new SqlTypedParamValue("integer", 300)
|
||||||
|
);
|
||||||
|
List<? extends NamedExpression> projections = ((Project) logicalPlan.children().get(0)).projections();
|
||||||
|
assertThat(projections.get(0).toString(), startsWith("100 AS ?"));
|
||||||
|
assertThat(projections.get(1).toString(), startsWith("200 AS x#"));;
|
||||||
|
assertThat(projections.get(2).toString(), startsWith("300 AS ?"));;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -19,6 +19,7 @@ import org.elasticsearch.xpack.ql.expression.Attribute;
|
||||||
import org.elasticsearch.xpack.ql.expression.Expression;
|
import org.elasticsearch.xpack.ql.expression.Expression;
|
||||||
import org.elasticsearch.xpack.ql.expression.FieldAttribute;
|
import org.elasticsearch.xpack.ql.expression.FieldAttribute;
|
||||||
import org.elasticsearch.xpack.ql.expression.Literal;
|
import org.elasticsearch.xpack.ql.expression.Literal;
|
||||||
|
import org.elasticsearch.xpack.ql.expression.ReferenceAttribute;
|
||||||
import org.elasticsearch.xpack.ql.expression.function.FunctionDefinition;
|
import org.elasticsearch.xpack.ql.expression.function.FunctionDefinition;
|
||||||
import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction;
|
import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction;
|
||||||
import org.elasticsearch.xpack.ql.expression.function.aggregate.Count;
|
import org.elasticsearch.xpack.ql.expression.function.aggregate.Count;
|
||||||
|
@ -67,6 +68,7 @@ import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec;
|
||||||
import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
|
import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
|
||||||
import org.elasticsearch.xpack.sql.planner.QueryFolder.FoldAggregate.GroupingContext;
|
import org.elasticsearch.xpack.sql.planner.QueryFolder.FoldAggregate.GroupingContext;
|
||||||
import org.elasticsearch.xpack.sql.planner.QueryTranslator.QueryTranslation;
|
import org.elasticsearch.xpack.sql.planner.QueryTranslator.QueryTranslation;
|
||||||
|
import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
|
||||||
import org.elasticsearch.xpack.sql.querydsl.agg.AggFilter;
|
import org.elasticsearch.xpack.sql.querydsl.agg.AggFilter;
|
||||||
import org.elasticsearch.xpack.sql.querydsl.agg.GroupByDateHistogram;
|
import org.elasticsearch.xpack.sql.querydsl.agg.GroupByDateHistogram;
|
||||||
import org.elasticsearch.xpack.sql.querydsl.container.MetricAggRef;
|
import org.elasticsearch.xpack.sql.querydsl.container.MetricAggRef;
|
||||||
|
@ -77,6 +79,7 @@ import org.junit.BeforeClass;
|
||||||
|
|
||||||
import java.time.ZoneId;
|
import java.time.ZoneId;
|
||||||
import java.time.ZonedDateTime;
|
import java.time.ZonedDateTime;
|
||||||
|
import java.util.Arrays;
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
@ -99,6 +102,7 @@ import static org.elasticsearch.xpack.sql.type.SqlDataTypes.DATE;
|
||||||
import static org.elasticsearch.xpack.sql.util.DateUtils.UTC;
|
import static org.elasticsearch.xpack.sql.util.DateUtils.UTC;
|
||||||
import static org.hamcrest.CoreMatchers.containsString;
|
import static org.hamcrest.CoreMatchers.containsString;
|
||||||
import static org.hamcrest.Matchers.endsWith;
|
import static org.hamcrest.Matchers.endsWith;
|
||||||
|
import static org.hamcrest.Matchers.everyItem;
|
||||||
import static org.hamcrest.Matchers.instanceOf;
|
import static org.hamcrest.Matchers.instanceOf;
|
||||||
import static org.hamcrest.Matchers.startsWith;
|
import static org.hamcrest.Matchers.startsWith;
|
||||||
|
|
||||||
|
@ -132,7 +136,11 @@ public class QueryTranslatorTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
private PhysicalPlan optimizeAndPlan(String sql) {
|
private PhysicalPlan optimizeAndPlan(String sql) {
|
||||||
return planner.plan(optimizer.optimize(plan(sql)), true);
|
return optimizeAndPlan(plan(sql));
|
||||||
|
}
|
||||||
|
|
||||||
|
private PhysicalPlan optimizeAndPlan(LogicalPlan plan) {
|
||||||
|
return planner.plan(optimizer.optimize(plan),true);
|
||||||
}
|
}
|
||||||
|
|
||||||
private QueryTranslation translate(Expression condition) {
|
private QueryTranslation translate(Expression condition) {
|
||||||
|
@ -143,6 +151,10 @@ public class QueryTranslatorTests extends ESTestCase {
|
||||||
return QueryTranslator.toQuery(condition, true);
|
return QueryTranslator.toQuery(condition, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private LogicalPlan parameterizedSql(String sql, SqlTypedParamValue... params) {
|
||||||
|
return analyzer.analyze(parser.createStatement(sql, Arrays.asList(params), org.elasticsearch.xpack.ql.type.DateUtils.UTC), true);
|
||||||
|
}
|
||||||
|
|
||||||
public void testTermEqualityAnalyzer() {
|
public void testTermEqualityAnalyzer() {
|
||||||
LogicalPlan p = plan("SELECT some.string FROM test WHERE some.string = 'value'");
|
LogicalPlan p = plan("SELECT some.string FROM test WHERE some.string = 'value'");
|
||||||
assertTrue(p instanceof Project);
|
assertTrue(p instanceof Project);
|
||||||
|
@ -2206,4 +2218,52 @@ public class QueryTranslatorTests extends ESTestCase {
|
||||||
+ "InternalSqlScriptUtils.asDateTime(params.a0),InternalSqlScriptUtils.asDateTime(params.v0)))\",\"lang\":\"painless\","
|
+ "InternalSqlScriptUtils.asDateTime(params.a0),InternalSqlScriptUtils.asDateTime(params.v0)))\",\"lang\":\"painless\","
|
||||||
+ "\"params\":{\"v0\":\"2020-05-03T00:00:00.000Z\"}},\"gap_policy\":\"skip\"}}}}}}"));
|
+ "\"params\":{\"v0\":\"2020-05-03T00:00:00.000Z\"}},\"gap_policy\":\"skip\"}}}}}}"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testFoldingWithParamsWithoutIndex() {
|
||||||
|
PhysicalPlan p = optimizeAndPlan(parameterizedSql("SELECT ?, ?, ? FROM test",
|
||||||
|
new SqlTypedParamValue("integer", 100),
|
||||||
|
new SqlTypedParamValue("integer", 100),
|
||||||
|
new SqlTypedParamValue("integer", 200)));
|
||||||
|
assertThat(p.output(), everyItem(instanceOf(ReferenceAttribute.class)));
|
||||||
|
assertThat(p.output().get(0).toString(), startsWith("?{r}#"));
|
||||||
|
assertThat(p.output().get(1).toString(), startsWith("?{r}#"));
|
||||||
|
assertThat(p.output().get(2).toString(), startsWith("?{r}#"));
|
||||||
|
assertNotEquals(p.output().get(1).id(), p.output().get(2).id());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testSameAliasForParamAndField() {
|
||||||
|
PhysicalPlan p = optimizeAndPlan(parameterizedSql("SELECT ?, int as \"?\" FROM test",
|
||||||
|
new SqlTypedParamValue("integer", 100)));
|
||||||
|
assertThat(p.output(), everyItem(instanceOf(ReferenceAttribute.class)));
|
||||||
|
assertThat(p.output().get(0).toString(), startsWith("?{r}#"));
|
||||||
|
assertThat(p.output().get(1).toString(), startsWith("?{r}#"));
|
||||||
|
assertNotEquals(p.output().get(0).id(), p.output().get(1).id());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testSameAliasOnSameField() {
|
||||||
|
PhysicalPlan p = optimizeAndPlan(parameterizedSql("SELECT int as \"int\", int as \"int\" FROM test"));
|
||||||
|
assertThat(p.output(), everyItem(instanceOf(ReferenceAttribute.class)));
|
||||||
|
assertThat(p.output().get(0).toString(), startsWith("int{r}#"));
|
||||||
|
assertThat(p.output().get(1).toString(), startsWith("int{r}#"));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testFoldingWithMixedParamsWithoutAlias() {
|
||||||
|
PhysicalPlan p = optimizeAndPlan(parameterizedSql("SELECT ?, ? FROM test",
|
||||||
|
new SqlTypedParamValue("integer", 100),
|
||||||
|
new SqlTypedParamValue("text", "200")));
|
||||||
|
assertThat(p.output(), everyItem(instanceOf(ReferenceAttribute.class)));
|
||||||
|
assertThat(p.output().get(0).toString(), startsWith("?{r}#"));
|
||||||
|
assertThat(p.output().get(1).toString(), startsWith("?{r}#"));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testSameExpressionWithoutAlias() {
|
||||||
|
PhysicalPlan physicalPlan = optimizeAndPlan("SELECT 100, 100 FROM test");
|
||||||
|
assertEquals(EsQueryExec.class, physicalPlan.getClass());
|
||||||
|
EsQueryExec eqe = (EsQueryExec) physicalPlan;
|
||||||
|
assertEquals(2, eqe.output().size());
|
||||||
|
assertThat(eqe.output().get(0).toString(), startsWith("100{r}#"));
|
||||||
|
assertThat(eqe.output().get(1).toString(), startsWith("100{r}#"));
|
||||||
|
// these two should be semantically different reference attributes
|
||||||
|
assertNotEquals(eqe.output().get(0).id(), eqe.output().get(1).id());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in New Issue