SQL: Forbid multi field groups (elastic/x-pack-elasticsearch#3199)
* SQL: GROUP BY with multiple fields is forbidden. The check is performed in the folding (planner) Verifier rather than earlier, because the optimizer can eliminate some grouping fields (for example, ones that reduce to constants), so a GROUP BY that looks multi-field before optimization may end up with a single grouping.

Original commit: elastic/x-pack-elasticsearch@8d49f4ab02
parent 4e49769efb
commit 88b8794801
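
To illustrate the new restriction, here is the query and message exercised by the VerifierErrorMessagesTests class added at the end of this diff (against its test mapping, which has bool and keyword fields); grouping by more than one expression is now rejected at planning time with a PlanningException:

    SELECT bool FROM test GROUP BY bool, keyword;
    -- Found 1 problem(s)
    -- line 1:32: Currently, only a single expression can be used with GROUP BY; please select one of [bool, keyword]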
DebugCsvSpec.java
@@ -15,7 +15,7 @@ import java.sql.SQLException;
 import java.util.List;
 
 @TestLogging(JdbcTestUtils.SQL_TRACE)
-public abstract class DebugCsvSpec extends CsvSpecTestCase {
+public class DebugCsvSpec extends CsvSpecTestCase {
 
     @ParametersFactory(shuffle = false, argumentFormatting = SqlSpecTestCase.PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {
@@ -27,6 +27,7 @@ public abstract class DebugCsvSpec extends CsvSpecTestCase {
         super(fileName, groupName, testName, lineNumber, testCase);
     }
 
+    @Override
     protected void assertResults(ResultSet expected, ResultSet elastic) throws SQLException {
         Logger log = logEsResultSet() ? logger : null;
 

DebugSqlSpec.java
@@ -12,7 +12,7 @@ import org.elasticsearch.test.junit.annotations.TestLogging;
 import java.util.List;
 
 @TestLogging(JdbcTestUtils.SQL_TRACE)
-public abstract class DebugSqlSpec extends SqlSpecTestCase {
+public class DebugSqlSpec extends SqlSpecTestCase {
     @ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {
         Parser parser = specParser();

debug.csv-spec
@@ -3,7 +3,7 @@
 //
 
 debug
-SELECT DAY_OF_YEAR(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
+SELECT int FROM test GROUP BY AVG(int) + 2;
 
 table:s
 test_emp

debug.sql-spec
@@ -3,4 +3,4 @@
 //
 
 debug
-SELECT MONTH(birth_date) AS d, COUNT(*) AS c, CAST(SUM(emp_no) AS INT) s FROM "test_emp" GROUP BY MONTH(birth_date) ORDER BY MONTH(birth_date) DESC;
+SELECT int FROM test GROUP BY AVG(int) + 2;

Verifier.java (analyzer)
@@ -150,9 +150,8 @@ abstract class Verifier {
         // Concrete verifications
 
-        //
         //
         // if there are no (major) unresolved failures, do more in-depth analysis
         //
 
         if (failures.isEmpty()) {
             Map<String, Function> resolvedFunctions = new LinkedHashMap<>();
@@ -183,14 +182,14 @@ abstract class Verifier {
                 if (!groupingFailures.contains(p)) {
                     checkGroupBy(p, localFailures, resolvedFunctions, groupingFailures);
                 }
-                // everything checks out
-                // mark the plan as analyzed
-                if (localFailures.isEmpty()) {
-                    p.setAnalyzed();
-                }
+                // everything checks out
+                // mark the plan as analyzed
+                if (localFailures.isEmpty()) {
+                    p.setAnalyzed();
+                }
 
-                failures.addAll(localFailures);
-            });
+                failures.addAll(localFailures);
+            });
         }
 
         return failures;
@@ -252,7 +251,7 @@ abstract class Verifier {
                         Expressions.names(a.groupings())));
                 groupingFailures.add(a);
                 return false;
             }
-            }
+            }
         }
         return true;
@@ -286,7 +285,7 @@ abstract class Verifier {
         a.aggregates().forEach(ne ->
             ne.collectFirstChildren(c -> checkGroupMatch(c, ne, a.groupings(), missing, functions)));
 
-        if (!missing.isEmpty()) {
+        if (!missing.isEmpty()) {
             String plural = missing.size() > 1 ? "s" : StringUtils.EMPTY;
             localFailures.add(fail(missing.values().iterator().next(), "Cannot use non-grouped column" + plural + " %s, expected %s",
                 Expressions.names(missing.keySet()),
@@ -307,9 +306,9 @@ abstract class Verifier {
                 // TODO: this should be handled by a different rule
                 if (function == null) {
                     return false;
-                }
+                }
                 e = function;
-            }
+            }
 
         // scalar functions can be a binary tree
         // first test the function against the grouping
@@ -319,11 +318,11 @@ abstract class Verifier {
             // found group for the expression
             if (Expressions.anyMatch(groupings, e::semanticEquals)) {
                 return true;
-            }
+            }
             // unwrap function to find the base
             for (Expression arg : sf.arguments()) {
                 arg.collectFirstChildren(c -> checkGroupMatch(c, source, groupings, missing, functions));
-            }
+            }
         }
 
         return true;

Expressions.java
@@ -17,7 +17,7 @@ import static java.util.stream.Collectors.toList;
 
 public abstract class Expressions {
 
-    public static List<NamedExpression> asNamed(List<Expression> exp) {
+    public static List<NamedExpression> asNamed(List<? extends Expression> exp) {
         return exp.stream()
                 .map(NamedExpression.class::cast)
                 .collect(toList());
@@ -72,7 +72,7 @@ public abstract class Expressions {
         return e instanceof NamedExpression ? ((NamedExpression) e).name() : e.nodeName();
     }
 
-    public static List<String> names(Collection<Expression> e) {
+    public static List<String> names(Collection<? extends Expression> e) {
         List<String> names = new ArrayList<>(e.size());
         for (Expression ex : e) {
             names.add(name(ex));
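
Aside: the wildcard widening in Expressions above is what lets the new planner-side check (further down in this commit) pass a.groupings(), presumably typed as a list of a more specific Expression subtype, straight into Expressions.names(...). A minimal, self-contained sketch of the same generics point, with illustrative names that are not taken from the codebase:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class WildcardDemo {

        static class Expression {}

        static class NamedExpression extends Expression {
            private final String name;
            NamedExpression(String name) { this.name = name; }
            String name() { return name; }
        }

        // Declared with "? extends Expression": a List<NamedExpression> (or any other
        // subtype list) can be passed directly. With List<Expression> the call in
        // main() would not compile.
        static List<String> names(List<? extends Expression> expressions) {
            List<String> names = new ArrayList<>(expressions.size());
            for (Expression e : expressions) {
                names.add(e instanceof NamedExpression ? ((NamedExpression) e).name() : e.toString());
            }
            return names;
        }

        public static void main(String[] args) {
            List<NamedExpression> groupings = Arrays.asList(new NamedExpression("bool"), new NamedExpression("keyword"));
            System.out.println(names(groupings)); // prints [bool, keyword]
        }
    }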

PlanningException.java
@@ -7,9 +7,13 @@ package org.elasticsearch.xpack.sql.planner;
 
 import org.elasticsearch.xpack.sql.ClientSqlException;
 import org.elasticsearch.xpack.sql.planner.Verifier.Failure;
+import org.elasticsearch.xpack.sql.util.StringUtils;
 
 import java.util.Collection;
-import java.util.StringJoiner;
+import java.util.Locale;
+import java.util.stream.Collectors;
+
+import static java.lang.String.format;
 
 public class PlanningException extends ClientSqlException {
 
@@ -21,11 +25,9 @@ public class PlanningException extends ClientSqlException {
         super(extractMessage(sources));
     }
 
-    private static String extractMessage(Collection<Failure> sources) {
-        StringJoiner sj = new StringJoiner(",", "{", "}");
-        sources.forEach(s -> {
-            sj.add(s.source().nodeString() + s.source().location());
-        });
-        return "Fail to plan items " + sj.toString();
+    private static String extractMessage(Collection<Failure> failures) {
+        return failures.stream()
+                .map(f -> format(Locale.ROOT, "line %s:%s: %s", f.source().location().getLineNumber(), f.source().location().getColumnNumber(), f.message()))
+                .collect(Collectors.joining(StringUtils.NEW_LINE, "Found " + failures.size() + " problem(s)\n", StringUtils.EMPTY));
     }
 }
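
With the extractMessage rewrite above, planning failures are now reported one per line, each prefixed with its source location and preceded by a "Found N problem(s)" header. For a single failure the message has the shape asserted by the new test at the end of this commit:

    Found 1 problem(s)
    line 1:32: Currently, only a single expression can be used with GROUP BY; please select one of [bool, keyword]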

Verifier.java (planner)
@@ -5,15 +5,17 @@
  */
 package org.elasticsearch.xpack.sql.planner;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Objects;
-
+import org.elasticsearch.xpack.sql.expression.Expressions;
+import org.elasticsearch.xpack.sql.plan.physical.AggregateExec;
 import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.sql.plan.physical.Unexecutable;
 import org.elasticsearch.xpack.sql.plan.physical.UnplannedExec;
 import org.elasticsearch.xpack.sql.tree.Node;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
 abstract class Verifier {
 
     static class Failure {
@@ -22,7 +24,7 @@ abstract class Verifier {
 
         Failure(Node<?> source, String message) {
             this.source = source;
-            this.message = message + " " + source.nodeString();
+            this.message = message;
         }
 
         Node<?> source() {
@@ -69,11 +71,23 @@ abstract class Verifier {
                     failures.add(fail(e, "Unresolved expression"));
                 }
             });
+
+            if (p instanceof AggregateExec) {
+                forbidMultiFieldGroupBy((AggregateExec) p, failures);
+            }
         });
 
         return failures;
     }
 
+    private static void forbidMultiFieldGroupBy(AggregateExec a, List<Failure> failures) {
+        if (a.groupings().size() > 1) {
+            failures.add(fail(a.groupings().get(0), "Currently, only a single expression can be used with GROUP BY; please select one of "
+                    + Expressions.names(a.groupings())));
+        }
+    }
+
+
     static List<Failure> verifyExecutingPlan(PhysicalPlan plan) {
         List<Failure> failures = new ArrayList<>();
 

VerifierErrorMessagesTests.java (new file)
@@ -0,0 +1,78 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.planner;
+
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
+import org.elasticsearch.xpack.sql.analysis.catalog.Catalog;
+import org.elasticsearch.xpack.sql.analysis.catalog.EsIndex;
+import org.elasticsearch.xpack.sql.analysis.catalog.InMemoryCatalog;
+import org.elasticsearch.xpack.sql.expression.function.DefaultFunctionRegistry;
+import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
+import org.elasticsearch.xpack.sql.optimizer.Optimizer;
+import org.elasticsearch.xpack.sql.parser.SqlParser;
+import org.elasticsearch.xpack.sql.session.TestingSqlSession;
+import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.junit.After;
+import org.junit.Before;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import static java.util.Collections.singletonList;
+
+public class VerifierErrorMessagesTests extends ESTestCase {
+
+    private SqlParser parser;
+    private FunctionRegistry functionRegistry;
+    private Catalog catalog;
+    private Analyzer analyzer;
+    private Optimizer optimizer;
+    private Planner planner;
+
+    public VerifierErrorMessagesTests() {
+        parser = new SqlParser();
+        functionRegistry = new DefaultFunctionRegistry();
+
+        Map<String, DataType> mapping = new LinkedHashMap<>();
+        mapping.put("bool", DataTypes.BOOLEAN);
+        mapping.put("int", DataTypes.INTEGER);
+        mapping.put("text", DataTypes.TEXT);
+        mapping.put("keyword", DataTypes.KEYWORD);
+        EsIndex test = new EsIndex("test", mapping);
+        catalog = new InMemoryCatalog(singletonList(test));
+        analyzer = new Analyzer(functionRegistry);
+        optimizer = new Optimizer();
+        planner = new Planner();
+
+    }
+
+    @Before
+    public void setupContext() {
+        TestingSqlSession.setCurrentContext(TestingSqlSession.ctx(catalog));
+    }
+
+    @After
+    public void disposeContext() {
+        TestingSqlSession.removeCurrentContext();
+    }
+
+    private String verify(String sql) {
+        PlanningException e = expectThrows(PlanningException.class,
+                () -> planner.mapPlan(optimizer.optimize(analyzer.analyze(parser.createStatement(sql), true)), true));
+        assertTrue(e.getMessage().startsWith("Found "));
+        String header = "Found 1 problem(s)\nline ";
+        return e.getMessage().substring(header.length());
+    }
+
+
+    public void testMultiGroupBy() {
+        // TODO: location needs to be updated after merging extend-having
+        assertEquals("1:32: Currently, only a single expression can be used with GROUP BY; please select one of [bool, keyword]",
+                verify("SELECT bool FROM test GROUP BY bool, keyword"));
+    }
+}