diff --git a/core/src/main/java/org/apache/druid/utils/Throwables.java b/core/src/main/java/org/apache/druid/utils/Throwables.java
index 9aaad863171..b58025821f3 100644
--- a/core/src/main/java/org/apache/druid/utils/Throwables.java
+++ b/core/src/main/java/org/apache/druid/utils/Throwables.java
@@ -19,17 +19,27 @@
 
 package org.apache.druid.utils;
 
+import javax.annotation.Nullable;
+
 public final class Throwables
 {
-  public static boolean isThrowable(Throwable t, Class<? extends Throwable> searchFor)
+  /**
+   * Searches the cause chain of {@code t} for a throwable that is of the same type as {@code searchFor}.
+   * The class of the returned throwable is either {@code searchFor} itself or a subclass of it.
+   * @param t the throwable whose cause chain is searched
+   * @param searchFor the class of throwable to search for
+   * @return the matching cause if one is found, otherwise null
+   */
+  @Nullable
+  public static Throwable getCauseOfType(Throwable t, Class<? extends Throwable> searchFor)
   {
-    if (t.getClass().isAssignableFrom(searchFor)) {
-      return true;
+    if (searchFor.isAssignableFrom(t.getClass())) {
+      return t;
     } else {
       if (t.getCause() != null) {
-        return isThrowable(t.getCause(), searchFor);
+        return getCauseOfType(t.getCause(), searchFor);
       } else {
-        return false;
+        return null;
       }
     }
   }
diff --git a/core/src/test/java/org/apache/druid/utils/ThrowablesTest.java b/core/src/test/java/org/apache/druid/utils/ThrowablesTest.java
index eaa34cd72c5..bb5dcab267a 100644
--- a/core/src/test/java/org/apache/druid/utils/ThrowablesTest.java
+++ b/core/src/test/java/org/apache/druid/utils/ThrowablesTest.java
@@ -19,30 +19,42 @@
 
 package org.apache.druid.utils;
 
+import org.apache.druid.java.util.common.ISE;
 import org.junit.Assert;
 import org.junit.Test;
 
 public class ThrowablesTest
 {
   @Test
-  public void testIsThrowableItself()
+  public void testGetCauseOfType_Itself()
   {
-    Assert.assertTrue(Throwables.isThrowable(new NoClassDefFoundError(), NoClassDefFoundError.class));
+    Throwable th = new NoClassDefFoundError();
+    Assert.assertSame(th, Throwables.getCauseOfType(th, NoClassDefFoundError.class));
   }
 
   @Test
-  public void testIsThrowableNestedThrowable()
+  public void testGetCauseOfType_NestedThrowable()
   {
-    Assert.assertTrue(
-        Throwables.isThrowable(new RuntimeException(new NoClassDefFoundError()), NoClassDefFoundError.class)
+    Throwable th = new NoClassDefFoundError();
+    Assert.assertSame(th,
+        Throwables.getCauseOfType(new RuntimeException(th), NoClassDefFoundError.class)
     );
   }
 
   @Test
-  public void testIsThrowableNonTarget()
+  public void testGetCauseOfType_NestedThrowableSubclass()
   {
-    Assert.assertFalse(
-        Throwables.isThrowable(new RuntimeException(new ClassNotFoundException()), NoClassDefFoundError.class)
+    Throwable th = new ISE("something");
+    Assert.assertSame(th,
+        Throwables.getCauseOfType(new RuntimeException(th), IllegalStateException.class)
+    );
+  }
+
+  @Test
+  public void testGetCauseOfType_NonTarget()
+  {
+    Assert.assertNull(
+        Throwables.getCauseOfType(new RuntimeException(new ClassNotFoundException()), NoClassDefFoundError.class)
     );
   }
 }
diff --git a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java
index 23be4e5bd3d..2620f4d9602 100644
--- a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java
+++ b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java
@@ -318,9 +318,9 @@ public class QueryLifecycle
     if (e != null) {
       statsMap.put("exception", e.toString());
       if (QueryContexts.isDebug(baseQuery)) {
-        
log.error(e, "Exception while processing queryId [%s]", baseQuery.getId()); + log.warn(e, "Exception while processing queryId [%s]", baseQuery.getId()); } else { - log.noStackTrace().error(e, "Exception while processing queryId [%s]", baseQuery.getId()); + log.noStackTrace().warn(e, "Exception while processing queryId [%s]", baseQuery.getId()); } if (e instanceof QueryInterruptedException || e instanceof QueryTimeoutException) { // Mimic behavior from QueryResource, where this code was originally taken from. diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/ArraySqlAggregator.java b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/ArraySqlAggregator.java index d15860be981..56f1fe8a5dd 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/ArraySqlAggregator.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/ArraySqlAggregator.java @@ -37,7 +37,6 @@ import org.apache.calcite.sql.type.SqlTypeFamily; import org.apache.calcite.sql.type.SqlTypeUtil; import org.apache.calcite.util.Optionality; import org.apache.druid.java.util.common.HumanReadableBytes; -import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory; @@ -50,9 +49,11 @@ import org.apache.druid.sql.calcite.expression.DruidExpression; import org.apache.druid.sql.calcite.expression.Expressions; import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.PlannerContext; +import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException; import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry; import javax.annotation.Nullable; + import java.util.List; import java.util.Objects; import java.util.stream.Collectors; @@ -184,7 +185,7 @@ public class ArraySqlAggregator implements SqlAggregator { RelDataType type = sqlOperatorBinding.getOperandType(0); if (SqlTypeUtil.isArray(type)) { - throw new ISE("Cannot use ARRAY_AGG on array inputs %s", type); + throw new UnsupportedSQLQueryException("Cannot use ARRAY_AGG on array inputs %s", type); } return Calcites.createSqlArrayTypeWithNullability( sqlOperatorBinding.getTypeFactory(), diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/EarliestLatestAnySqlAggregator.java b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/EarliestLatestAnySqlAggregator.java index 560708f439c..224ecf4575f 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/EarliestLatestAnySqlAggregator.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/EarliestLatestAnySqlAggregator.java @@ -60,9 +60,11 @@ import org.apache.druid.sql.calcite.expression.DruidExpression; import org.apache.druid.sql.calcite.expression.Expressions; import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.PlannerContext; +import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException; import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry; import javax.annotation.Nullable; + import java.util.Collections; import java.util.List; import java.util.stream.Collectors; @@ -90,7 +92,7 @@ public class EarliestLatestAnySqlAggregator implements SqlAggregator case COMPLEX: return new StringFirstAggregatorFactory(name, fieldName, timeColumn, maxStringBytes); default: - throw new ISE("Cannot build 
EARLIEST aggregatorFactory for type[%s]", type); + throw new UnsupportedSQLQueryException("EARLIEST aggregator is not supported for '%s' type", type); } } }, @@ -110,7 +112,7 @@ public class EarliestLatestAnySqlAggregator implements SqlAggregator case COMPLEX: return new StringLastAggregatorFactory(name, fieldName, timeColumn, maxStringBytes); default: - throw new ISE("Cannot build LATEST aggregatorFactory for type[%s]", type); + throw new UnsupportedSQLQueryException("LATEST aggregator is not supported for '%s' type", type); } } }, @@ -129,7 +131,7 @@ public class EarliestLatestAnySqlAggregator implements SqlAggregator case STRING: return new StringAnyAggregatorFactory(name, fieldName, maxStringBytes); default: - throw new ISE("Cannot build ANY aggregatorFactory for type[%s]", type); + throw new UnsupportedSQLQueryException("ANY aggregation is not supported for '%s' type", type); } } }; diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MaxSqlAggregator.java b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MaxSqlAggregator.java index 06633d8cf6a..e16e597cf8e 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MaxSqlAggregator.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MaxSqlAggregator.java @@ -22,7 +22,6 @@ package org.apache.druid.sql.calcite.aggregation.builtin; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.sql.SqlAggFunction; import org.apache.calcite.sql.fun.SqlStdOperatorTable; -import org.apache.druid.java.util.common.ISE; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory; @@ -32,6 +31,7 @@ import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.ValueType; import org.apache.druid.sql.calcite.aggregation.Aggregation; import org.apache.druid.sql.calcite.planner.Calcites; +import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException; public class MaxSqlAggregator extends SimpleSqlAggregator { @@ -73,7 +73,7 @@ public class MaxSqlAggregator extends SimpleSqlAggregator case DOUBLE: return new DoubleMaxAggregatorFactory(name, fieldName, expression, macroTable); default: - throw new ISE("Cannot create aggregator factory for type[%s]", aggregationType); + throw new UnsupportedSQLQueryException("Max aggregation is not supported for '%s' type", aggregationType); } } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MinSqlAggregator.java b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MinSqlAggregator.java index 373a7c683ac..e6112802bb7 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MinSqlAggregator.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MinSqlAggregator.java @@ -22,7 +22,6 @@ package org.apache.druid.sql.calcite.aggregation.builtin; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.sql.SqlAggFunction; import org.apache.calcite.sql.fun.SqlStdOperatorTable; -import org.apache.druid.java.util.common.ISE; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory; @@ -31,6 +30,7 @@ import org.apache.druid.query.aggregation.LongMinAggregatorFactory; import org.apache.druid.segment.column.ColumnType; import 
org.apache.druid.sql.calcite.aggregation.Aggregation; import org.apache.druid.sql.calcite.planner.Calcites; +import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException; public class MinSqlAggregator extends SimpleSqlAggregator { @@ -69,7 +69,7 @@ public class MinSqlAggregator extends SimpleSqlAggregator case DOUBLE: return new DoubleMinAggregatorFactory(name, fieldName, expression, macroTable); default: - throw new ISE("Cannot create aggregator factory for type[%s]", aggregationType); + throw new UnsupportedSQLQueryException("MIN aggregator is not supported for '%s' type", aggregationType); } } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/SumSqlAggregator.java b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/SumSqlAggregator.java index bea89124544..608941a1dc7 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/SumSqlAggregator.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/SumSqlAggregator.java @@ -22,7 +22,6 @@ package org.apache.druid.sql.calcite.aggregation.builtin; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.sql.SqlAggFunction; import org.apache.calcite.sql.fun.SqlStdOperatorTable; -import org.apache.druid.java.util.common.ISE; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; @@ -32,6 +31,7 @@ import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.ValueType; import org.apache.druid.sql.calcite.aggregation.Aggregation; import org.apache.druid.sql.calcite.planner.Calcites; +import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException; public class SumSqlAggregator extends SimpleSqlAggregator { @@ -73,7 +73,7 @@ public class SumSqlAggregator extends SimpleSqlAggregator case DOUBLE: return new DoubleSumAggregatorFactory(name, fieldName, expression, macroTable); default: - throw new ISE("Cannot create aggregator factory for type[%s]", aggregationType); + throw new UnsupportedSQLQueryException("Sum aggregation is not supported for '%s' type", aggregationType); } } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index 8a4ae9a8201..eeb4e4f8254 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -70,8 +70,10 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.guava.BaseSequence; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Sequences; +import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.query.Query; +import org.apache.druid.query.QueryContexts; import org.apache.druid.segment.DimensionHandlerUtils; import org.apache.druid.server.security.Action; import org.apache.druid.server.security.Resource; @@ -83,6 +85,7 @@ import org.apache.druid.sql.calcite.rel.DruidRel; import org.apache.druid.sql.calcite.rel.DruidUnionRel; import org.apache.druid.sql.calcite.run.QueryMaker; import org.apache.druid.sql.calcite.run.QueryMakerFactory; +import org.apache.druid.utils.Throwables; import javax.annotation.Nullable; @@ -204,20 +207,38 @@ public class DruidPlanner 
implements Closeable
     try {
       return planWithDruidConvention(rootQueryRel, parsed.getExplainNode(), parsed.getInsertNode());
     }
-    catch (RelOptPlanner.CannotPlanException e) {
-      if (parsed.getInsertNode() == null) {
-        // Try again with BINDABLE convention. Used for querying Values and metadata tables.
-        try {
-          return planWithBindableConvention(rootQueryRel, parsed.getExplainNode());
-        }
-        catch (Exception e2) {
-          e.addSuppressed(e2);
-          throw e;
-        }
-      } else {
+    catch (Exception e) {
+      Throwable cannotPlanException = Throwables.getCauseOfType(e, RelOptPlanner.CannotPlanException.class);
+      if (null == cannotPlanException) {
+        // Not caused by a CannotPlanException; rethrow without trying the BINDABLE convention.
+        throw e;
+      }
+      if (parsed.getInsertNode() != null) {
         // Cannot INSERT with BINDABLE.
         throw e;
       }
+      // Try again with BINDABLE convention. Used for querying Values and metadata tables.
+      try {
+        return planWithBindableConvention(rootQueryRel, parsed.getExplainNode());
+      }
+      catch (Exception e2) {
+        e.addSuppressed(e2);
+        Logger logger = log;
+        if (!QueryContexts.isDebug(plannerContext.getQueryContext())) {
+          logger = log.noStackTrace();
+        }
+        logger.warn(e, "Failed to plan the query '%s'", plannerContext.getSql());
+        String errorMessage = plannerContext.getPlanningError();
+        if (null == errorMessage && cannotPlanException instanceof UnsupportedSQLQueryException) {
+          errorMessage = cannotPlanException.getMessage();
+        }
+        if (null == errorMessage) {
+          errorMessage = "Please check broker logs for more details";
+        } else {
+          errorMessage = "Possible error: " + errorMessage;
+        }
+        throw new UnsupportedSQLQueryException(errorMessage);
+      }
     }
   }
 
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidRexExecutor.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidRexExecutor.java
index dda69f7aff1..65e85641ff7 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidRexExecutor.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidRexExecutor.java
@@ -25,7 +25,6 @@ import org.apache.calcite.rex.RexExecutor;
 import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.druid.java.util.common.DateTimes;
-import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.math.expr.Expr;
 import org.apache.druid.math.expr.ExprEval;
 import org.apache.druid.math.expr.ExprType;
@@ -94,7 +93,7 @@ public class DruidRexExecutor implements RexExecutor
         // as a primitive long/float/double.
         // ExprEval.isNumericNull checks whether the parsed primitive value is null or not.
         if (!constExp.getType().isNullable() && exprResult.isNumericNull()) {
-          throw new IAE("Illegal DATE constant: %s", constExp);
+          throw new UnsupportedSQLQueryException("Illegal DATE constant: %s", constExp);
         }
 
         literal = rexBuilder.makeDateLiteral(
@@ -108,7 +107,7 @@ public class DruidRexExecutor implements RexExecutor
         // as a primitive long/float/double.
         // ExprEval.isNumericNull checks whether the parsed primitive value is null or not.
         
if (!constExp.getType().isNullable() && exprResult.isNumericNull()) { - throw new IAE("Illegal TIMESTAMP constant: %s", constExp); + throw new UnsupportedSQLQueryException("Illegal TIMESTAMP constant: %s", constExp); } literal = rexBuilder.makeTimestampLiteral( @@ -134,7 +133,7 @@ public class DruidRexExecutor implements RexExecutor double exprResultDouble = exprResult.asDouble(); if (Double.isNaN(exprResultDouble) || Double.isInfinite(exprResultDouble)) { String expression = druidExpression.getExpression(); - throw new IAE("'%s' evaluates to '%s' that is not supported in SQL. You can either cast the expression as bigint ('cast(%s as bigint)') or char ('cast(%s as char)') or change the expression itself", + throw new UnsupportedSQLQueryException("'%s' evaluates to '%s' that is not supported in SQL. You can either cast the expression as bigint ('cast(%s as bigint)') or char ('cast(%s as char)') or change the expression itself", expression, Double.toString(exprResultDouble), expression, @@ -157,7 +156,7 @@ public class DruidRexExecutor implements RexExecutor doubleVal -> { if (Double.isNaN(doubleVal) || Double.isInfinite(doubleVal)) { String expression = druidExpression.getExpression(); - throw new IAE( + throw new UnsupportedSQLQueryException( "'%s' contains an element that evaluates to '%s' which is not supported in SQL. You can either cast the element in the array to bigint or char or change the expression itself", expression, Double.toString(doubleVal) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index 82e1ddc65c1..8bca6e2a4df 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -31,6 +31,7 @@ import org.apache.calcite.schema.SchemaPlus; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.Numbers; +import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.BaseQuery; import org.apache.druid.server.security.Access; @@ -43,6 +44,7 @@ import org.joda.time.DateTimeZone; import org.joda.time.Interval; import javax.annotation.Nullable; + import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -89,6 +91,9 @@ public class PlannerContext private Set resourceActions = null; // result of authorizing set of resources against authentication identity private Access authorizationResult; + // error messages encountered while planning the query + @Nullable + private String planningError; private QueryMaker queryMaker; private PlannerContext( @@ -252,6 +257,23 @@ public class PlannerContext this.nativeQueryIds.add(queryId); } + @Nullable + public String getPlanningError() + { + return planningError; + } + + /** + * Sets the planning error in the context that will be shown to the user if the SQL query cannot be translated + * to a native query. This error is often a hint and thus should be phrased as such. Also, the final plan can + * be very different from SQL that user has written. So again, the error should be phrased to indicate this gap + * clearly. + */ + public void setPlanningError(String formatText, Object... 
arguments) + { + planningError = StringUtils.nonStrictFormat(formatText, arguments); + } + public DataContext createDataContext(final JavaTypeFactory typeFactory, List parameters) { class DruidDataContext implements DataContext diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/UnsupportedSQLQueryException.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/UnsupportedSQLQueryException.java new file mode 100644 index 00000000000..8c4576f1fab --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/UnsupportedSQLQueryException.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql.calcite.planner; + +import org.apache.calcite.plan.RelOptPlanner; +import org.apache.druid.java.util.common.StringUtils; + +/** + * This class is different from {@link RelOptPlanner.CannotPlanException} in that the error + * messages are user-friendly unlike its parent class. This exception class should be used instead of + * {@link org.apache.druid.java.util.common.ISE} or {@link org.apache.druid.java.util.common.IAE} when processing is + * to be halted during planning. Similarly, Druid planner can catch this exception and know that the error + * can be directly exposed to end-users. + */ +public class UnsupportedSQLQueryException extends RelOptPlanner.CannotPlanException +{ + public UnsupportedSQLQueryException(String formatText, Object... 
arguments) + { + super(StringUtils.nonStrictFormat(formatText, arguments)); + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidJoinQueryRel.java b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidJoinQueryRel.java index e651433ccac..7daf70e6e83 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidJoinQueryRel.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidJoinQueryRel.java @@ -38,7 +38,6 @@ import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.SqlKind; -import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.StringUtils; @@ -54,9 +53,11 @@ import org.apache.druid.sql.calcite.expression.Expressions; import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerContext; +import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException; import org.apache.druid.sql.calcite.table.RowSignatures; import javax.annotation.Nullable; + import java.util.HashSet; import java.util.List; import java.util.Set; @@ -335,7 +336,7 @@ public class DruidJoinQueryRel extends DruidRel case INNER: return JoinType.INNER; default: - throw new IAE("Cannot handle joinType[%s]", calciteJoinType); + throw new UnsupportedSQLQueryException("Cannot handle joinType '%s'", calciteJoinType); } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java index a662f11ddc6..2292781adaa 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java @@ -87,6 +87,7 @@ import org.apache.druid.sql.calcite.table.RowSignatures; import javax.annotation.Nonnull; import javax.annotation.Nullable; + import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -405,6 +406,7 @@ public class DruidQuery final ColumnType outputType = Calcites.getColumnTypeForRelDataType(dataType); if (Types.isNullOr(outputType, ValueType.COMPLEX)) { // Can't group on unknown or COMPLEX types. + plannerContext.setPlanningError("SQL requires a group-by on a column of type %s that is unsupported.", outputType); throw new CannotBuildQueryException(aggregate, rexNode); } @@ -515,6 +517,9 @@ public class DruidQuery ); if (aggregation == null) { + if (null == plannerContext.getPlanningError()) { + plannerContext.setPlanningError("Aggregation [%s] is not supported", aggCall); + } throw new CannotBuildQueryException(aggregate, aggCall); } @@ -1149,6 +1154,8 @@ public class DruidQuery || orderByColumns.stream() .anyMatch(orderBy -> !orderBy.getColumnName().equals(ColumnHolder.TIME_COLUMN_NAME)))) { // Cannot handle this ordering. + // Scan cannot ORDER BY non-time columns. 
+ plannerContext.setPlanningError("SQL query requires order by non-time column %s that is not supported.", orderByColumns); return null; } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidUnionDataSourceRel.java b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidUnionDataSourceRel.java index 5b3d127304a..15e363af1e2 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidUnionDataSourceRel.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidUnionDataSourceRel.java @@ -124,12 +124,17 @@ public class DruidUnionDataSourceRel extends DruidRel for (final RelNode relNode : unionRel.getInputs()) { final DruidRel druidRel = (DruidRel) relNode; if (!DruidRels.isScanOrMapping(druidRel, false)) { + getPlannerContext().setPlanningError("SQL requires union between inputs that are not simple table scans " + + "and involve a filter or aliasing"); throw new CannotBuildQueryException(druidRel); } final DruidQuery query = druidRel.toDruidQuery(false); final DataSource dataSource = query.getDataSource(); if (!(dataSource instanceof TableDataSource)) { + getPlannerContext().setPlanningError("SQL requires union with input of '%s' type that is not supported." + + " Union operation is only supported between regular tables. ", + dataSource.getClass().getSimpleName()); throw new CannotBuildQueryException(druidRel); } @@ -140,6 +145,7 @@ public class DruidUnionDataSourceRel extends DruidRel if (signature.getColumnNames().equals(query.getOutputRowSignature().getColumnNames())) { dataSources.add((TableDataSource) dataSource); } else { + getPlannerContext().setPlanningError("There is a mismatch between the output row signature of input tables and the row signature of union output."); throw new CannotBuildQueryException(druidRel); } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidJoinRule.java b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidJoinRule.java index 46ff31f406c..8844b00d57e 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidJoinRule.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidJoinRule.java @@ -43,6 +43,8 @@ import org.apache.calcite.tools.RelBuilder; import org.apache.calcite.util.ImmutableBitSet; import org.apache.druid.java.util.common.Pair; import org.apache.druid.query.LookupDataSource; +import org.apache.druid.query.QueryContexts; +import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.rel.DruidJoinQueryRel; import org.apache.druid.sql.calcite.rel.DruidQueryRel; import org.apache.druid.sql.calcite.rel.DruidRel; @@ -59,12 +61,11 @@ import java.util.stream.Collectors; public class DruidJoinRule extends RelOptRule { - private static final DruidJoinRule INSTANCE = new DruidJoinRule(true); - private static final DruidJoinRule LEFT_SCAN_AS_QUERY = new DruidJoinRule(false); private final boolean enableLeftScanDirect; + private final PlannerContext plannerContext; - private DruidJoinRule(final boolean enableLeftScanDirect) + private DruidJoinRule(final PlannerContext plannerContext) { super( operand( @@ -73,12 +74,13 @@ public class DruidJoinRule extends RelOptRule operand(DruidRel.class, any()) ) ); - this.enableLeftScanDirect = enableLeftScanDirect; + this.enableLeftScanDirect = QueryContexts.getEnableJoinLeftScanDirect(plannerContext.getQueryContext()); + this.plannerContext = plannerContext; } - public static DruidJoinRule instance(boolean enableLeftScanDirect) + public static DruidJoinRule instance(PlannerContext plannerContext) { - 
return enableLeftScanDirect ? INSTANCE : LEFT_SCAN_AS_QUERY; + return new DruidJoinRule(plannerContext); } @Override @@ -220,7 +222,7 @@ public class DruidJoinRule extends RelOptRule * Returns whether {@link #analyzeCondition} would return something. */ @VisibleForTesting - static boolean canHandleCondition(final RexNode condition, final RelDataType leftRowType, DruidRel right) + boolean canHandleCondition(final RexNode condition, final RelDataType leftRowType, DruidRel right) { return analyzeCondition(condition, leftRowType, right).isPresent(); } @@ -229,7 +231,7 @@ public class DruidJoinRule extends RelOptRule * If this condition is an AND of some combination of (1) literals; (2) equality conditions of the form * {@code f(LeftRel) = RightColumn}, then return a {@link ConditionAnalysis}. */ - private static Optional analyzeCondition( + private Optional analyzeCondition( final RexNode condition, final RelDataType leftRowType, DruidRel right @@ -265,6 +267,8 @@ public class DruidJoinRule extends RelOptRule if (!subCondition.isA(SqlKind.EQUALS)) { // If it's not EQUALS, it's not supported. + plannerContext.setPlanningError("SQL requires a join with '%s' condition that is not supported.", + subCondition.getKind()); return Optional.empty(); } @@ -280,6 +284,7 @@ public class DruidJoinRule extends RelOptRule rightColumns.add((RexInputRef) operands.get(0)); } else { // Cannot handle this condition. + plannerContext.setPlanningError("SQL is resulting in a join that have unsupported operand types."); return Optional.empty(); } } @@ -294,6 +299,7 @@ public class DruidJoinRule extends RelOptRule if (distinctRightColumns > 1) { // it means that the join's right side is lookup and the join condition contains both key and value columns of lookup. // currently, the lookup datasource in the native engine doesn't support using value column in the join condition. 
+ plannerContext.setPlanningError("SQL is resulting in a join involving lookup where value column is used in the condition."); return Optional.empty(); } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidLogicalValuesRule.java b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidLogicalValuesRule.java index e66f5ae6afd..df8c5162056 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidLogicalValuesRule.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidLogicalValuesRule.java @@ -25,11 +25,11 @@ import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.rel.logical.LogicalValues; import org.apache.calcite.rex.RexLiteral; -import org.apache.druid.java.util.common.IAE; import org.apache.druid.query.InlineDataSource; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.PlannerContext; +import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException; import org.apache.druid.sql.calcite.rel.DruidQueryRel; import org.apache.druid.sql.calcite.table.DruidTable; import org.apache.druid.sql.calcite.table.RowSignatures; @@ -120,7 +120,7 @@ public class DruidLogicalValuesRule extends RelOptRule case TIME: case TIME_WITH_LOCAL_TIME_ZONE: default: - throw new IAE("Unsupported type[%s]", literal.getTypeName()); + throw new UnsupportedSQLQueryException("%s type is not supported", literal.getType().getSqlTypeName()); } } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidRules.java b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidRules.java index 6ba4dc75b2f..113f37d896f 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidRules.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidRules.java @@ -29,7 +29,6 @@ import org.apache.calcite.rel.core.Filter; import org.apache.calcite.rel.core.Project; import org.apache.calcite.rel.core.Sort; import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.query.QueryContexts; import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.rel.DruidOuterQueryRel; import org.apache.druid.sql.calcite.rel.DruidRel; @@ -51,7 +50,6 @@ public class DruidRules public static List rules(PlannerContext plannerContext) { - boolean enableLeftScanDirect = QueryContexts.getEnableJoinLeftScanDirect(plannerContext.getQueryContext()); return ImmutableList.of( new DruidQueryRule<>( Filter.class, @@ -92,10 +90,10 @@ public class DruidRules DruidOuterQueryRule.WHERE_FILTER, DruidOuterQueryRule.SELECT_PROJECT, DruidOuterQueryRule.SORT, - DruidUnionRule.instance(), - DruidUnionDataSourceRule.instance(), + new DruidUnionRule(plannerContext), + new DruidUnionDataSourceRule(plannerContext), DruidSortUnionRule.instance(), - DruidJoinRule.instance(enableLeftScanDirect) + DruidJoinRule.instance(plannerContext) ); } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidUnionDataSourceRule.java b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidUnionDataSourceRule.java index ae3ff39009c..99f6248b37d 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidUnionDataSourceRule.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidUnionDataSourceRule.java @@ -27,6 +27,7 @@ import org.apache.calcite.util.mapping.Mappings; import org.apache.druid.java.util.common.ISE; import org.apache.druid.query.TableDataSource; import 
org.apache.druid.segment.column.RowSignature; +import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.rel.DruidQueryRel; import org.apache.druid.sql.calcite.rel.DruidRel; import org.apache.druid.sql.calcite.rel.DruidRels; @@ -34,7 +35,10 @@ import org.apache.druid.sql.calcite.rel.DruidUnionDataSourceRel; import org.apache.druid.sql.calcite.rel.PartialDruidQuery; import org.apache.druid.sql.calcite.table.DruidTable; +import javax.annotation.Nullable; + import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Optional; @@ -44,9 +48,9 @@ import java.util.Optional; */ public class DruidUnionDataSourceRule extends RelOptRule { - private static final DruidUnionDataSourceRule INSTANCE = new DruidUnionDataSourceRule(); + private final PlannerContext plannerContext; - private DruidUnionDataSourceRule() + public DruidUnionDataSourceRule(final PlannerContext plannerContext) { super( operand( @@ -55,11 +59,7 @@ public class DruidUnionDataSourceRule extends RelOptRule operand(DruidQueryRel.class, none()) ) ); - } - - public static DruidUnionDataSourceRule instance() - { - return INSTANCE; + this.plannerContext = plannerContext; } @Override @@ -69,7 +69,7 @@ public class DruidUnionDataSourceRule extends RelOptRule final DruidRel firstDruidRel = call.rel(1); final DruidQueryRel secondDruidRel = call.rel(2); - return isCompatible(unionRel, firstDruidRel, secondDruidRel); + return isCompatible(unionRel, firstDruidRel, secondDruidRel, plannerContext); } @Override @@ -90,7 +90,7 @@ public class DruidUnionDataSourceRule extends RelOptRule call.transformTo( DruidUnionDataSourceRel.create( (Union) newUnionRel, - getColumnNamesIfTableOrUnion(firstDruidRel).get(), + getColumnNamesIfTableOrUnion(firstDruidRel, plannerContext).get(), firstDruidRel.getPlannerContext() ) ); @@ -103,7 +103,7 @@ public class DruidUnionDataSourceRule extends RelOptRule call.transformTo( DruidUnionDataSourceRel.create( unionRel, - getColumnNamesIfTableOrUnion(firstDruidRel).get(), + getColumnNamesIfTableOrUnion(firstDruidRel, plannerContext).get(), firstDruidRel.getPlannerContext() ) ); @@ -112,22 +112,39 @@ public class DruidUnionDataSourceRule extends RelOptRule // Can only do UNION ALL of inputs that have compatible schemas (or schema mappings) and right side // is a simple table scan - public static boolean isCompatible(final Union unionRel, final DruidRel first, final DruidRel second) + public static boolean isCompatible(final Union unionRel, final DruidRel first, final DruidRel second, @Nullable PlannerContext plannerContext) { if (!(second instanceof DruidQueryRel)) { return false; } - return unionRel.all && isUnionCompatible(first, second); + if (!unionRel.all && null != plannerContext) { + plannerContext.setPlanningError("SQL requires 'UNION' but only 'UNION ALL' is supported."); + } + return unionRel.all && isUnionCompatible(first, second, plannerContext); } - private static boolean isUnionCompatible(final DruidRel first, final DruidRel second) + private static boolean isUnionCompatible(final DruidRel first, final DruidRel second, @Nullable PlannerContext plannerContext) { - final Optional> columnNames = getColumnNamesIfTableOrUnion(first); - return columnNames.isPresent() && columnNames.equals(getColumnNamesIfTableOrUnion(second)); + final Optional> firstColumnNames = getColumnNamesIfTableOrUnion(first, plannerContext); + final Optional> secondColumnNames = getColumnNamesIfTableOrUnion(second, plannerContext); + if (!firstColumnNames.isPresent() 
|| !secondColumnNames.isPresent()) { + // No need to set the planning error here + return false; + } + if (!firstColumnNames.equals(secondColumnNames)) { + if (null != plannerContext) { + plannerContext.setPlanningError("SQL requires union between two tables and column names queried for " + + "each table are different Left: %s, Right: %s.", + firstColumnNames.orElse(Collections.emptyList()), + secondColumnNames.orElse(Collections.emptyList())); + } + return false; + } + return true; } - static Optional> getColumnNamesIfTableOrUnion(final DruidRel druidRel) + static Optional> getColumnNamesIfTableOrUnion(final DruidRel druidRel, @Nullable PlannerContext plannerContext) { final PartialDruidQuery partialQuery = druidRel.getPartialDruidQuery(); @@ -171,7 +188,17 @@ public class DruidUnionDataSourceRule extends RelOptRule // This rel is a union itself. return Optional.of(((DruidUnionDataSourceRel) druidRel).getUnionColumnNames()); + } else if (druidTable.isPresent()) { + if (null != plannerContext) { + plannerContext.setPlanningError("SQL requires union between inputs that are not simple table scans " + + "and involve a filter or aliasing. Or column types of tables being unioned are not of same type."); + } + return Optional.empty(); } else { + if (null != plannerContext) { + plannerContext.setPlanningError("SQL requires union with input of a datasource type that is not supported." + + " Union operation is only supported between regular tables. "); + } return Optional.empty(); } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidUnionRule.java b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidUnionRule.java index 6e7fec39c99..40cb2161c15 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidUnionRule.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidUnionRule.java @@ -23,6 +23,7 @@ import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.Union; +import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.rel.DruidRel; import org.apache.druid.sql.calcite.rel.DruidUnionRel; @@ -33,9 +34,9 @@ import java.util.List; */ public class DruidUnionRule extends RelOptRule { - private static final DruidUnionRule INSTANCE = new DruidUnionRule(); + private final PlannerContext plannerContext; - private DruidUnionRule() + public DruidUnionRule(PlannerContext plannerContext) { super( operand( @@ -44,11 +45,7 @@ public class DruidUnionRule extends RelOptRule operand(DruidRel.class, none()) ) ); - } - - public static DruidUnionRule instance() - { - return INSTANCE; + this.plannerContext = plannerContext; } @Override @@ -58,7 +55,7 @@ public class DruidUnionRule extends RelOptRule final Union unionRel = call.rel(0); final DruidRel firstDruidRel = call.rel(1); final DruidRel secondDruidRel = call.rel(2); - return !DruidUnionDataSourceRule.isCompatible(unionRel, firstDruidRel, secondDruidRel); + return !DruidUnionDataSourceRule.isCompatible(unionRel, firstDruidRel, secondDruidRel, null); } @Override @@ -78,6 +75,8 @@ public class DruidUnionRule extends RelOptRule -1 ) ); + } else { + plannerContext.setPlanningError("SQL requires 'UNION' but only 'UNION ALL' is supported."); } } } diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java index f23c92a6022..4e239c94a62 100644 --- 
a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java @@ -213,7 +213,7 @@ public class SqlResource final Throwable exceptionToReport; if (e instanceof RelOptPlanner.CannotPlanException) { - exceptionToReport = new ISE("Cannot build plan for query: %s", sqlQuery.getQuery()); + exceptionToReport = new ISE("Cannot build plan for query: %s. %s", sqlQuery.getQuery(), e.getMessage()); } else { exceptionToReport = e; } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java index ff85165a779..bd821ae4d94 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java @@ -489,13 +489,13 @@ public class BaseCalciteQueryTest extends CalciteTestBase @Rule public QueryLogHook getQueryLogHook() { - return queryLogHook = QueryLogHook.create(createQueryJsonMapper()); + queryJsonMapper = createQueryJsonMapper(); + return queryLogHook = QueryLogHook.create(queryJsonMapper); } @Before public void setUp() throws Exception { - queryJsonMapper = createQueryJsonMapper(); walker = createQuerySegmentWalker(); // also register the static injected mapper, though across multiple test runs @@ -571,12 +571,12 @@ public class BaseCalciteQueryTest extends CalciteTestBase return modules; } - public void assertQueryIsUnplannable(final String sql) + public void assertQueryIsUnplannable(final String sql, String expectedError) { - assertQueryIsUnplannable(PLANNER_CONFIG_DEFAULT, sql); + assertQueryIsUnplannable(PLANNER_CONFIG_DEFAULT, sql, expectedError); } - public void assertQueryIsUnplannable(final PlannerConfig plannerConfig, final String sql) + public void assertQueryIsUnplannable(final PlannerConfig plannerConfig, final String sql, String expectedError) { Exception e = null; try { @@ -590,6 +590,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase log.error(e, "Expected CannotPlanException for query: %s", sql); Assert.fail(sql); } + Assert.assertEquals(sql, expectedError, e.getMessage()); } /** diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteJoinQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteJoinQueryTest.java index dd2ac3909d4..ac5890dd203 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteJoinQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteJoinQueryTest.java @@ -3118,7 +3118,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest // This query is expected to fail as we do not support join with constant in the on condition // (see issue https://github.com/apache/druid/issues/9942 for more information) // TODO: Remove expected Exception when https://github.com/apache/druid/issues/9942 is fixed - @Test(expected = RelOptPlanner.CannotPlanException.class) + @Test @Parameters(source = QueryContextForJoinProvider.class) public void testJoinOnConstantShouldFail(Map queryContext) throws Exception { @@ -3126,12 +3126,19 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest final String query = "SELECT t1.dim1 from foo as t1 LEFT JOIN foo as t2 on t1.dim1 = '10.1'"; - testQuery( - query, - queryContext, - ImmutableList.of(), - ImmutableList.of() - ); + try { + testQuery( + query, + queryContext, + ImmutableList.of(), + ImmutableList.of() + ); + } + catch (RelOptPlanner.CannotPlanException cpe) { + 
Assert.assertEquals(cpe.getMessage(), "Possible error: SQL is resulting in a join that have unsupported operand types."); + return; + } + Assert.fail("Expected an exception of type: " + RelOptPlanner.CannotPlanException.class); } @Test diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index 2e11ab97828..02abe1398bf 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -27,7 +27,6 @@ import org.apache.calcite.plan.RelOptPlanner; import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.HumanReadableBytes; -import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.JodaUtils; import org.apache.druid.java.util.common.StringUtils; @@ -112,6 +111,7 @@ import org.apache.druid.sql.calcite.filtration.Filtration; import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerContext; +import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException; import org.apache.druid.sql.calcite.rel.CannotBuildQueryException; import org.apache.druid.sql.calcite.util.CalciteTests; import org.joda.time.DateTime; @@ -2606,8 +2606,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest + "dim3, dim2, SUM(m1), COUNT(*)\n" + "FROM (SELECT dim3, dim2, m1 FROM foo2 UNION ALL SELECT dim3, dim2, m1 FROM foo)\n" + "WHERE dim2 = 'a' OR dim2 = 'en'\n" - + "GROUP BY 1, 2" - ); + + "GROUP BY 1, 2", + "Possible error: SQL requires union between inputs that are not simple table scans and involve a " + + "filter or aliasing. Or column types of tables being unioned are not of same type."); } @Test @@ -2620,7 +2621,20 @@ public class CalciteQueryTest extends BaseCalciteQueryTest + "c, COUNT(*)\n" + "FROM (SELECT dim1 AS c, m1 FROM foo UNION ALL SELECT dim2 AS c, m1 FROM numfoo)\n" + "WHERE c = 'a' OR c = 'def'\n" - + "GROUP BY 1" + + "GROUP BY 1", + "Possible error: SQL requires union between two tables " + + "and column names queried for each table are different Left: [dim1], Right: [dim2]." + ); + } + + @Test + public void testUnionIsUnplannable() + { + // Cannot plan this UNION operation + + assertQueryIsUnplannable( + "SELECT dim2, dim1, m1 FROM foo2 UNION SELECT dim1, dim2, m1 FROM foo", + "Possible error: SQL requires 'UNION' but only 'UNION ALL' is supported." ); } @@ -2634,7 +2648,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest + "c, COUNT(*)\n" + "FROM (SELECT dim1 AS c, m1 FROM foo UNION ALL SELECT cnt AS c, m1 FROM numfoo)\n" + "WHERE c = 'a' OR c = 'def'\n" - + "GROUP BY 1" + + "GROUP BY 1", + "Possible error: SQL requires union between inputs that are not simple table scans and involve " + + "a filter or aliasing. Or column types of tables being unioned are not of same type." ); } @@ -2732,7 +2748,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest + "dim1, dim2, SUM(m1), COUNT(*)\n" + "FROM (SELECT dim1, dim2, m1 FROM foo UNION ALL SELECT dim2, dim1, m1 FROM foo)\n" + "WHERE dim2 = 'a' OR dim2 = 'def'\n" - + "GROUP BY 1, 2" + + "GROUP BY 1, 2", + "Possible error: SQL requires union between two tables and column names queried for each table are different Left: [dim1, dim2, m1], Right: [dim2, dim1, m1]." 
); } @@ -3629,31 +3646,6 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ); } - @Test - public void testUnplannableQueries() - { - // All of these queries are unplannable because they rely on features Druid doesn't support. - // This test is here to confirm that we don't fall back to Calcite's interpreter or enumerable implementation. - // It's also here so when we do support these features, we can have "real" tests for these queries. - - final List queries = ImmutableList.of( - // SELECT query with order by non-__time. - "SELECT dim1 FROM druid.foo ORDER BY dim1", - - // JOIN condition with not-equals (<>). - "SELECT foo.dim1, foo.dim2, l.k, l.v\n" - + "FROM foo INNER JOIN lookup.lookyloo l ON foo.dim2 <> l.k", - - // JOIN condition with a function of both sides. - "SELECT foo.dim1, foo.dim2, l.k, l.v\n" - + "FROM foo INNER JOIN lookup.lookyloo l ON CHARACTER_LENGTH(foo.dim2 || l.k) > 3\n" - ); - - for (final String query : queries) { - assertQueryIsUnplannable(query); - } - } - @Test public void testGroupingWithNullInFilter() throws Exception { @@ -3764,30 +3756,6 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ); } - @Test - public void testUnplannableTwoExactCountDistincts() - { - // Requires GROUPING SETS + GROUPING to be translated by AggregateExpandDistinctAggregatesRule. - - assertQueryIsUnplannable( - PLANNER_CONFIG_NO_HLL, - "SELECT dim2, COUNT(distinct dim1), COUNT(distinct dim2) FROM druid.foo GROUP BY dim2" - ); - } - - @Test - public void testUnplannableExactCountDistinctOnSketch() - { - // COUNT DISTINCT on a sketch cannot be exact. - - assertQueryIsUnplannable( - PLANNER_CONFIG_NO_HLL, - "SELECT COUNT(distinct unique_dim1) FROM druid.foo" - ); - } - - - @Test public void testGroupByNothingWithLiterallyFalseFilter() throws Exception { @@ -5186,6 +5154,35 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ); } + @Test + public void testUnplannableQueries() + { + // All of these queries are unplannable because they rely on features Druid doesn't support. + // This test is here to confirm that we don't fall back to Calcite's interpreter or enumerable implementation. + // It's also here so when we do support these features, we can have "real" tests for these queries. + + final Map queries = ImmutableMap.of( + // SELECT query with order by non-__time. + "SELECT dim1 FROM druid.foo ORDER BY dim1", + "Possible error: SQL query requires order by non-time column [dim1 ASC] that is not supported.", + + // JOIN condition with not-equals (<>). + "SELECT foo.dim1, foo.dim2, l.k, l.v\n" + + "FROM foo INNER JOIN lookup.lookyloo l ON foo.dim2 <> l.k", + "Possible error: SQL requires a join with 'NOT_EQUALS' condition that is not supported.", + + // JOIN condition with a function of both sides. + "SELECT foo.dim1, foo.dim2, l.k, l.v\n" + + "FROM foo INNER JOIN lookup.lookyloo l ON CHARACTER_LENGTH(foo.dim2 || l.k) > 3\n", + "Possible error: SQL requires a join with 'GREATER_THAN' condition that is not supported." + + ); + + for (final Map.Entry queryErrorPair : queries.entrySet()) { + assertQueryIsUnplannable(queryErrorPair.getKey(), queryErrorPair.getValue()); + } + } + @Test public void testCountStarWithBoundFilterSimplifyOnMetric() throws Exception { @@ -5228,6 +5225,30 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ); } + @Test + public void testUnplannableTwoExactCountDistincts() + { + // Requires GROUPING SETS + GROUPING to be translated by AggregateExpandDistinctAggregatesRule. 
+ + assertQueryIsUnplannable( + PLANNER_CONFIG_NO_HLL, + "SELECT dim2, COUNT(distinct dim1), COUNT(distinct dim2) FROM druid.foo GROUP BY dim2", + "Possible error: SQL requires a join with 'IS_NOT_DISTINCT_FROM' condition that is not supported." + ); + } + + @Test + public void testUnplannableExactCountDistinctOnSketch() + { + // COUNT DISTINCT on a sketch cannot be exact. + + assertQueryIsUnplannable( + PLANNER_CONFIG_NO_HLL, + "SELECT COUNT(distinct unique_dim1) FROM druid.foo", + "Possible error: SQL requires a group-by on a column of type COMPLEX that is unsupported." + ); + } + @Test public void testCountStarWithBoundFilterSimplifyAnd() throws Exception { @@ -5392,9 +5413,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } catch (Throwable t) { Throwable rootException = CalciteTests.getRootCauseFromInvocationTargetExceptionChain(t); - Assert.assertEquals(IAE.class, rootException.getClass()); + Assert.assertEquals(UnsupportedSQLQueryException.class, rootException.getClass()); Assert.assertEquals( - "Illegal TIMESTAMP constant: CAST('z2000-01-01 00:00:00'):TIMESTAMP(3) NOT NULL", + "Possible error: Illegal TIMESTAMP constant: CAST('z2000-01-01 00:00:00'):TIMESTAMP(3) NOT NULL", rootException.getMessage() ); } @@ -11920,8 +11941,6 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ); } - - @Test public void testRepeatedIdenticalVirtualExpressionGrouping() throws Exception { diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java index 42c6ba7b42d..26c93967f54 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java @@ -30,8 +30,11 @@ import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.type.SqlTypeFactoryImpl; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.druid.sql.calcite.planner.DruidTypeSystem; +import org.apache.druid.sql.calcite.planner.PlannerContext; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mockito; import java.math.BigDecimal; import java.util.List; @@ -56,12 +59,21 @@ public class DruidJoinRuleTest ), ImmutableList.of("left", "right") ); + + private DruidJoinRule druidJoinRule; + + @Before + public void setup() + { + PlannerContext plannerContext = Mockito.mock(PlannerContext.class); + druidJoinRule = DruidJoinRule.instance(plannerContext); + } @Test public void test_canHandleCondition_leftEqRight() { Assert.assertTrue( - DruidJoinRule.canHandleCondition( + druidJoinRule.canHandleCondition( rexBuilder.makeCall( SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(joinType, 0), @@ -77,7 +89,7 @@ public class DruidJoinRuleTest public void test_canHandleCondition_leftFnEqRight() { Assert.assertTrue( - DruidJoinRule.canHandleCondition( + druidJoinRule.canHandleCondition( rexBuilder.makeCall( SqlStdOperatorTable.EQUALS, rexBuilder.makeCall( @@ -97,7 +109,7 @@ public class DruidJoinRuleTest public void test_canHandleCondition_leftEqRightFn() { Assert.assertFalse( - DruidJoinRule.canHandleCondition( + druidJoinRule.canHandleCondition( rexBuilder.makeCall( SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.VARCHAR), 0), @@ -117,7 +129,7 @@ public class DruidJoinRuleTest public void test_canHandleCondition_leftEqLeft() { Assert.assertFalse( - DruidJoinRule.canHandleCondition( + 
druidJoinRule.canHandleCondition( rexBuilder.makeCall( SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.VARCHAR), 0), @@ -133,7 +145,7 @@ public class DruidJoinRuleTest public void test_canHandleCondition_rightEqRight() { Assert.assertFalse( - DruidJoinRule.canHandleCondition( + druidJoinRule.canHandleCondition( rexBuilder.makeCall( SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.VARCHAR), 1), @@ -149,7 +161,7 @@ public class DruidJoinRuleTest public void test_canHandleCondition_true() { Assert.assertTrue( - DruidJoinRule.canHandleCondition( + druidJoinRule.canHandleCondition( rexBuilder.makeLiteral(true), leftType, null @@ -161,7 +173,7 @@ public class DruidJoinRuleTest public void test_canHandleCondition_false() { Assert.assertTrue( - DruidJoinRule.canHandleCondition( + druidJoinRule.canHandleCondition( rexBuilder.makeLiteral(false), leftType, null diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidLogicalValuesRuleTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidLogicalValuesRuleTest.java index 3ecf30e7457..e47f156cd42 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidLogicalValuesRuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidLogicalValuesRuleTest.java @@ -31,6 +31,7 @@ import org.apache.calcite.util.TimestampString; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.sql.calcite.planner.DruidTypeSystem; import org.apache.druid.sql.calcite.planner.PlannerContext; +import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Assert; @@ -164,8 +165,8 @@ public class DruidLogicalValuesRuleTest new TimestampString("2021-04-01 16:54:31"), 0 ); - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("Unsupported type[TIMESTAMP_WITH_LOCAL_TIME_ZONE]"); + expectedException.expect(UnsupportedSQLQueryException.class); + expectedException.expectMessage("TIMESTAMP_WITH_LOCAL_TIME_ZONE type is not supported"); DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT); } @@ -173,8 +174,8 @@ public class DruidLogicalValuesRuleTest public void testGetValueFromTimeLiteral() { RexLiteral literal = REX_BUILDER.makeTimeLiteral(new TimeString("16:54:31"), 0); - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("Unsupported type[TIME]"); + expectedException.expect(UnsupportedSQLQueryException.class); + expectedException.expectMessage("TIME type is not supported"); DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT); } @@ -182,8 +183,8 @@ public class DruidLogicalValuesRuleTest public void testGetValueFromTimeWithLocalTimeZoneLiteral() { RexLiteral literal = REX_BUILDER.makeTimeWithLocalTimeZoneLiteral(new TimeString("16:54:31"), 0); - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("Unsupported type[TIME_WITH_LOCAL_TIME_ZONE]"); + expectedException.expect(UnsupportedSQLQueryException.class); + expectedException.expectMessage("TIME_WITH_LOCAL_TIME_ZONE type is not supported"); DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT); } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidUnionDataSourceRuleTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidUnionDataSourceRuleTest.java index 77ab336de1b..8707b476631 100644 
--- a/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidUnionDataSourceRuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidUnionDataSourceRuleTest.java @@ -68,7 +68,7 @@ public class DruidUnionDataSourceRuleTest Assert.assertEquals( Optional.of(ImmutableList.of("__time", "col1", "col2")), - DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel) + DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel, null) ); } @@ -85,7 +85,7 @@ public class DruidUnionDataSourceRuleTest Assert.assertEquals( Optional.of(ImmutableList.of("col1")), - DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel) + DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel, null) ); } @@ -102,7 +102,7 @@ public class DruidUnionDataSourceRuleTest Assert.assertEquals( Optional.empty(), - DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel) + DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel, null) ); } @@ -119,7 +119,7 @@ public class DruidUnionDataSourceRuleTest Assert.assertEquals( Optional.empty(), - DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel) + DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel, null) ); } @@ -137,7 +137,7 @@ public class DruidUnionDataSourceRuleTest Assert.assertEquals( Optional.of(ImmutableList.of("__time", "col1", "col2")), - DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel) + DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel, null) ); } @@ -164,7 +164,7 @@ public class DruidUnionDataSourceRuleTest Assert.assertEquals( Optional.of(ImmutableList.of("col2", "col1")), - DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel) + DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel, null) ); } @@ -182,7 +182,7 @@ public class DruidUnionDataSourceRuleTest Assert.assertEquals( Optional.of(ImmutableList.of("__time", "col1", "col2")), - DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel) + DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel, null) ); } @@ -199,7 +199,7 @@ public class DruidUnionDataSourceRuleTest Assert.assertEquals( Optional.empty(), - DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel) + DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel, null) ); } @@ -216,7 +216,7 @@ public class DruidUnionDataSourceRuleTest Assert.assertEquals( Optional.empty(), - DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel) + DruidUnionDataSourceRule.getColumnNamesIfTableOrUnion(druidRel, null) ); } }
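
A short usage sketch of the reworked cause-chain helper, for reviewers. The wrapper class below is illustrative only and not part of this patch; the call to Throwables.getCauseOfType mirrors how DruidPlanner now consumes it.

// Illustrative only: shows the isThrowable() -> getCauseOfType() migration for a caller.
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.druid.utils.Throwables;

class CauseChainExample
{
  static String describeFailure(Exception e)
  {
    // isThrowable() only answered yes/no; getCauseOfType() returns the matching cause
    // (or null), so its message and concrete type stay available to the caller.
    Throwable cannotPlan = Throwables.getCauseOfType(e, RelOptPlanner.CannotPlanException.class);
    return cannotPlan == null ? "not a planning failure" : cannotPlan.getMessage();
  }
}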
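The rule changes above follow one convention: call PlannerContext.setPlanningError() with a user-facing hint just before bailing out, and let DruidPlanner surface the stored hint (prefixed with "Possible error: ") if the BINDABLE fallback also fails. A minimal sketch of that convention, using a hypothetical rule and feature name:

// Hypothetical rule fragment; only the setPlanningError() call reflects APIs from this patch.
import org.apache.druid.sql.calcite.planner.PlannerContext;

class PlanningErrorConventionExample
{
  private final PlannerContext plannerContext;

  PlanningErrorConventionExample(PlannerContext plannerContext)
  {
    this.plannerContext = plannerContext;
  }

  boolean canHandle(boolean featureSupported, String featureName)
  {
    if (!featureSupported) {
      // Phrase the message as a hint: the final plan can differ from the SQL the user wrote.
      plannerContext.setPlanningError("SQL requires '%s' that is not supported.", featureName);
      return false;
    }
    return true;
  }
}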
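Test authors can assert on the surfaced hint through the two-argument assertQueryIsUnplannable() added to BaseCalciteQueryTest. A sketch of such a test, assumed to live in a subclass of BaseCalciteQueryTest; the query and expected message are taken from the expectations added above:

// Assumes the enclosing class extends BaseCalciteQueryTest.
@Test
public void testOrderByNonTimeColumnIsUnplannable()
{
  // The expected string is the planning error recorded by DruidQuery, prefixed by DruidPlanner.
  assertQueryIsUnplannable(
      "SELECT dim1 FROM druid.foo ORDER BY dim1",
      "Possible error: SQL query requires order by non-time column [dim1 ASC] that is not supported."
  );
}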
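QueryLifecycle and DruidPlanner now log these failures at WARN and omit the stack trace unless the query context enables debug mode. A hedged sketch of opting in from the client side; the "debug" context key is an assumption based on the QueryContexts.isDebug() calls in this patch, not something the patch defines:

// Assumption: QueryContexts.isDebug() reads a boolean "debug" key from the query context.
import com.google.common.collect.ImmutableMap;
import java.util.Map;

class DebugLoggingContextExample
{
  static Map<String, Object> debugContext()
  {
    // With this flag set, the full stack trace is logged; otherwise log.noStackTrace() is used.
    return ImmutableMap.of("debug", true);
  }
}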