From 41712b7a3a938e6157c9f6663b5073093729a56e Mon Sep 17 00:00:00 2001 From: Paul Rogers Date: Sun, 14 Aug 2022 00:44:08 -0700 Subject: [PATCH] Refactor SqlLifecycle into statement classes (#12845) * Refactor SqlLifecycle into statement classes Create direct & prepared statements Remove redundant exceptions from tests Tidy up Calcite query tests Make PlannerConfig more testable * Build fixes * Added builder to SqlQueryPlus * Moved Calcites system properties to saffron.properties * Build fix * Resolve merge conflict * Fix IntelliJ inspection issue * Revisions from reviews Backed out a revision to Calcite tests that didn't work out as planned * Build fix * Fixed spelling errors * Fixed failed test Prepare now enforces security; before it did not. * Rebase and fix IntelliJ inspections issue * Clean up exception handling * Fix handling of JDBC auth errors * Build fix * More tweaks to security messages --- .../druid/benchmark/query/SqlBenchmark.java | 3 +- .../query/SqlExpressionBenchmark.java | 3 +- .../query/SqlNestedDataBenchmark.java | 3 +- .../sql/TDigestSketchSqlAggregatorTest.java | 20 +- .../hll/sql/HllSketchSqlAggregatorTest.java | 15 +- .../sql/DoublesSketchSqlAggregatorTest.java | 20 +- .../sql/ThetaSketchSqlAggregatorTest.java | 12 +- .../filter/sql/BloomDimFilterSqlTest.java | 17 +- ...etsHistogramQuantileSqlAggregatorTest.java | 12 +- .../sql/QuantileSqlAggregatorTest.java | 12 +- .../sql/VarianceSqlAggregatorTest.java | 18 +- .../apache/druid/query/sql/SleepSqlTest.java | 2 +- .../druid/tests/query/ITSqlCancelTest.java | 4 +- .../AbstractAuthConfigurationTest.java | 26 +- .../ITBasicAuthConfigurationTest.java | 4 +- .../ITBasicAuthLdapConfigurationTest.java | 4 +- .../query/QueryInterruptedException.java | 6 +- .../processor/RunAllFullyWidgetTest.java | 2 +- .../apache/druid/query/QueryContextTest.java | 4 +- .../apache/druid/server/security/Access.java | 14 + .../server/security/ForbiddenException.java | 4 +- .../druid/client/JsonParserIteratorTest.java 
| 2 +- .../security/ForbiddenExceptionTest.java | 25 +- .../apache/druid/sql/AbstractStatement.java | 223 ++++++ .../org/apache/druid/sql/DirectStatement.java | 179 +++++ .../org/apache/druid/sql/HttpStatement.java | 77 ++ .../apache/druid/sql/PreparedStatement.java | 106 +++ .../druid/sql/SqlExecutionReporter.java | 146 ++++ .../org/apache/druid/sql/SqlLifecycle.java | 592 --------------- .../apache/druid/sql/SqlLifecycleManager.java | 42 +- .../org/apache/druid/sql/SqlQueryPlus.java | 112 ++- ...eFactory.java => SqlStatementFactory.java} | 62 +- .../java/org/apache/druid/sql/SqlToolbox.java | 61 ++ .../avatica/AbstractDruidJdbcStatement.java | 15 +- .../druid/sql/avatica/DruidConnection.java | 77 +- .../avatica/DruidJdbcPreparedStatement.java | 54 +- .../druid/sql/avatica/DruidJdbcResultSet.java | 62 +- .../druid/sql/avatica/DruidJdbcStatement.java | 38 +- .../apache/druid/sql/avatica/DruidMeta.java | 83 ++- .../druid/sql/avatica/ErrorHandler.java | 8 +- .../druid/sql/calcite/planner/Calcites.java | 29 +- .../sql/calcite/planner/DruidPlanner.java | 69 +- .../sql/calcite/planner/PlannerConfig.java | 283 ++++++-- .../sql/calcite/rule/DruidRelToDruidRule.java | 10 +- .../sql/calcite/schema/SystemSchema.java | 2 +- .../org/apache/druid/sql/guice/SqlModule.java | 70 +- .../apache/druid/sql/http/SqlResource.java | 79 +- sql/src/main/resources/saffron.properties | 28 + .../druid/sql/SqlLifecycleManagerTest.java | 69 +- .../apache/druid/sql/SqlLifecycleTest.java | 316 -------- .../apache/druid/sql/SqlStatementTest.java | 469 ++++++++++++ .../sql/avatica/DruidAvaticaHandlerTest.java | 137 ++-- .../druid/sql/avatica/DruidStatementTest.java | 69 +- .../sql/calcite/BaseCalciteQueryTest.java | 204 +++--- .../sql/calcite/CalciteArraysQueryTest.java | 87 +-- .../calcite/CalciteCorrelatedQueryTest.java | 10 +- .../sql/calcite/CalciteExplainQueryTest.java | 14 +- .../sql/calcite/CalciteIngestionDmlTest.java | 26 +- .../sql/calcite/CalciteInsertDmlTest.java | 9 +- 
.../sql/calcite/CalciteJoinQueryTest.java | 173 +++-- .../CalciteMultiValueStringQueryTest.java | 88 +-- .../calcite/CalciteNestedDataQueryTest.java | 118 +-- .../calcite/CalciteParameterQueryTest.java | 40 +- .../druid/sql/calcite/CalciteQueryTest.java | 673 +++++++++--------- .../sql/calcite/CalciteReplaceDmlTest.java | 5 +- .../sql/calcite/CalciteSelectQueryTest.java | 106 +-- .../sql/calcite/CalciteSimpleQueryTest.java | 22 +- .../calcite/CalciteTimeBoundaryQueryTest.java | 8 +- .../DruidPlannerResourceAnalyzeTest.java | 5 +- .../SqlVectorizedExpressionSanityTest.java | 2 - .../schema/SegmentMetadataCacheCommon.java | 10 +- .../sql/calcite/util/CalciteTestBase.java | 2 - .../druid/sql/calcite/util/CalciteTests.java | 13 +- .../druid/sql/calcite/util/QueryLogHook.java | 2 +- .../druid/sql/http/SqlHttpModuleTest.java | 6 +- .../druid/sql/http/SqlResourceTest.java | 301 ++++---- 76 files changed, 3152 insertions(+), 2571 deletions(-) create mode 100644 sql/src/main/java/org/apache/druid/sql/AbstractStatement.java create mode 100644 sql/src/main/java/org/apache/druid/sql/DirectStatement.java create mode 100644 sql/src/main/java/org/apache/druid/sql/HttpStatement.java create mode 100644 sql/src/main/java/org/apache/druid/sql/PreparedStatement.java create mode 100644 sql/src/main/java/org/apache/druid/sql/SqlExecutionReporter.java delete mode 100644 sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java rename sql/src/main/java/org/apache/druid/sql/{SqlLifecycleFactory.java => SqlStatementFactory.java} (57%) create mode 100644 sql/src/main/java/org/apache/druid/sql/SqlToolbox.java create mode 100644 sql/src/main/resources/saffron.properties delete mode 100644 sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java create mode 100644 sql/src/test/java/org/apache/druid/sql/SqlStatementTest.java diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlBenchmark.java 
index 3d6126fb134..aa679b3555b 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlBenchmark.java @@ -48,7 +48,6 @@ import org.apache.druid.sql.calcite.aggregation.builtin.CountSqlAggregator; import org.apache.druid.sql.calcite.expression.SqlOperatorConversion; import org.apache.druid.sql.calcite.expression.builtin.QueryLookupOperatorConversion; import org.apache.druid.sql.calcite.planner.CalciteRulesManager; -import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.calcite.planner.PlannerConfig; @@ -75,6 +74,7 @@ import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; import javax.annotation.Nullable; + import java.util.HashSet; import java.util.List; import java.util.Map; @@ -92,7 +92,6 @@ public class SqlBenchmark { static { NullHandling.initializeForTests(); - Calcites.setSystemProperties(); } private static final Logger log = new Logger(SqlBenchmark.class); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlExpressionBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlExpressionBenchmark.java index 6f3c8bedb6d..f22832fd6d4 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlExpressionBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlExpressionBenchmark.java @@ -39,7 +39,6 @@ import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.sql.calcite.SqlVectorizedExpressionSanityTest; import org.apache.druid.sql.calcite.planner.CalciteRulesManager; -import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.calcite.planner.PlannerConfig; import 
org.apache.druid.sql.calcite.planner.PlannerFactory; @@ -65,6 +64,7 @@ import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; import javax.annotation.Nullable; + import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -82,7 +82,6 @@ public class SqlExpressionBenchmark static { NullHandling.initializeForTests(); - Calcites.setSystemProperties(); ExpressionProcessing.initializeForStrictBooleansTests(true); } diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlNestedDataBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlNestedDataBenchmark.java index d719da7dba3..9eea985d4ab 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlNestedDataBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlNestedDataBenchmark.java @@ -45,7 +45,6 @@ import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.sql.calcite.SqlVectorizedExpressionSanityTest; import org.apache.druid.sql.calcite.planner.CalciteRulesManager; -import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; @@ -71,6 +70,7 @@ import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; import javax.annotation.Nullable; + import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -85,7 +85,6 @@ public class SqlNestedDataBenchmark static { NullHandling.initializeForTests(); - Calcites.setSystemProperties(); ExpressionProcessing.initializeForStrictBooleansTests(true); } diff --git a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java 
b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java index 2bfcf193553..40ac7928b81 100644 --- a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java +++ b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java @@ -117,7 +117,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testComputingSketchOnNumericValues() throws Exception + public void testComputingSketchOnNumericValues() { cannotVectorize(); @@ -145,7 +145,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testComputingSketchOnCastedString() throws Exception + public void testComputingSketchOnCastedString() { cannotVectorize(); @@ -185,7 +185,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testDefaultCompressionForTDigestGenerateSketchAgg() throws Exception + public void testDefaultCompressionForTDigestGenerateSketchAgg() { cannotVectorize(); @@ -211,7 +211,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testComputingQuantileOnPreAggregatedSketch() throws Exception + public void testComputingQuantileOnPreAggregatedSketch() { cannotVectorize(); @@ -253,7 +253,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testGeneratingSketchAndComputingQuantileOnFly() throws Exception + public void testGeneratingSketchAndComputingQuantileOnFly() { cannotVectorize(); @@ -308,7 +308,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnNumericValues() throws Exception + public void testQuantileOnNumericValues() { cannotVectorize(); @@ -345,7 +345,7 @@ public class 
TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testCompressionParamForTDigestQuantileAgg() throws Exception + public void testCompressionParamForTDigestQuantileAgg() { cannotVectorize(); testQuery( @@ -383,7 +383,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnCastedString() throws Exception + public void testQuantileOnCastedString() { cannotVectorize(); @@ -436,7 +436,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testEmptyTimeseriesResults() throws Exception + public void testEmptyTimeseriesResults() { cannotVectorize(); @@ -468,7 +468,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testGroupByAggregatorDefaultValues() throws Exception + public void testGroupByAggregatorDefaultValues() { cannotVectorize(); testQuery( diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java index 6ebb7e27a00..37813c9a215 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java @@ -85,6 +85,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest return Iterables.concat(super.getJacksonModules(), new HllSketchModule().getJacksonModules()); } + @SuppressWarnings("resource") @Override public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker() throws IOException { @@ -149,7 +150,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testApproxCountDistinctHllSketch() throws 
Exception + public void testApproxCountDistinctHllSketch() { // Can't vectorize due to SUBSTRING expression. cannotVectorize(); @@ -244,7 +245,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest @Test - public void testAvgDailyCountDistinctHllSketch() throws Exception + public void testAvgDailyCountDistinctHllSketch() { // Can't vectorize due to outer query, which runs on an inline datasource. cannotVectorize(); @@ -340,7 +341,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testApproxCountDistinctHllSketchIsRounded() throws Exception + public void testApproxCountDistinctHllSketchIsRounded() { testQuery( "SELECT" @@ -376,7 +377,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testHllSketchPostAggs() throws Exception + public void testHllSketchPostAggs() { final String sketchSummary = "### HLL SKETCH SUMMARY: \n" + " Log Config K : 12\n" @@ -528,7 +529,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testtHllSketchPostAggsPostSort() throws Exception + public void testtHllSketchPostAggsPostSort() { final String sketchSummary = "### HLL SKETCH SUMMARY: \n" + " Log Config K : 12\n" @@ -582,7 +583,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testEmptyTimeseriesResults() throws Exception + public void testEmptyTimeseriesResults() { // timeseries with all granularity have a single group, so should return default results for given aggregators testQuery( @@ -620,7 +621,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testGroupByAggregatorDefaultValues() throws Exception + public void testGroupByAggregatorDefaultValues() { testQuery( "SELECT\n" diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java 
b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java index 4959e4ce5b4..56a6adad56c 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java @@ -140,7 +140,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnFloatAndLongs() throws Exception + public void testQuantileOnFloatAndLongs() { testQuery( "SELECT\n" @@ -213,7 +213,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnComplexColumn() throws Exception + public void testQuantileOnComplexColumn() { testQuery( "SELECT\n" @@ -270,7 +270,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnCastedString() throws Exception + public void testQuantileOnCastedString() { final List expectedResults; if (NullHandling.replaceWithDefault()) { @@ -363,7 +363,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnInnerQuery() throws Exception + public void testQuantileOnInnerQuery() { final List expectedResults; if (NullHandling.replaceWithDefault()) { @@ -429,7 +429,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnInnerQuantileQuery() throws Exception + public void testQuantileOnInnerQuantileQuery() { ImmutableList.Builder builder = ImmutableList.builder(); builder.add(new Object[]{"", 1.0}); @@ -496,7 +496,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testDoublesSketchPostAggs() throws Exception + public void testDoublesSketchPostAggs() { 
testQuery( "SELECT\n" @@ -679,7 +679,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testDoublesSketchPostAggsPostSort() throws Exception + public void testDoublesSketchPostAggsPostSort() { testQuery( "SELECT DS_GET_QUANTILE(y, 0.5), DS_GET_QUANTILE(y, 0.98) from (" @@ -728,7 +728,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testEmptyTimeseriesResults() throws Exception + public void testEmptyTimeseriesResults() { testQuery( "SELECT\n" @@ -768,7 +768,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testGroupByAggregatorDefaultValues() throws Exception + public void testGroupByAggregatorDefaultValues() { testQuery( "SELECT\n" @@ -828,7 +828,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testSuccessWithSmallMaxStreamLength() throws Exception + public void testSuccessWithSmallMaxStreamLength() { final Map context = new HashMap<>(QUERY_CONTEXT_DEFAULT); context.put( diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java index e95fb31d841..8d42e66dbaf 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java @@ -145,7 +145,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testApproxCountDistinctThetaSketch() throws Exception + public void testApproxCountDistinctThetaSketch() { // Cannot vectorize due to SUBSTRING. 
cannotVectorize(); @@ -265,7 +265,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testAvgDailyCountDistinctThetaSketch() throws Exception + public void testAvgDailyCountDistinctThetaSketch() { // Can't vectorize due to outer query (it operates on an inlined data source, which cannot be vectorized). cannotVectorize(); @@ -359,7 +359,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testThetaSketchPostAggs() throws Exception + public void testThetaSketchPostAggs() { final List expectedResults; @@ -529,7 +529,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testThetaSketchPostAggsPostSort() throws Exception + public void testThetaSketchPostAggsPostSort() { final String sql = "SELECT DS_THETA(dim2) as y FROM druid.foo ORDER BY THETA_SKETCH_ESTIMATE(DS_THETA(dim2)) DESC LIMIT 10"; @@ -579,7 +579,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testEmptyTimeseriesResults() throws Exception + public void testEmptyTimeseriesResults() { testQuery( "SELECT\n" @@ -638,7 +638,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testGroupByAggregatorDefaultValues() throws Exception + public void testGroupByAggregatorDefaultValues() { testQuery( "SELECT\n" diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java index f986e35daf7..4e1659c228f 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java @@ -52,6 +52,7 @@ import org.apache.druid.sql.http.SqlParameter; import org.junit.Ignore; 
import org.junit.Test; +import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -88,7 +89,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest } @Test - public void testBloomFilter() throws Exception + public void testBloomFilter() throws IOException { BloomKFilter filter = new BloomKFilter(1500); filter.addString("def"); @@ -116,7 +117,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest } @Test - public void testBloomFilterExprFilter() throws Exception + public void testBloomFilterExprFilter() throws IOException { BloomKFilter filter = new BloomKFilter(1500); filter.addString("a-foo"); @@ -157,7 +158,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest } @Test - public void testBloomFilterVirtualColumn() throws Exception + public void testBloomFilterVirtualColumn() throws IOException { BloomKFilter filter = new BloomKFilter(1500); filter.addString("def-foo"); @@ -187,7 +188,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest @Test - public void testBloomFilterVirtualColumnNumber() throws Exception + public void testBloomFilterVirtualColumnNumber() throws IOException { BloomKFilter filter = new BloomKFilter(1500); filter.addFloat(20.2f); @@ -218,7 +219,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest } @Test - public void testBloomFilters() throws Exception + public void testBloomFilters() throws IOException { BloomKFilter filter = new BloomKFilter(1500); filter.addString("def"); @@ -254,7 +255,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest @Ignore("this test is really slow and is intended to use for comparisons with testBloomFilterBigParameter") @Test - public void testBloomFilterBigNoParam() throws Exception + public void testBloomFilterBigNoParam() throws IOException { BloomKFilter filter = new BloomKFilter(5_000_000); filter.addString("def"); @@ -282,7 +283,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest 
@Ignore("this test is for comparison with testBloomFilterBigNoParam") @Test - public void testBloomFilterBigParameter() throws Exception + public void testBloomFilterBigParameter() throws IOException { BloomKFilter filter = new BloomKFilter(5_000_000); filter.addString("def"); @@ -310,7 +311,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest } @Test - public void testBloomFilterNullParameter() throws Exception + public void testBloomFilterNullParameter() throws IOException { BloomKFilter filter = new BloomKFilter(1500); filter.addBytes(null, 0, 0); diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java index 135fe403877..f5c850ea715 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java @@ -125,7 +125,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ @Test - public void testQuantileOnFloatAndLongs() throws Exception + public void testQuantileOnFloatAndLongs() { final List expectedResults = ImmutableList.of( new Object[]{ @@ -238,7 +238,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ } @Test - public void testQuantileOnCastedString() throws Exception + public void testQuantileOnCastedString() { testQuery( "SELECT\n" @@ -354,7 +354,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ } @Test - public void testQuantileOnComplexColumn() throws Exception + public void testQuantileOnComplexColumn() { cannotVectorize(); @@ -446,7 +446,7 @@ public class 
FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ } @Test - public void testQuantileOnInnerQuery() throws Exception + public void testQuantileOnInnerQuery() { final List expectedResults; if (NullHandling.replaceWithDefault()) { @@ -513,7 +513,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ } @Test - public void testEmptyTimeseriesResults() throws Exception + public void testEmptyTimeseriesResults() { cannotVectorize(); @@ -564,7 +564,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ @Test - public void testGroupByAggregatorDefaultValues() throws Exception + public void testGroupByAggregatorDefaultValues() { cannotVectorize(); testQuery( diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java index 6955726d5d4..3063b32f93e 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java @@ -123,7 +123,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnFloatAndLongs() throws Exception + public void testQuantileOnFloatAndLongs() { testQuery( "SELECT\n" @@ -195,7 +195,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnComplexColumn() throws Exception + public void testQuantileOnComplexColumn() { testQuery( "SELECT\n" @@ -243,7 +243,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnInnerQuery() throws Exception + public void testQuantileOnInnerQuery() { final List expectedResults; if (NullHandling.replaceWithDefault()) { 
@@ -309,7 +309,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testQuantileOnCastedString() throws Exception + public void testQuantileOnCastedString() { cannotVectorize(); @@ -372,7 +372,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testEmptyTimeseriesResults() throws Exception + public void testEmptyTimeseriesResults() { testQuery( "SELECT\n" @@ -404,7 +404,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testGroupByAggregatorDefaultValues() throws Exception + public void testGroupByAggregatorDefaultValues() { testQuery( "SELECT\n" diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java index 91b0a37f88c..ea50f8d3896 100644 --- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java +++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java @@ -161,7 +161,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testVarPop() throws Exception + public void testVarPop() { VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector(); VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector(); @@ -208,7 +208,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testVarSamp() throws Exception + public void testVarSamp() { VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector(); VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector(); @@ -255,7 +255,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testStdDevPop() throws Exception + public void 
testStdDevPop() { VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector(); VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector(); @@ -310,7 +310,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testStdDevSamp() throws Exception + public void testStdDevSamp() { VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector(); VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector(); @@ -363,7 +363,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testStdDevWithVirtualColumns() throws Exception + public void testStdDevWithVirtualColumns() { VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector(); VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector(); @@ -422,7 +422,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest @Test - public void testVarianceOrderBy() throws Exception + public void testVarianceOrderBy() { List expectedResults = NullHandling.sqlCompatible() ? 
ImmutableList.of( @@ -467,7 +467,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testVariancesOnCastedString() throws Exception + public void testVariancesOnCastedString() { testQuery( "SELECT\n" @@ -507,7 +507,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testEmptyTimeseriesResults() throws Exception + public void testEmptyTimeseriesResults() { testQuery( "SELECT\n" @@ -557,7 +557,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest } @Test - public void testGroupByAggregatorDefaultValues() throws Exception + public void testGroupByAggregatorDefaultValues() { testQuery( "SELECT\n" diff --git a/extensions-core/testing-tools/src/test/java/org/apache/druid/query/sql/SleepSqlTest.java b/extensions-core/testing-tools/src/test/java/org/apache/druid/query/sql/SleepSqlTest.java index e8381572c66..cb3e9efdf07 100644 --- a/extensions-core/testing-tools/src/test/java/org/apache/druid/query/sql/SleepSqlTest.java +++ b/extensions-core/testing-tools/src/test/java/org/apache/druid/query/sql/SleepSqlTest.java @@ -67,7 +67,7 @@ public class SleepSqlTest extends BaseCalciteQueryTest } @Test - public void testSleepFunction() throws Exception + public void testSleepFunction() { testQuery( "SELECT sleep(m1) from foo where m1 < 2.0", diff --git a/integration-tests/src/test/java/org/apache/druid/tests/query/ITSqlCancelTest.java b/integration-tests/src/test/java/org/apache/druid/tests/query/ITSqlCancelTest.java index 3d3d43e75b9..ad8dd3cb11f 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/query/ITSqlCancelTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/query/ITSqlCancelTest.java @@ -107,10 +107,10 @@ public class ITSqlCancelTest throw new ISE("Query is not canceled after cancel request"); } QueryException queryException = jsonMapper.readValue(queryResponse.getContent(), QueryException.class); - if 
(!QueryInterruptedException.QUERY_CANCELLED.equals(queryException.getErrorCode())) { + if (!QueryInterruptedException.QUERY_CANCELED.equals(queryException.getErrorCode())) { throw new ISE( "Expected error code [%s], actual [%s]", - QueryInterruptedException.QUERY_CANCELLED, + QueryInterruptedException.QUERY_CANCELED, queryException.getErrorCode() ); } diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java index 5e0defb155f..aa729774491 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java @@ -311,7 +311,7 @@ public abstract class AbstractAuthConfigurationTest datasourceOnlyUserClient, SYS_SCHEMA_SEGMENTS_QUERY, HttpResponseStatus.FORBIDDEN, - "{\"Access-Check-Result\":\"Allowed:false, Message:\"}" + "{\"Access-Check-Result\":\"Unauthorized\"}" ); LOG.info("Checking sys.servers query as datasourceOnlyUser..."); @@ -319,7 +319,7 @@ public abstract class AbstractAuthConfigurationTest datasourceOnlyUserClient, SYS_SCHEMA_SERVERS_QUERY, HttpResponseStatus.FORBIDDEN, - "{\"Access-Check-Result\":\"Allowed:false, Message:\"}" + "{\"Access-Check-Result\":\"Unauthorized\"}" ); LOG.info("Checking sys.server_segments query as datasourceOnlyUser..."); @@ -327,7 +327,7 @@ public abstract class AbstractAuthConfigurationTest datasourceOnlyUserClient, SYS_SCHEMA_SERVER_SEGMENTS_QUERY, HttpResponseStatus.FORBIDDEN, - "{\"Access-Check-Result\":\"Allowed:false, Message:\"}" + "{\"Access-Check-Result\":\"Unauthorized\"}" ); LOG.info("Checking sys.tasks query as datasourceOnlyUser..."); @@ -335,7 +335,7 @@ public abstract class AbstractAuthConfigurationTest datasourceOnlyUserClient, SYS_SCHEMA_TASKS_QUERY, HttpResponseStatus.FORBIDDEN, - 
"{\"Access-Check-Result\":\"Allowed:false, Message:\"}" + "{\"Access-Check-Result\":\"Unauthorized\"}" ); } @@ -366,7 +366,7 @@ public abstract class AbstractAuthConfigurationTest datasourceAndSysUserClient, SYS_SCHEMA_SERVERS_QUERY, HttpResponseStatus.FORBIDDEN, - "{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}" + "{\"Access-Check-Result\":\"Insufficient permission to view servers: Unauthorized\"}" ); LOG.info("Checking sys.server_segments query as datasourceAndSysUser..."); @@ -374,7 +374,7 @@ public abstract class AbstractAuthConfigurationTest datasourceAndSysUserClient, SYS_SCHEMA_SERVER_SEGMENTS_QUERY, HttpResponseStatus.FORBIDDEN, - "{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}" + "{\"Access-Check-Result\":\"Insufficient permission to view servers: Unauthorized\"}" ); LOG.info("Checking sys.tasks query as datasourceAndSysUser..."); @@ -652,15 +652,13 @@ public abstract class AbstractAuthConfigurationTest protected void testAvaticaQuery(Properties connectionProperties, String url) { LOG.info("URL: " + url); - try { - Connection connection = DriverManager.getConnection(url, connectionProperties); - Statement statement = connection.createStatement(); + try ( + Connection connection = DriverManager.getConnection(url, connectionProperties); + Statement statement = connection.createStatement()) { statement.setMaxRows(450); String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS"; ResultSet resultSet = statement.executeQuery(query); Assert.assertTrue(resultSet.next()); - statement.close(); - connection.close(); } catch (Exception e) { throw new RuntimeException(e); @@ -681,9 +679,9 @@ public abstract class AbstractAuthConfigurationTest throws Exception { LOG.info("URL: " + url); - try { - Connection connection = DriverManager.getConnection(url, connectionProperties); - Statement statement = connection.createStatement(); + try ( + Connection connection = 
DriverManager.getConnection(url, connectionProperties); + Statement statement = connection.createStatement()) { statement.setMaxRows(450); String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS"; statement.executeQuery(query); diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java index 7acf915954c..2ea69d9348f 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java @@ -47,8 +47,8 @@ public class ITBasicAuthConfigurationTest extends AbstractAuthConfigurationTest private static final String BASIC_AUTHENTICATOR = "basic"; private static final String BASIC_AUTHORIZER = "basic"; - private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: QueryInterruptedException: User metadata store authentication failed. 
-> BasicSecurityAuthenticationException: User metadata store authentication failed."; - private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: ForbiddenException: Allowed:false, Message:"; + private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: User metadata store authentication failed."; + private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: Unauthorized"; private HttpClient druid99; diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java index f174cdf8fa6..a53223068f4 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java @@ -53,8 +53,8 @@ public class ITBasicAuthLdapConfigurationTest extends AbstractAuthConfigurationT private static final String LDAP_AUTHENTICATOR = "ldap"; private static final String LDAP_AUTHORIZER = "ldapauth"; - private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: QueryInterruptedException: User LDAP authentication failed. 
-> BasicSecurityAuthenticationException: User LDAP authentication failed."; - private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: ForbiddenException: Allowed:false, Message:"; + private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: User LDAP authentication failed."; + private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: Unauthorized"; @Inject IntegrationTestingConfig config; diff --git a/processing/src/main/java/org/apache/druid/query/QueryInterruptedException.java b/processing/src/main/java/org/apache/druid/query/QueryInterruptedException.java index bf000f6ec83..ae67039242f 100644 --- a/processing/src/main/java/org/apache/druid/query/QueryInterruptedException.java +++ b/processing/src/main/java/org/apache/druid/query/QueryInterruptedException.java @@ -44,7 +44,9 @@ import java.util.concurrent.CancellationException; public class QueryInterruptedException extends QueryException { public static final String QUERY_INTERRUPTED = "Query interrupted"; - public static final String QUERY_CANCELLED = "Query cancelled"; + // Note: the proper spelling is with a single "l", but the version with + // two "l"s is documented, we can't change the text of the message. 
+ public static final String QUERY_CANCELED = "Query cancelled"; public static final String UNAUTHORIZED = "Unauthorized request"; public static final String UNSUPPORTED_OPERATION = "Unsupported operation"; public static final String TRUNCATED_RESPONSE_CONTEXT = "Truncated response context"; @@ -96,7 +98,7 @@ public class QueryInterruptedException extends QueryException } else if (e instanceof InterruptedException) { return QUERY_INTERRUPTED; } else if (e instanceof CancellationException) { - return QUERY_CANCELLED; + return QUERY_CANCELED; } else if (e instanceof UnsupportedOperationException) { return UNSUPPORTED_OPERATION; } else if (e instanceof TruncatedResponseContextException) { diff --git a/processing/src/test/java/org/apache/druid/frame/processor/RunAllFullyWidgetTest.java b/processing/src/test/java/org/apache/druid/frame/processor/RunAllFullyWidgetTest.java index a4b9cdbf6d2..caec7984b2b 100644 --- a/processing/src/test/java/org/apache/druid/frame/processor/RunAllFullyWidgetTest.java +++ b/processing/src/test/java/org/apache/druid/frame/processor/RunAllFullyWidgetTest.java @@ -352,7 +352,7 @@ public class RunAllFullyWidgetTest extends FrameProcessorExecutorTest.BaseFrameP Assert.assertTrue(future.cancel(true)); Assert.assertTrue(future.isCancelled()); - // We don't have a good way to wait for future cancelation to truly finish. Resort to a waiting-loop. + // We don't have a good way to wait for future cancellation to truly finish. Resort to a waiting-loop. 
while (exec.cancelableProcessorCount() > 0) { Thread.sleep(10); } diff --git a/processing/src/test/java/org/apache/druid/query/QueryContextTest.java b/processing/src/test/java/org/apache/druid/query/QueryContextTest.java index 3b1eee0db06..dd522b2fc2d 100644 --- a/processing/src/test/java/org/apache/druid/query/QueryContextTest.java +++ b/processing/src/test/java/org/apache/druid/query/QueryContextTest.java @@ -265,14 +265,14 @@ public class QueryContextTest @Test public void testLegacyReturnsLegacy() { - Query legacy = new LegacyContextQuery(ImmutableMap.of("foo", "bar")); + Query legacy = new LegacyContextQuery(ImmutableMap.of("foo", "bar")); Assert.assertNull(legacy.getQueryContext()); } @Test public void testNonLegacyIsNotLegacyContext() { - Query timeseries = Druids.newTimeseriesQueryBuilder() + Query timeseries = Druids.newTimeseriesQueryBuilder() .dataSource("test") .intervals("2015-01-02/2015-01-03") .granularity(Granularities.DAY) diff --git a/server/src/main/java/org/apache/druid/server/security/Access.java b/server/src/main/java/org/apache/druid/server/security/Access.java index 0c86a42a1a2..543ce1b0d73 100644 --- a/server/src/main/java/org/apache/druid/server/security/Access.java +++ b/server/src/main/java/org/apache/druid/server/security/Access.java @@ -19,10 +19,13 @@ package org.apache.druid.server.security; +import com.google.common.base.Strings; import org.apache.druid.java.util.common.StringUtils; public class Access { + static final String DEFAULT_ERROR_MESSAGE = "Unauthorized"; + public static final Access OK = new Access(true); private final boolean allowed; @@ -49,6 +52,17 @@ public class Access return message; } + public String toMessage() + { + if (!Strings.isNullOrEmpty(message)) { + return toString(); + } else if (allowed) { + return "Authorized"; + } else { + return DEFAULT_ERROR_MESSAGE; + } + } + @Override public String toString() { diff --git a/server/src/main/java/org/apache/druid/server/security/ForbiddenException.java 
b/server/src/main/java/org/apache/druid/server/security/ForbiddenException.java index 7de37d677d8..812fd0fd1cd 100644 --- a/server/src/main/java/org/apache/druid/server/security/ForbiddenException.java +++ b/server/src/main/java/org/apache/druid/server/security/ForbiddenException.java @@ -32,11 +32,9 @@ import java.util.function.Function; */ public class ForbiddenException extends RuntimeException implements SanitizableException { - static final String DEFAULT_ERROR_MESSAGE = "Unauthorized."; - public ForbiddenException() { - super(DEFAULT_ERROR_MESSAGE); + super(Access.DEFAULT_ERROR_MESSAGE); } @JsonCreator diff --git a/server/src/test/java/org/apache/druid/client/JsonParserIteratorTest.java b/server/src/test/java/org/apache/druid/client/JsonParserIteratorTest.java index 129a263cea1..d0e154d08a1 100644 --- a/server/src/test/java/org/apache/druid/client/JsonParserIteratorTest.java +++ b/server/src/test/java/org/apache/druid/client/JsonParserIteratorTest.java @@ -90,7 +90,7 @@ public class JsonParserIteratorTest } @Test - public void testConvertFutureCancelationToQueryInterruptedException() + public void testConvertFutureCancellationToQueryInterruptedException() { JsonParserIterator iterator = new JsonParserIterator<>( JAVA_TYPE, diff --git a/server/src/test/java/org/apache/druid/server/security/ForbiddenExceptionTest.java b/server/src/test/java/org/apache/druid/server/security/ForbiddenExceptionTest.java index 01179e6dea0..dc3bc914448 100644 --- a/server/src/test/java/org/apache/druid/server/security/ForbiddenExceptionTest.java +++ b/server/src/test/java/org/apache/druid/server/security/ForbiddenExceptionTest.java @@ -45,7 +45,7 @@ public class ForbiddenExceptionTest ForbiddenException forbiddenException = new ForbiddenException(ERROR_MESSAGE_ORIGINAL); ForbiddenException actual = forbiddenException.sanitize(trasformFunction); Assert.assertNotNull(actual); - Assert.assertEquals(actual.getMessage(), ForbiddenException.DEFAULT_ERROR_MESSAGE); + 
Assert.assertEquals(actual.getMessage(), Access.DEFAULT_ERROR_MESSAGE); Mockito.verify(trasformFunction).apply(ArgumentMatchers.eq(ERROR_MESSAGE_ORIGINAL)); Mockito.verifyNoMoreInteractions(trasformFunction); } @@ -61,4 +61,27 @@ public class ForbiddenExceptionTest Mockito.verify(trasformFunction).apply(ArgumentMatchers.eq(ERROR_MESSAGE_ORIGINAL)); Mockito.verifyNoMoreInteractions(trasformFunction); } + + // Silly, but required to get the code coverage tests to pass. + @Test + public void testAccess() + { + Access access = new Access(false); + Assert.assertFalse(access.isAllowed()); + Assert.assertEquals("", access.getMessage()); + Assert.assertEquals("Allowed:false, Message:", access.toString()); + Assert.assertEquals(Access.DEFAULT_ERROR_MESSAGE, access.toMessage()); + + access = new Access(true); + Assert.assertTrue(access.isAllowed()); + Assert.assertEquals("", access.getMessage()); + Assert.assertEquals("Allowed:true, Message:", access.toString()); + Assert.assertEquals("Authorized", access.toMessage()); + + access = new Access(false, "oops"); + Assert.assertFalse(access.isAllowed()); + Assert.assertEquals("oops", access.getMessage()); + Assert.assertEquals("Allowed:false, Message:oops", access.toString()); + Assert.assertEquals("Allowed:false, Message:oops", access.toMessage()); + } } diff --git a/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java b/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java new file mode 100644 index 00000000000..2a001b7e509 --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java @@ -0,0 +1,223 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql; + +import org.apache.calcite.sql.parser.SqlParseException; +import org.apache.calcite.tools.ValidationException; +import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.query.QueryContext; +import org.apache.druid.query.QueryContexts; +import org.apache.druid.server.security.Access; +import org.apache.druid.server.security.AuthorizationUtils; +import org.apache.druid.server.security.ForbiddenException; +import org.apache.druid.server.security.ResourceAction; +import org.apache.druid.sql.calcite.planner.DruidPlanner; +import org.apache.druid.sql.calcite.planner.PlannerContext; +import org.apache.druid.sql.calcite.planner.PlannerResult; + +import java.io.Closeable; +import java.util.Set; +import java.util.UUID; +import java.util.function.Function; + +/** + * Represents a SQL statement either for preparation or execution. + * A statement is given by a lifecycle context and the statement + * to execute. See derived classes for actions. Closing the statement + * emits logs and metrics for the statement. + */ +public abstract class AbstractStatement implements Closeable +{ + private static final Logger log = new Logger(AbstractStatement.class); + + protected final SqlToolbox sqlToolbox; + protected final SqlQueryPlus queryPlus; + protected final SqlExecutionReporter reporter; + protected PlannerContext plannerContext; + + /** + * Resource actions used with authorizing a cancellation request. These actions + * include only the data-level actions (i.e. the datasource.) 
+ */ + protected Set cancellationResourceActions; + + /** + * Full resource actions authorized as part of this request. Used when logging + * resource actions. Includes the query context, if query context authorization + * is enabled. + */ + protected Set fullResourceActions; + + public AbstractStatement( + final SqlToolbox sqlToolbox, + final SqlQueryPlus queryPlus, + final String remoteAddress + ) + { + this.sqlToolbox = sqlToolbox; + this.queryPlus = queryPlus; + this.reporter = new SqlExecutionReporter(this, remoteAddress); + + // Context is modified, not copied. + contextWithSqlId(queryPlus.context()) + .addDefaultParams(sqlToolbox.defaultQueryConfig.getContext()); + } + + private static QueryContext contextWithSqlId(QueryContext queryContext) + { + // "bySegment" results are never valid to use with SQL because the result format is incompatible + // so, overwrite any user specified context to avoid exceptions down the line + + if (queryContext.removeUserParam(QueryContexts.BY_SEGMENT_KEY) != null) { + log.warn("'bySegment' results are not supported for SQL queries, ignoring query context parameter"); + } + queryContext.addDefaultParam(PlannerContext.CTX_SQL_QUERY_ID, UUID.randomUUID().toString()); + return queryContext; + } + + public String sqlQueryId() + { + return queryPlus.context().getAsString(PlannerContext.CTX_SQL_QUERY_ID); + } + + /** + * Validate SQL query and authorize against any datasources or views which + * will take part in the query. Must be called by the API methods, not + * directly. + */ + protected void validate(DruidPlanner planner) + { + plannerContext = planner.getPlannerContext(); + plannerContext.setAuthenticationResult(queryPlus.authResult()); + plannerContext.setParameters(queryPlus.parameters()); + try { + planner.validate(); + } + // We can't collapse catch clauses since SqlPlanningException has + // type-sensitive constructors. 
+ catch (SqlParseException e) { + throw new SqlPlanningException(e); + } + catch (ValidationException e) { + throw new SqlPlanningException(e); + } + } + + /** + * Authorize the query using the authorizer provided, and an option to authorize + * context variables as well as query resources. + */ + protected void authorize( + DruidPlanner planner, + Function, Access> authorizer + ) + { + boolean authorizeContextParams = sqlToolbox.authConfig.authorizeQueryContextParams(); + + // Authentication is done by the planner using the function provided + // here. The planner ensures that this step is done before planning. + Access authorizationResult = planner.authorize(authorizer, authorizeContextParams); + if (!authorizationResult.isAllowed()) { + throw new ForbiddenException(authorizationResult.toMessage()); + } + + // Capture the query resources twice. The first is used to validate the request + // to cancel the query, and includes only the query-level resources. The second + // is used to report the resources actually authorized and includes the + // query context variables, if we are authorizing them. + cancellationResourceActions = planner.resourceActions(false); + fullResourceActions = planner.resourceActions(authorizeContextParams); + } + + /** + * Resource authorizer based on the authentication result + * provided earlier. + */ + protected Function, Access> authorizer() + { + return resourceActions -> + AuthorizationUtils.authorizeAllResourceActions( + queryPlus.authResult(), + resourceActions, + sqlToolbox.plannerFactory.getAuthorizerMapper() + ); + } + + /** + * Plan the query, which also produces the sequence that runs + * the query. + */ + protected PlannerResult plan(DruidPlanner planner) + { + try { + return planner.plan(); + } + catch (ValidationException e) { + throw new SqlPlanningException(e); + } + } + + /** + * Return the datasource and table resources for this + * statement. 
+ */ + public Set resources() + { + return cancellationResourceActions; + } + + public Set allResources() + { + return fullResourceActions; + } + + public SqlQueryPlus sqlRequest() + { + return queryPlus; + } + + public SqlExecutionReporter reporter() + { + return reporter; + } + + /** + * Releases resources and emits logs and metrics as defined the + * associated reporter. + */ + @Override + public void close() + { + try { + closeQuietly(); + } + catch (Exception e) { + reporter.failed(e); + } + reporter.emit(); + } + + /** + * Closes the statement without reporting metrics. + */ + public void closeQuietly() + { + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/DirectStatement.java b/sql/src/main/java/org/apache/druid/sql/DirectStatement.java new file mode 100644 index 00000000000..f6aade646a0 --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/DirectStatement.java @@ -0,0 +1,179 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql; + +import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.java.util.common.guava.Sequence; +import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.query.QueryInterruptedException; +import org.apache.druid.sql.SqlLifecycleManager.Cancelable; +import org.apache.druid.sql.calcite.planner.DruidPlanner; +import org.apache.druid.sql.calcite.planner.PlannerResult; +import org.apache.druid.sql.calcite.planner.PrepareResult; + +import java.util.concurrent.CopyOnWriteArrayList; + +/** + * Lifecycle for direct SQL statement execution, which means that the query + * is planned and executed in a single step, with no "prepare" step. + * Callers need call only: + *
    + *
  • {@link #execute()} to execute the query. The caller must close + * the returned {@code Sequence}.
  • + *
  • {@link #close()} to report metrics, or {@link #closeQuietly()} + * otherwise.
  • + *
+ *

+ * The {@link #cancel()} method may be called from any thread and cancels + * the query. + *

+ * All other methods are optional and are generally for introspection. + *

+ * The class supports two threading models. In the simple case, the same + * thread creates this object and executes the query. In the split model, + * a request thread creates this object and plans the query. A separate + * response thread consumes results and performs any desired logging, etc. + * The object is transferred between threads, with no overlapping access. + *

+ * As statement holds no resources and need not be called. Only the + * {@code Sequence} returned from {@link #execute()} need be closed. + *

+ * Use this class for tests and JDBC execution. Use the HTTP variant, + * {@link HttpStatement} for HTTP requests. + */ +public class DirectStatement extends AbstractStatement implements Cancelable +{ + private static final Logger log = new Logger(DirectStatement.class); + + protected PrepareResult prepareResult; + protected PlannerResult plannerResult; + private volatile boolean canceled; + + public DirectStatement( + final SqlToolbox lifecycleToolbox, + final SqlQueryPlus queryPlus, + final String remoteAddress + ) + { + super(lifecycleToolbox, queryPlus, remoteAddress); + } + + public DirectStatement( + final SqlToolbox lifecycleToolbox, + final SqlQueryPlus sqlRequest + ) + { + super(lifecycleToolbox, sqlRequest, null); + } + + /** + * Direct execution of a query, including: + *

    + *
  • Create the planner.
  • + *
  • Parse the statement.
  • + *
  • Provide parameters using a + * "query optimized" structure.
  • + *
  • Validate the query against the Druid catalog.
  • + *
  • Authorize access to the resources which the query needs.
  • + *
  • Plan the query.
  • + *
  • Return a {@link Sequence} which executes the query and returns results.
  • + *
+ * + * This method is called from the request thread; results are read in the + * response thread. + * + * @return sequence which delivers query results + */ + public Sequence execute() + { + try (DruidPlanner planner = sqlToolbox.plannerFactory.createPlanner( + queryPlus.sql(), + queryPlus.context())) { + validate(planner); + authorize(planner, authorizer()); + + // Adding the statement to the lifecycle manager allows cancellation. + // Tests cancel during this call; real clients might do so if the plan + // or execution prep stages take too long for some unexpected reason. + sqlToolbox.sqlLifecycleManager.add(sqlQueryId(), this); + checkCanceled(); + plannerResult = plan(planner); + prepareResult = planner.prepareResult(); + return doExecute(); + } + catch (RuntimeException e) { + reporter.failed(e); + throw e; + } + } + + public PrepareResult prepareResult() + { + return prepareResult; + } + + /** + * Do the actual execute step which allows subclasses to wrap the sequence, + * as is sometimes needed for testing. + */ + protected Sequence doExecute() + { + // Check cancellation here and not in execute() above: + // required for SqlResourceTest to work. + checkCanceled(); + try { + return plannerResult.run(); + } + catch (RuntimeException e) { + reporter.failed(e); + throw e; + } + } + + /** + * Checks for cancellation. As it turns out, this is really just a test-time + * check: an actual client can't cancel the query until the query reports + * a query ID, which won't happen until after the {@link #execute())} + * call. 
+ */ + private void checkCanceled() + { + if (canceled) { + throw new QueryInterruptedException( + QueryInterruptedException.QUERY_CANCELED, + StringUtils.format("Query is canceled [%s]", sqlQueryId()), + null, + null + ); + } + } + + @Override + public void cancel() + { + canceled = true; + final CopyOnWriteArrayList nativeQueryIds = plannerContext.getNativeQueryIds(); + + for (String nativeQueryId : nativeQueryIds) { + log.debug("Canceling native query [%s]", nativeQueryId); + sqlToolbox.queryScheduler.cancelQuery(nativeQueryId); + } + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/HttpStatement.java b/sql/src/main/java/org/apache/druid/sql/HttpStatement.java new file mode 100644 index 00000000000..57bb81d8cdf --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/HttpStatement.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql; + +import org.apache.druid.server.security.Access; +import org.apache.druid.server.security.AuthorizationUtils; +import org.apache.druid.server.security.ResourceAction; +import org.apache.druid.sql.http.SqlQuery; + +import javax.servlet.http.HttpServletRequest; + +import java.util.Set; +import java.util.function.Function; + +/** + * SQL statement lifecycle for the HTTP endpoint. The request thread + * creates the object and calls {@link #execute()}. The response thread + * reads results and inspects the statement contents to emit logs and + * metrics. The object is transferred between threads, with no overlapping + * access. + *

+ * The key extension of an HTTP statement is the use of the HTTP request + * for authorization. + */ +public class HttpStatement extends DirectStatement +{ + private final HttpServletRequest req; + + public HttpStatement( + final SqlToolbox lifecycleToolbox, + final SqlQuery sqlQuery, + final HttpServletRequest req + ) + { + super( + lifecycleToolbox, + SqlQueryPlus.builder(sqlQuery) + .auth(AuthorizationUtils.authenticationResultFromRequest(req)) + .build(), + req.getRemoteAddr() + ); + this.req = req; + } + + @Override + protected Function, Access> authorizer() + { + return resourceActions -> + AuthorizationUtils.authorizeAllResourceActions( + req, + resourceActions, + sqlToolbox.plannerFactory.getAuthorizerMapper() + ); + } + + public SqlRowTransformer createRowTransformer() + { + return new SqlRowTransformer(plannerContext.getTimeZone(), plannerResult.rowType()); + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java b/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java new file mode 100644 index 00000000000..145f697bf45 --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql; + +import org.apache.calcite.avatica.remote.TypedValue; +import org.apache.calcite.tools.ValidationException; +import org.apache.druid.sql.calcite.planner.DruidPlanner; +import org.apache.druid.sql.calcite.planner.PrepareResult; + +import java.util.List; + +/** + * Statement for the JDBC prepare-once, execute many model. + */ +public class PreparedStatement extends AbstractStatement +{ + private final SqlQueryPlus originalRequest; + private PrepareResult prepareResult; + + public PreparedStatement( + final SqlToolbox lifecycleToolbox, + final SqlQueryPlus queryPlus + ) + { + super(lifecycleToolbox, queryPlus, null); + this.originalRequest = queryPlus; + } + + /** + * Prepare the query lifecycle for execution, without completely planning into + * something that is executable, but including some initial parsing and + * validation, to support prepared statements via JDBC. + *

+ * Note that, per JDBC convention, the prepare step does not provide + * parameter values: those are provided later during execution and will generally + * vary from one execution to the next. + * + *

+ * <ul>
+ * <li>Create the planner.</li>
+ * <li>Parse the statement.</li>
+ * <li>JDBC does not provide parameter values at prepare time.
+ * They are provided during execution later, where we'll replan the
+ * query to use the
+ * "query optimized" structure.</li>
+ * <li>Validate the query against the Druid catalog.</li>
+ * <li>Authorize access to the resources which the query needs.</li>
+ * <li>Return a {@link PrepareResult} which describes the query.</li>
+ * </ul>
+ */ + public PrepareResult prepare() + { + try (DruidPlanner planner = sqlToolbox.plannerFactory.createPlanner( + queryPlus.sql(), + queryPlus.context())) { + validate(planner); + authorize(planner, authorizer()); + + // Do the prepare step. + try { + this.prepareResult = planner.prepare(); + return prepareResult; + } + catch (ValidationException e) { + throw new SqlPlanningException(e); + } + } + catch (RuntimeException e) { + reporter.failed(e); + throw e; + } + } + + /** + * Execute a prepared JDBC query. Druid uses + * + * "query optimized" parameters, which means we do not reuse the statement + * prepared above, but rather plan anew with the actual parameter values. The + * same statement can be execute many times, including concurrently. Each + * execution repeats the parse, validate, authorize and plan steps since + * data, permissions, views and other dependencies may have changed. + */ + public DirectStatement execute(List parameters) + { + return new DirectStatement( + sqlToolbox, + originalRequest.withParameters(parameters) + ); + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/SqlExecutionReporter.java b/sql/src/main/java/org/apache/druid/sql/SqlExecutionReporter.java new file mode 100644 index 00000000000..94e9f93c932 --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/SqlExecutionReporter.java @@ -0,0 +1,146 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql; + +import org.apache.druid.java.util.common.DateTimes; +import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.java.util.emitter.service.ServiceEmitter; +import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; +import org.apache.druid.query.QueryContext; +import org.apache.druid.query.QueryInterruptedException; +import org.apache.druid.query.QueryTimeoutException; +import org.apache.druid.server.QueryStats; +import org.apache.druid.server.RequestLogLine; +import org.apache.druid.sql.calcite.planner.PlannerContext; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +/** + * Side-car class which reports logs and metrics for an + * {@link HttpStatement}. This separate class cleanly separates the logic + * for running a query from the logic for reporting on that run. A query + * can end either with a success or error. This object is created in + * the request thread, with the remaining method called either from the + * request or response thread, but not both. 
+ */ +public class SqlExecutionReporter +{ + private static final Logger log = new Logger(SqlExecutionReporter.class); + + private final AbstractStatement stmt; + private final String remoteAddress; + private final long startMs; + private final long startNs; + private Throwable e; + private long bytesWritten; + + public SqlExecutionReporter( + final AbstractStatement stmt, + final String remoteAddress + ) + { + this.stmt = stmt; + this.remoteAddress = remoteAddress; + this.startMs = System.currentTimeMillis(); + this.startNs = System.nanoTime(); + } + + public void failed(Throwable e) + { + this.e = e; + } + + public void succeeded(final long bytesWritten) + { + this.bytesWritten = bytesWritten; + } + + public void emit() + { + final boolean success = e == null; + final long queryTimeNs = System.nanoTime() - startNs; + + ServiceEmitter emitter = stmt.sqlToolbox.emitter; + PlannerContext plannerContext = stmt.plannerContext; + try { + ServiceMetricEvent.Builder metricBuilder = ServiceMetricEvent.builder(); + if (plannerContext != null) { + metricBuilder.setDimension("id", plannerContext.getSqlQueryId()); + metricBuilder.setDimension("nativeQueryIds", plannerContext.getNativeQueryIds().toString()); + } + if (stmt.fullResourceActions != null) { + metricBuilder.setDimension( + "dataSource", + stmt.fullResourceActions + .stream() + .map(action -> action.getResource().getName()) + .collect(Collectors.toList()) + .toString() + ); + } + metricBuilder.setDimension("remoteAddress", StringUtils.nullToEmptyNonDruidDataString(remoteAddress)); + metricBuilder.setDimension("success", String.valueOf(success)); + emitter.emit(metricBuilder.build("sqlQuery/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs))); + if (bytesWritten >= 0) { + emitter.emit(metricBuilder.build("sqlQuery/bytes", bytesWritten)); + } + + final Map statsMap = new LinkedHashMap<>(); + statsMap.put("sqlQuery/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs)); + statsMap.put("sqlQuery/bytes", bytesWritten); + 
statsMap.put("success", success); + QueryContext queryContext; + if (plannerContext == null) { + queryContext = stmt.queryPlus.context(); + } else { + statsMap.put("identity", plannerContext.getAuthenticationResult().getIdentity()); + queryContext = stmt.queryPlus.context(); + queryContext.addSystemParam("nativeQueryIds", plannerContext.getNativeQueryIds().toString()); + } + final Map context = queryContext.getMergedParams(); + statsMap.put("context", context); + if (e != null) { + statsMap.put("exception", e.toString()); + + if (e instanceof QueryInterruptedException || e instanceof QueryTimeoutException) { + statsMap.put("interrupted", true); + statsMap.put("reason", e.toString()); + } + } + + stmt.sqlToolbox.requestLogger.logSqlQuery( + RequestLogLine.forSql( + stmt.queryPlus.sql(), + context, + DateTimes.utc(startMs), + remoteAddress, + new QueryStats(statsMap) + ) + ); + } + catch (Exception ex) { + log.error(ex, "Unable to log SQL [%s]!", stmt.queryPlus.sql()); + } + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java deleted file mode 100644 index b3b9fee8266..00000000000 --- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java +++ /dev/null @@ -1,592 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.sql; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.errorprone.annotations.concurrent.GuardedBy; -import org.apache.calcite.avatica.remote.TypedValue; -import org.apache.calcite.sql.parser.SqlParseException; -import org.apache.calcite.tools.RelConversionException; -import org.apache.calcite.tools.ValidationException; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.Pair; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.java.util.common.guava.SequenceWrapper; -import org.apache.druid.java.util.common.guava.Sequences; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.service.ServiceEmitter; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.apache.druid.query.DefaultQueryConfig; -import org.apache.druid.query.QueryContext; -import org.apache.druid.query.QueryContexts; -import org.apache.druid.query.QueryInterruptedException; -import org.apache.druid.query.QueryTimeoutException; -import org.apache.druid.segment.column.RowSignature; -import org.apache.druid.server.QueryScheduler; -import org.apache.druid.server.QueryStats; -import org.apache.druid.server.RequestLogLine; -import org.apache.druid.server.log.RequestLogger; -import org.apache.druid.server.security.Access; -import org.apache.druid.server.security.AuthConfig; -import org.apache.druid.server.security.AuthenticationResult; -import org.apache.druid.server.security.AuthorizationUtils; -import org.apache.druid.server.security.ForbiddenException; -import org.apache.druid.server.security.ResourceAction; -import 
org.apache.druid.sql.calcite.planner.DruidPlanner; -import org.apache.druid.sql.calcite.planner.PlannerContext; -import org.apache.druid.sql.calcite.planner.PlannerFactory; -import org.apache.druid.sql.calcite.planner.PlannerResult; -import org.apache.druid.sql.calcite.planner.PrepareResult; -import org.apache.druid.sql.calcite.table.RowSignatures; -import org.apache.druid.sql.http.SqlParameter; -import org.apache.druid.sql.http.SqlQuery; - -import javax.annotation.Nullable; -import javax.servlet.http.HttpServletRequest; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import java.util.stream.Collectors; - -/** - * Similar to {@link org.apache.druid.server.QueryLifecycle}, this class manages the lifecycle of a SQL query. - * It ensures that a SQL query goes through the following stages, in the proper order: - * - *
- * <ol>
- * <li>Initialization ({@link #initialize(String, QueryContext)})</li>
- * <li>Validation and Authorization ({@link #validateAndAuthorize(HttpServletRequest)} or {@link #validateAndAuthorize(AuthenticationResult)})</li>
- * <li>Planning ({@link #plan()})</li>
- * <li>Execution ({@link #execute()})</li>
- * <li>Logging ({@link #finalizeStateAndEmitLogsAndMetrics(Throwable, String, long)})</li>
- * </ol>
- * - * Every method in this class must be called by the same thread except for {@link #cancel()}. - */ -public class SqlLifecycle -{ - private static final Logger log = new Logger(SqlLifecycle.class); - - private final PlannerFactory plannerFactory; - private final ServiceEmitter emitter; - private final RequestLogger requestLogger; - private final QueryScheduler queryScheduler; - private final AuthConfig authConfig; - private final DefaultQueryConfig defaultQueryConfig; - private final long startMs; - private final long startNs; - - /** - * This lock coordinates the access to {@link #state} as there is a happens-before relationship - * between {@link #cancel} and {@link #transition}. - */ - private final Object stateLock = new Object(); - @GuardedBy("stateLock") - private State state = State.NEW; - - // init during initialize - private String sql; - private QueryContext queryContext; - private List parameters; - - // init during plan - /** - * The Druid planner follows the SQL statement through the lifecycle. - * The planner's state is start --> validate --> (prepare | plan). - */ - private DruidPlanner planner; - private PlannerContext plannerContext; - private PrepareResult prepareResult; - private Set resourceActions; - private PlannerResult plannerResult; - - public SqlLifecycle( - PlannerFactory plannerFactory, - ServiceEmitter emitter, - RequestLogger requestLogger, - QueryScheduler queryScheduler, - AuthConfig authConfig, - DefaultQueryConfig defaultQueryConfig, - long startMs, - long startNs - ) - { - this.plannerFactory = plannerFactory; - this.emitter = emitter; - this.requestLogger = requestLogger; - this.queryScheduler = queryScheduler; - this.authConfig = authConfig; - this.defaultQueryConfig = defaultQueryConfig; - this.startMs = startMs; - this.startNs = startNs; - this.parameters = Collections.emptyList(); - } - - /** - * Initialize the query lifecycle, setting the raw string SQL, initial query context, and assign a sql query id. 
- * - * If successful (it will be), it will transition the lifecycle to {@link State#INITIALIZED}. - */ - public String initialize(String sql, QueryContext queryContext) - { - transition(State.NEW, State.INITIALIZED); - this.sql = sql; - this.queryContext = contextWithSqlId(queryContext); - this.queryContext.addDefaultParams(defaultQueryConfig.getContext()); - return sqlQueryId(); - } - - private QueryContext contextWithSqlId(QueryContext queryContext) - { - // "bySegment" results are never valid to use with SQL because the result format is incompatible - // so, overwrite any user specified context to avoid exceptions down the line - - if (queryContext.removeUserParam(QueryContexts.BY_SEGMENT_KEY) != null) { - log.warn("'bySegment' results are not supported for SQL queries, ignoring query context parameter"); - } - queryContext.addDefaultParam(PlannerContext.CTX_SQL_QUERY_ID, UUID.randomUUID().toString()); - return queryContext; - } - - private String sqlQueryId() - { - return queryContext.getAsString(PlannerContext.CTX_SQL_QUERY_ID); - } - - /** - * Assign dynamic parameters to be used to substitute values during query execution. This can be performed at any - * part of the lifecycle. - */ - public void setParameters(List parameters) - { - this.parameters = parameters; - if (this.plannerContext != null) { - this.plannerContext.setParameters(parameters); - } - } - - /** - * Validate SQL query and authorize against any datasources or views which will take part in the query. - * - * If successful, the lifecycle will first transition from {@link State#INITIALIZED} first to - * {@link State#AUTHORIZING} and then to either {@link State#AUTHORIZED} or {@link State#UNAUTHORIZED}. 
- */ - public void validateAndAuthorize(AuthenticationResult authenticationResult) - { - synchronized (stateLock) { - if (state == State.AUTHORIZED) { - return; - } - } - transition(State.INITIALIZED, State.AUTHORIZING); - validate(authenticationResult); - doAuthorize(resourceActions -> - AuthorizationUtils.authorizeAllResourceActions( - authenticationResult, - resourceActions, - plannerFactory.getAuthorizerMapper() - ) - ); - } - - /** - * Validate SQL query and authorize against any datasources or views which the query. Like - * {@link #validateAndAuthorize(AuthenticationResult)} but for a {@link HttpServletRequest}. - * - * If successful, the lifecycle will first transition from {@link State#INITIALIZED} first to - * {@link State#AUTHORIZING} and then to either {@link State#AUTHORIZED} or {@link State#UNAUTHORIZED}. - */ - public void validateAndAuthorize(HttpServletRequest req) - { - transition(State.INITIALIZED, State.AUTHORIZING); - AuthenticationResult authResult = AuthorizationUtils.authenticationResultFromRequest(req); - validate(authResult); - doAuthorize(resourceActions -> - AuthorizationUtils.authorizeAllResourceActions( - req, - resourceActions, - plannerFactory.getAuthorizerMapper() - ) - ); - } - - /** - * Perform the validation step on the Druid planner, leaving the planner - * ready to perform either prepare or plan. - */ - private void validate(AuthenticationResult authenticationResult) - { - try { - planner = plannerFactory.createPlanner(sql, queryContext); - // set planner context for logs/metrics in case something explodes early - plannerContext = planner.getPlannerContext(); - plannerContext.setAuthenticationResult(authenticationResult); - // set parameters on planner context, if parameters have already been set - plannerContext.setParameters(parameters); - planner.validate(); - - // Capture the resource actions as these are reference past the - // life of the planner itself. 
- resourceActions = planner.resourceActions(authConfig.authorizeQueryContextParams()); - } - // we can't collapse catch clauses since SqlPlanningException has type-sensitive constructors. - catch (SqlParseException e) { - throw new SqlPlanningException(e); - } - catch (ValidationException e) { - throw new SqlPlanningException(e); - } - } - - private void doAuthorize(Function, Access> authorizer) - { - Access authorizationResult = planner.authorize( - authorizer, - authConfig.authorizeQueryContextParams() - ); - if (!authorizationResult.isAllowed()) { - // Not authorized; go straight to Jail, do not pass Go. - transition(State.AUTHORIZING, State.UNAUTHORIZED); - } else { - transition(State.AUTHORIZING, State.AUTHORIZED); - } - if (!authorizationResult.isAllowed()) { - throw new ForbiddenException(authorizationResult.toString()); - } - } - - /** - * Prepare the query lifecycle for execution, without completely planning into - * something that is executable, but including some initial parsing and - * validation and any dynamic parameter type resolution, to support prepared - * statements via JDBC. - * - * The planner must have already performed the validation step: the planner - * state is reused here. - */ - public PrepareResult prepare() - { - synchronized (stateLock) { - if (state != State.AUTHORIZED) { - throw new ISE("Cannot prepare because current state [%s] is not [%s].", state, State.AUTHORIZED); - } - } - Preconditions.checkNotNull(plannerContext, "Cannot prepare, plannerContext is null"); - try { - this.prepareResult = planner.prepare(); - return prepareResult; - } - catch (ValidationException e) { - throw new SqlPlanningException(e); - } - } - - /** - * Plan the query to enable execution. - * - * The planner must have already performed the validation step: the planner - * state is reused here. - * - * If successful, the lifecycle will first transition from - * {@link State#AUTHORIZED} to {@link State#PLANNED}. 
- */ - public void plan() throws RelConversionException - { - transition(State.AUTHORIZED, State.PLANNED); - Preconditions.checkNotNull(plannerContext, "Cannot plan, plannerContext is null"); - try { - this.plannerResult = planner.plan(); - } - catch (ValidationException e) { - throw new SqlPlanningException(e); - } - finally { - // Done with the planner, close it. - planner.close(); - planner = null; - } - } - - /** - * This method must be called after {@link #plan()}. - */ - public SqlRowTransformer createRowTransformer() - { - assert plannerContext != null; - assert plannerResult != null; - - return new SqlRowTransformer(plannerContext.getTimeZone(), plannerResult.rowType()); - } - - @VisibleForTesting - PlannerContext getPlannerContext() - { - return plannerContext; - } - - /** - * Execute the fully planned query. - * - * If successful, the lifecycle will first transition from {@link State#PLANNED} to {@link State#EXECUTING}. - */ - public Sequence execute() - { - transition(State.PLANNED, State.EXECUTING); - return plannerResult.run(); - } - - /** - * Only for testing... 
returns result row signature and sequence of results - */ - @VisibleForTesting - public Pair> runSimple( - String sql, - Map queryContext, - List parameters, - AuthenticationResult authenticationResult - ) throws RelConversionException - { - Sequence result; - - initialize(sql, new QueryContext(queryContext)); - try { - setParameters(SqlQuery.getParameterList(parameters)); - validateAndAuthorize(authenticationResult); - plan(); - result = execute(); - } - catch (Throwable e) { - if (!(e instanceof ForbiddenException)) { - finalizeStateAndEmitLogsAndMetrics(e, null, -1); - } - throw e; - } - - return new Pair<>( - RowSignatures.fromRelDataType(plannerResult.rowType().getFieldNames(), plannerResult.rowType()), - Sequences.wrap( - result, - new SequenceWrapper() - { - @Override - public void after(boolean isDone, Throwable thrown) - { - finalizeStateAndEmitLogsAndMetrics(thrown, null, -1); - } - } - ) - ); - } - - @VisibleForTesting - public Set runAnalyzeResources(AuthenticationResult authenticationResult) - { - validate(authenticationResult); - return getRequiredResourceActions(); - } - - public Set getRequiredResourceActions() - { - return resourceActions; - } - - /** - * Cancel all native queries associated with this lifecycle. - * - * This method is thread-safe. - */ - public void cancel() - { - synchronized (stateLock) { - if (state == State.CANCELLED) { - return; - } - state = State.CANCELLED; - } - - final CopyOnWriteArrayList nativeQueryIds = plannerContext.getNativeQueryIds(); - - for (String nativeQueryId : nativeQueryIds) { - log.debug("Canceling native query [%s]", nativeQueryId); - queryScheduler.cancelQuery(nativeQueryId); - } - } - - /** - * Emit logs and metrics for this query. 
- * - * @param e exception that occurred while processing this query - * @param remoteAddress remote address, for logging; or null if unknown - * @param bytesWritten number of bytes written; will become a query/bytes metric if >= 0 - */ - public void finalizeStateAndEmitLogsAndMetrics( - @Nullable final Throwable e, - @Nullable final String remoteAddress, - final long bytesWritten - ) - { - if (queryContext == null) { - // Never initialized, don't log or emit anything. - return; - } - - synchronized (stateLock) { - assert state != State.UNAUTHORIZED; // should not emit below metrics when the query fails to authorize - - if (state != State.CANCELLED) { - if (state == State.DONE) { - log.warn("Tried to emit logs and metrics twice for query [%s]!", sqlQueryId()); - } - - state = State.DONE; - } - } - - final Set actions; - if (planner != null) { - actions = getRequiredResourceActions(); - planner.close(); - planner = null; - } else { - actions = null; - } - - final boolean success = e == null; - final long queryTimeNs = System.nanoTime() - startNs; - - try { - ServiceMetricEvent.Builder metricBuilder = ServiceMetricEvent.builder(); - if (plannerContext != null) { - metricBuilder.setDimension("id", plannerContext.getSqlQueryId()); - metricBuilder.setDimension("nativeQueryIds", plannerContext.getNativeQueryIds().toString()); - } - if (actions != null) { - metricBuilder.setDimension( - "dataSource", - actions - .stream() - .map(action -> action.getResource().getName()) - .collect(Collectors.toList()) - .toString() - ); - } - metricBuilder.setDimension("remoteAddress", StringUtils.nullToEmptyNonDruidDataString(remoteAddress)); - metricBuilder.setDimension("success", String.valueOf(success)); - emitter.emit(metricBuilder.build("sqlQuery/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs))); - if (bytesWritten >= 0) { - emitter.emit(metricBuilder.build("sqlQuery/bytes", bytesWritten)); - } - - final Map statsMap = new LinkedHashMap<>(); - statsMap.put("sqlQuery/time", 
TimeUnit.NANOSECONDS.toMillis(queryTimeNs)); - statsMap.put("sqlQuery/bytes", bytesWritten); - statsMap.put("success", success); - if (plannerContext != null) { - statsMap.put("identity", plannerContext.getAuthenticationResult().getIdentity()); - queryContext.addSystemParam("nativeQueryIds", plannerContext.getNativeQueryIds().toString()); - } - final Map context = queryContext.getMergedParams(); - statsMap.put("context", context); - if (e != null) { - statsMap.put("exception", e.toString()); - - if (e instanceof QueryInterruptedException || e instanceof QueryTimeoutException) { - statsMap.put("interrupted", true); - statsMap.put("reason", e.toString()); - } - } - - requestLogger.logSqlQuery( - RequestLogLine.forSql( - sql, - context, - DateTimes.utc(startMs), - remoteAddress, - new QueryStats(statsMap) - ) - ); - } - catch (Exception ex) { - log.error(ex, "Unable to log SQL [%s]!", sql); - } - } - - @VisibleForTesting - public State getState() - { - synchronized (stateLock) { - return state; - } - } - - @VisibleForTesting - QueryContext getQueryContext() - { - return queryContext; - } - - private void transition(final State from, final State to) - { - synchronized (stateLock) { - if (state == State.CANCELLED) { - throw new QueryInterruptedException( - QueryInterruptedException.QUERY_CANCELLED, - StringUtils.format("Query is canceled [%s]", sqlQueryId()), - null, - null - ); - } - if (state != from) { - throw new ISE( - "Cannot transition from [%s] to [%s] because current state [%s] is not [%s].", - from, - to, - state, - from - ); - } - - state = to; - } - } - - enum State - { - NEW, - INITIALIZED, - AUTHORIZING, - AUTHORIZED, - PLANNED, - EXECUTING, - - // final states - UNAUTHORIZED, - CANCELLED, // query is cancelled. can be transitioned to this state only after AUTHORIZED. 
- DONE // query could either succeed or fail - } -} diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycleManager.java b/sql/src/main/java/org/apache/druid/sql/SqlLifecycleManager.java index 8b222eb569a..be8e5ec8ca6 100644 --- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycleManager.java +++ b/sql/src/main/java/org/apache/druid/sql/SqlLifecycleManager.java @@ -22,24 +22,27 @@ package org.apache.druid.sql; import com.google.common.collect.ImmutableList; import com.google.errorprone.annotations.concurrent.GuardedBy; import org.apache.druid.guice.LazySingleton; -import org.apache.druid.sql.SqlLifecycle.State; +import org.apache.druid.server.security.ResourceAction; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; /** - * This class manages only _authorized_ {@link SqlLifecycle}s submitted via HTTP, - * such as {@link org.apache.druid.sql.http.SqlResource}. The main use case of this class is - * tracking running queries so that the cancel API can identify the lifecycles to cancel. + * This class manages only authorized {@link DirectStatement}s submitted via + * HTTP, such as {@link org.apache.druid.sql.http.SqlResource}. The main use case of + * this class is tracking running queries so that the cancel API can identify + * the statements to cancel. * - * This class is thread-safe as there are 2 or more threads that can access lifecycles at the same time - * for query running or query canceling. + * This class is thread-safe as there are 2 or more threads that can access + * statements at the same time for query running or query canceling. * - * For managing and canceling native queries, see {@link org.apache.druid.server.QueryScheduler}. - * As its name indicates, it also performs resource scheduling for native queries based on query lanes + * For managing and canceling native queries, see + * {@link org.apache.druid.server.QueryScheduler}. 
As its name indicates, it + * also performs resource scheduling for native queries based on query lanes * {@link org.apache.druid.server.QueryLaningStrategy}. * * @see org.apache.druid.server.QueryScheduler#cancelQuery(String) @@ -47,15 +50,20 @@ import java.util.Map; @LazySingleton public class SqlLifecycleManager { + public interface Cancelable + { + Set resources(); + void cancel(); + } + private final Object lock = new Object(); @GuardedBy("lock") - private final Map> sqlLifecycles = new HashMap<>(); + private final Map> sqlLifecycles = new HashMap<>(); - public void add(String sqlQueryId, SqlLifecycle lifecycle) + public void add(String sqlQueryId, Cancelable lifecycle) { synchronized (lock) { - assert lifecycle.getState() == State.AUTHORIZED; sqlLifecycles.computeIfAbsent(sqlQueryId, k -> new ArrayList<>()) .add(lifecycle); } @@ -65,10 +73,10 @@ public class SqlLifecycleManager * Removes the given lifecycle of the given query ID. * This method uses {@link Object#equals} to find the lifecycle matched to the given parameter. */ - public void remove(String sqlQueryId, SqlLifecycle lifecycle) + public void remove(String sqlQueryId, Cancelable lifecycle) { synchronized (lock) { - List lifecycles = sqlLifecycles.get(sqlQueryId); + List lifecycles = sqlLifecycles.get(sqlQueryId); if (lifecycles != null) { lifecycles.remove(lifecycle); if (lifecycles.isEmpty()) { @@ -82,10 +90,10 @@ public class SqlLifecycleManager * For the given sqlQueryId, this method removes all lifecycles that match to the given list of lifecycles. * This method uses {@link Object#equals} for matching lifecycles. 
*/ - public void removeAll(String sqlQueryId, List lifecyclesToRemove) + public void removeAll(String sqlQueryId, List lifecyclesToRemove) { synchronized (lock) { - List lifecycles = sqlLifecycles.get(sqlQueryId); + List lifecycles = sqlLifecycles.get(sqlQueryId); if (lifecycles != null) { lifecycles.removeAll(lifecyclesToRemove); if (lifecycles.isEmpty()) { @@ -98,10 +106,10 @@ public class SqlLifecycleManager /** * Returns a snapshot of the lifecycles for the given sqlQueryId. */ - public List getAll(String sqlQueryId) + public List getAll(String sqlQueryId) { synchronized (lock) { - List lifecycles = sqlLifecycles.get(sqlQueryId); + List lifecycles = sqlLifecycles.get(sqlQueryId); return lifecycles == null ? Collections.emptyList() : ImmutableList.copyOf(lifecycles); } } diff --git a/sql/src/main/java/org/apache/druid/sql/SqlQueryPlus.java b/sql/src/main/java/org/apache/druid/sql/SqlQueryPlus.java index bebf74b1a34..ebd43fb6a37 100644 --- a/sql/src/main/java/org/apache/druid/sql/SqlQueryPlus.java +++ b/sql/src/main/java/org/apache/druid/sql/SqlQueryPlus.java @@ -31,11 +31,14 @@ import java.util.List; import java.util.Map; /** - * Captures the inputs to a SQL execution request: the statement, - * the context, parameters, and the authorization result. Pass this - * around rather than the quad of items. The request can evolve: - * items can be filled in later as needed (except for the SQL - * and auth result, which are required.) + * Captures the inputs to a SQL execution request: the statement,the context, + * parameters, and the authorization result. Pass this around rather than the + * quad of items. The request can evolve: the context and parameters can be + * filled in later as needed. + *

+ * SQL requests come from a variety of sources in a variety of formats. Use + * the {@link Builder} class to create an instance from the information + * available at each point in the code. */ public class SqlQueryPlus { @@ -61,39 +64,19 @@ public class SqlQueryPlus this.authResult = Preconditions.checkNotNull(authResult); } - public SqlQueryPlus(final String sql, final AuthenticationResult authResult) + public static Builder builder() { - this(sql, (QueryContext) null, null, authResult); + return new Builder(); } - public static SqlQueryPlus fromSqlParameters( - String sql, - Map queryContext, - List parameters, - AuthenticationResult authResult - ) + public static Builder builder(String sql) { - return new SqlQueryPlus( - sql, - queryContext == null ? null : new QueryContext(queryContext), - parameters == null ? null : SqlQuery.getParameterList(parameters), - authResult - ); + return new Builder().sql(sql); } - public static SqlQueryPlus from( - String sql, - Map queryContext, - List parameters, - AuthenticationResult authResult - ) + public static Builder builder(SqlQuery sqlQuery) { - return new SqlQueryPlus( - sql, - queryContext == null ? 
null : new QueryContext(queryContext), - parameters, - authResult - ); + return new Builder().query(sqlQuery); } public String sql() @@ -121,8 +104,75 @@ public class SqlQueryPlus return new SqlQueryPlus(sql, context, parameters, authResult); } + public SqlQueryPlus withContext(Map context) + { + return new SqlQueryPlus(sql, new QueryContext(context), parameters, authResult); + } + public SqlQueryPlus withParameters(List parameters) { return new SqlQueryPlus(sql, queryContext, parameters, authResult); } + + public static class Builder + { + private String sql; + private QueryContext queryContext; + private List parameters; + private AuthenticationResult authResult; + + public Builder sql(String sql) + { + this.sql = sql; + return this; + } + + public Builder query(SqlQuery sqlQuery) + { + this.sql = sqlQuery.getQuery(); + this.queryContext = new QueryContext(sqlQuery.getContext()); + this.parameters = sqlQuery.getParameterList(); + return this; + } + + public Builder context(QueryContext queryContext) + { + this.queryContext = queryContext; + return this; + } + + public Builder context(Map queryContext) + { + this.queryContext = queryContext == null ? null : new QueryContext(queryContext); + return this; + } + + public Builder parameters(List parameters) + { + this.parameters = parameters; + return this; + } + + public Builder sqlParameters(List parameters) + { + this.parameters = parameters == null ? 
null : SqlQuery.getParameterList(parameters); + return this; + } + + public Builder auth(final AuthenticationResult authResult) + { + this.authResult = authResult; + return this; + } + + public SqlQueryPlus build() + { + return new SqlQueryPlus( + sql, + queryContext, + parameters, + authResult + ); + } + } } diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycleFactory.java b/sql/src/main/java/org/apache/druid/sql/SqlStatementFactory.java similarity index 57% rename from sql/src/main/java/org/apache/druid/sql/SqlLifecycleFactory.java rename to sql/src/main/java/org/apache/druid/sql/SqlStatementFactory.java index 52db20f820c..f1481b8d1fe 100644 --- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycleFactory.java +++ b/sql/src/main/java/org/apache/druid/sql/SqlStatementFactory.java @@ -28,46 +28,52 @@ import org.apache.druid.server.QueryScheduler; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.security.AuthConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; +import org.apache.druid.sql.http.SqlQuery; + +import javax.servlet.http.HttpServletRequest; @LazySingleton -public class SqlLifecycleFactory +public class SqlStatementFactory { - private final PlannerFactory plannerFactory; - private final ServiceEmitter emitter; - private final RequestLogger requestLogger; - private final QueryScheduler queryScheduler; - private final AuthConfig authConfig; - private final DefaultQueryConfig defaultQueryConfig; + protected final SqlToolbox lifecycleToolbox; @Inject - public SqlLifecycleFactory( - PlannerFactory plannerFactory, - ServiceEmitter emitter, - RequestLogger requestLogger, - QueryScheduler queryScheduler, - AuthConfig authConfig, - Supplier defaultQueryConfig + public SqlStatementFactory( + final PlannerFactory plannerFactory, + final ServiceEmitter emitter, + final RequestLogger requestLogger, + final QueryScheduler queryScheduler, + final AuthConfig authConfig, + final Supplier defaultQueryConfig, + final 
SqlLifecycleManager sqlLifecycleManager ) { - this.plannerFactory = plannerFactory; - this.emitter = emitter; - this.requestLogger = requestLogger; - this.queryScheduler = queryScheduler; - this.authConfig = authConfig; - this.defaultQueryConfig = defaultQueryConfig.get(); - } - - public SqlLifecycle factorize() - { - return new SqlLifecycle( + this.lifecycleToolbox = new SqlToolbox( plannerFactory, emitter, requestLogger, queryScheduler, authConfig, - defaultQueryConfig, - System.currentTimeMillis(), - System.nanoTime() + defaultQueryConfig.get(), + sqlLifecycleManager ); } + + public HttpStatement httpStatement( + final SqlQuery sqlQuery, + final HttpServletRequest req + ) + { + return new HttpStatement(lifecycleToolbox, sqlQuery, req); + } + + public DirectStatement directStatement(final SqlQueryPlus sqlRequest) + { + return new DirectStatement(lifecycleToolbox, sqlRequest); + } + + public PreparedStatement preparedStatement(final SqlQueryPlus sqlRequest) + { + return new PreparedStatement(lifecycleToolbox, sqlRequest); + } } diff --git a/sql/src/main/java/org/apache/druid/sql/SqlToolbox.java b/sql/src/main/java/org/apache/druid/sql/SqlToolbox.java new file mode 100644 index 00000000000..8ce9c7a3cdc --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/SqlToolbox.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql; + +import com.google.common.base.Preconditions; +import org.apache.druid.java.util.emitter.service.ServiceEmitter; +import org.apache.druid.query.DefaultQueryConfig; +import org.apache.druid.server.QueryScheduler; +import org.apache.druid.server.log.RequestLogger; +import org.apache.druid.server.security.AuthConfig; +import org.apache.druid.sql.calcite.planner.PlannerFactory; + +/** + * Provides the plan and execution resources to process SQL queries. + */ +public class SqlToolbox +{ + final PlannerFactory plannerFactory; + final ServiceEmitter emitter; + final RequestLogger requestLogger; + final QueryScheduler queryScheduler; + final AuthConfig authConfig; + final DefaultQueryConfig defaultQueryConfig; + final SqlLifecycleManager sqlLifecycleManager; + + public SqlToolbox( + final PlannerFactory plannerFactory, + final ServiceEmitter emitter, + final RequestLogger requestLogger, + final QueryScheduler queryScheduler, + final AuthConfig authConfig, + final DefaultQueryConfig defaultQueryConfig, + final SqlLifecycleManager sqlLifecycleManager + ) + { + this.plannerFactory = plannerFactory; + this.emitter = emitter; + this.requestLogger = requestLogger; + this.queryScheduler = queryScheduler; + this.authConfig = authConfig; + this.defaultQueryConfig = defaultQueryConfig; + this.sqlLifecycleManager = Preconditions.checkNotNull(sqlLifecycleManager, "sqlLifecycleManager"); + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/AbstractDruidJdbcStatement.java 
b/sql/src/main/java/org/apache/druid/sql/avatica/AbstractDruidJdbcStatement.java index 399ecf673fb..ff7bd916607 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/AbstractDruidJdbcStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/AbstractDruidJdbcStatement.java @@ -35,7 +35,6 @@ import java.sql.Array; import java.sql.DatabaseMetaData; import java.util.ArrayList; import java.util.List; -import java.util.concurrent.ExecutorService; /** * Common implementation for the JDBC {@code Statement} and @@ -55,16 +54,17 @@ public abstract class AbstractDruidJdbcStatement implements Closeable { public static final long START_OFFSET = 0; - protected final DruidConnection connection; + protected final String connectionId; protected final int statementId; + protected Throwable throwable; protected DruidJdbcResultSet resultSet; public AbstractDruidJdbcStatement( - final DruidConnection connection, + final String connectionId, final int statementId ) { - this.connection = Preconditions.checkNotNull(connection, "connection"); + this.connectionId = Preconditions.checkNotNull(connectionId, "connectionId"); this.statementId = statementId; } @@ -242,16 +242,11 @@ public abstract class AbstractDruidJdbcStatement implements Closeable public String getConnectionId() { - return connection.getConnectionId(); + return connectionId; } public int getStatementId() { return statementId; } - - public ExecutorService executor() - { - return connection.executor(); - } } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java index 6c5238fc455..0c430b92163 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java @@ -20,22 +20,19 @@ package org.apache.druid.sql.avatica; import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableMap; import com.google.errorprone.annotations.concurrent.GuardedBy; -import org.apache.calcite.tools.RelConversionException; import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.query.QueryContext; -import org.apache.druid.sql.SqlLifecycleFactory; +import org.apache.druid.sql.PreparedStatement; import org.apache.druid.sql.SqlQueryPlus; -import org.apache.druid.sql.calcite.planner.PlannerContext; +import org.apache.druid.sql.SqlStatementFactory; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -49,12 +46,13 @@ public class DruidConnection private final String connectionId; private final int maxStatements; - private final Map userSecret; - private final Map context; + private final ImmutableMap userSecret; + private final QueryContext context; private final AtomicInteger statementCounter = new AtomicInteger(); private final AtomicReference> timeoutFuture = new AtomicReference<>(); - private final ExecutorService yielderOpenCloseExecutor; + // Typically synchronized by connectionLock, except in one case: the onClose function passed + // into DruidStatements contained by the map. 
@GuardedBy("connectionLock") private final ConcurrentMap statements = new ConcurrentHashMap<>(); private final Object connectionLock = new Object(); @@ -66,19 +64,13 @@ public class DruidConnection final String connectionId, final int maxStatements, final Map userSecret, - final Map context + final QueryContext context ) { this.connectionId = Preconditions.checkNotNull(connectionId); this.maxStatements = maxStatements; this.userSecret = ImmutableMap.copyOf(userSecret); - this.context = Preconditions.checkNotNull(context); - this.yielderOpenCloseExecutor = Execs.singleThreaded( - StringUtils.format( - "JDBCYielderOpenCloseExecutor-connection-%s", - StringUtils.encodeForFormat(connectionId) - ) - ); + this.context = context; } public String getConnectionId() @@ -86,16 +78,7 @@ public class DruidConnection return connectionId; } - public QueryContext makeContext() - { - // QueryContext constructor copies the context parameters. - // we don't want to stringify arrays for JDBC ever because Avatica needs to handle this - final QueryContext queryContext = new QueryContext(context); - queryContext.addSystemParam(PlannerContext.CTX_SQL_STRINGIFY_ARRAYS, false); - return queryContext; - } - - public DruidJdbcStatement createStatement(SqlLifecycleFactory sqlLifecycleFactory) + public DruidJdbcStatement createStatement(SqlStatementFactory sqlLifecycleFactory) { final int statementId = statementCounter.incrementAndGet(); @@ -112,8 +95,9 @@ public class DruidConnection @SuppressWarnings("GuardedBy") final DruidJdbcStatement statement = new DruidJdbcStatement( - this, + connectionId, statementId, + context, sqlLifecycleFactory ); @@ -124,8 +108,8 @@ public class DruidConnection } public DruidJdbcPreparedStatement createPreparedStatement( - SqlLifecycleFactory sqlLifecycleFactory, - SqlQueryPlus queryPlus, + SqlStatementFactory sqlLifecycleFactory, + SqlQueryPlus sqlRequest, final long maxRowCount) { final int statementId = statementCounter.incrementAndGet(); @@ -141,14 +125,16 @@ 
public class DruidConnection throw DruidMeta.logFailure(new ISE("Too many open statements, limit is [%,d]", maxStatements)); } + @SuppressWarnings("GuardedBy") + final PreparedStatement statement = sqlLifecycleFactory.preparedStatement( + sqlRequest.withContext(context) + ); final DruidJdbcPreparedStatement jdbcStmt = new DruidJdbcPreparedStatement( - this, + connectionId, statementId, - queryPlus, - sqlLifecycleFactory, + statement, maxRowCount ); - jdbcStmt.prepare(); statements.put(statementId, jdbcStmt); LOG.debug("Connection [%s] opened prepared statement [%s].", connectionId, statementId); @@ -156,16 +142,6 @@ public class DruidConnection } } - public void prepareAndExecute( - final DruidJdbcStatement druidStatement, - final SqlQueryPlus queryPlus, - final long maxRowCount - ) throws RelConversionException - { - Preconditions.checkNotNull(context, "JDBC connection context is null!"); - druidStatement.execute(queryPlus.withContext(makeContext()), maxRowCount); - } - public AbstractDruidJdbcStatement getStatement(final int statementId) { synchronized (connectionLock) { @@ -205,8 +181,8 @@ public class DruidConnection public void close() { synchronized (connectionLock) { - open = false; - for (AbstractDruidJdbcStatement statement : statements.values()) { + // Copy statements before iterating because statement.close() modifies it. 
+ for (AbstractDruidJdbcStatement statement : ImmutableList.copyOf(statements.values())) { try { statement.close(); } @@ -214,9 +190,9 @@ public class DruidConnection LOG.warn("Connection [%s] failed to close statement [%s]!", connectionId, statement.getStatementId()); } } - statements.clear(); - yielderOpenCloseExecutor.shutdownNow(); + LOG.debug("Connection [%s] closed.", connectionId); + open = false; } } @@ -233,9 +209,4 @@ public class DruidConnection { return userSecret; } - - public ExecutorService executor() - { - return yielderOpenCloseExecutor; - } } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcPreparedStatement.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcPreparedStatement.java index 5f23b8ee4d2..428dcbf6ef6 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcPreparedStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcPreparedStatement.java @@ -24,9 +24,8 @@ import org.apache.calcite.avatica.Meta; import org.apache.calcite.avatica.remote.TypedValue; import org.apache.druid.java.util.common.ISE; import org.apache.druid.server.security.ForbiddenException; -import org.apache.druid.sql.SqlLifecycle; -import org.apache.druid.sql.SqlLifecycleFactory; -import org.apache.druid.sql.SqlQueryPlus; +import org.apache.druid.sql.DirectStatement; +import org.apache.druid.sql.PreparedStatement; import org.apache.druid.sql.calcite.planner.PrepareResult; import java.util.List; @@ -42,54 +41,46 @@ import java.util.List; */ public class DruidJdbcPreparedStatement extends AbstractDruidJdbcStatement { - private final SqlLifecycle sqlStatement; - private final SqlQueryPlus queryPlus; - private final SqlLifecycleFactory lifecycleFactory; + private final PreparedStatement sqlStatement; private final long maxRowCount; private Meta.Signature signature; private State state = State.NEW; public DruidJdbcPreparedStatement( - final DruidConnection connection, + final String connectionId, final int 
statementId, - final SqlQueryPlus queryPlus, - final SqlLifecycleFactory lifecycleFactory, + final PreparedStatement stmt, final long maxRowCount ) { - super(connection, statementId); - this.lifecycleFactory = lifecycleFactory; - this.queryPlus = queryPlus; + super(connectionId, statementId); + this.sqlStatement = stmt; this.maxRowCount = maxRowCount; - this.sqlStatement = lifecycleFactory.factorize(); - sqlStatement.initialize(queryPlus.sql(), connection.makeContext()); } public synchronized void prepare() { try { ensure(State.NEW); - sqlStatement.validateAndAuthorize(queryPlus.authResult()); PrepareResult prepareResult = sqlStatement.prepare(); signature = createSignature( prepareResult, - queryPlus.sql() + sqlStatement.sqlRequest().sql() ); state = State.PREPARED; } + // Preserve the type of forbidden and runtime exceptions. catch (ForbiddenException e) { - // Can't finalize statement in in this case. Call will fail with an - // assertion error. - DruidMeta.logFailure(e); - state = State.CLOSED; + close(); throw e; } catch (RuntimeException e) { - failed(e); + close(); throw e; } + // Wrap everything else catch (Throwable t) { - failed(t); + close(); throw new RuntimeException(t); } } @@ -106,19 +97,17 @@ public class DruidJdbcPreparedStatement extends AbstractDruidJdbcStatement ensure(State.PREPARED); closeResultSet(); try { - SqlLifecycle directStmt = lifecycleFactory.factorize(); - directStmt.initialize(queryPlus.sql(), connection.makeContext()); - directStmt.setParameters(parameters); - resultSet = new DruidJdbcResultSet(this, queryPlus, directStmt, maxRowCount); + DirectStatement directStmt = sqlStatement.execute(parameters); + resultSet = new DruidJdbcResultSet(this, directStmt, maxRowCount); resultSet.execute(); } // Failure to execute does not close the prepared statement. 
catch (RuntimeException e) { - failed(e); + resultSet = null; throw e; } catch (Throwable t) { - failed(t); + resultSet = null; throw new RuntimeException(t); } } @@ -134,19 +123,12 @@ public class DruidJdbcPreparedStatement extends AbstractDruidJdbcStatement throw new ISE("Invalid action for state [%s]", state); } - private void failed(Throwable t) - { - super.close(); - sqlStatement.finalizeStateAndEmitLogsAndMetrics(t, null, -1); - state = State.CLOSED; - } - @Override public synchronized void close() { if (state != State.CLOSED) { super.close(); - sqlStatement.finalizeStateAndEmitLogsAndMetrics(null, null, -1); + sqlStatement.close(); } state = State.CLOSED; } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcResultSet.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcResultSet.java index d4c3eba1d01..15a5e36770d 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcResultSet.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcResultSet.java @@ -22,26 +22,27 @@ package org.apache.druid.sql.avatica; import com.google.common.base.Preconditions; import com.google.errorprone.annotations.concurrent.GuardedBy; import org.apache.calcite.avatica.Meta; -import org.apache.calcite.tools.RelConversionException; import org.apache.druid.java.util.common.ISE; +import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Yielder; import org.apache.druid.java.util.common.guava.Yielders; -import org.apache.druid.sql.SqlLifecycle; -import org.apache.druid.sql.SqlQueryPlus; -import org.apache.druid.sql.calcite.planner.PrepareResult; +import org.apache.druid.sql.DirectStatement; import java.io.Closeable; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.concurrent.ExecutionException; +import 
java.util.concurrent.ExecutorService; /** * Druid's server-side representation of a JDBC result set. At most one * can be open per statement (standard or prepared). The implementation - * is based on Druid's {@link SqlLifecycle} class. Even if result + * is based on Druid's own {@link DirectStatement} class. Even if result * set is for a {@code PreparedStatement}, the result set itself uses - * a Druid {@code SqlLifecycle} which includes the parameter values + * a Druid {@code DirectStatement} which includes the parameter values * given for the execution. This allows Druid's planner to use the "query * optimized" form of parameter substitution: we replan the query for * each execution with the parameter values. @@ -72,9 +73,8 @@ public class DruidJdbcResultSet implements Closeable * https://github.com/apache/druid/pull/4288 * https://github.com/apache/druid/pull/4415 */ - private final AbstractDruidJdbcStatement jdbcStatement; - private final SqlQueryPlus sqlRequest; - private final SqlLifecycle stmt; + private final ExecutorService yielderOpenCloseExecutor; + private final DirectStatement stmt; private final long maxRowCount; private State state = State.NEW; private Meta.Signature signature; @@ -83,30 +83,27 @@ public class DruidJdbcResultSet implements Closeable public DruidJdbcResultSet( final AbstractDruidJdbcStatement jdbcStatement, - final SqlQueryPlus sqlRequest, - final SqlLifecycle stmt, + DirectStatement stmt, final long maxRowCount ) { - this.jdbcStatement = jdbcStatement; this.stmt = stmt; - this.sqlRequest = sqlRequest; this.maxRowCount = maxRowCount; + this.yielderOpenCloseExecutor = Execs.singleThreaded( + StringUtils.format( + "JDBCYielderOpenCloseExecutor-connection-%s-statement-%d", + StringUtils.encodeForFormat(jdbcStatement.getConnectionId()), + jdbcStatement.getStatementId() + ) + ); } - public synchronized void execute() throws RelConversionException + public synchronized void execute() { ensure(State.NEW); - 
stmt.validateAndAuthorize(sqlRequest.authResult()); - PrepareResult prepareResult = stmt.prepare(); - stmt.plan(); - signature = AbstractDruidJdbcStatement.createSignature( - prepareResult, - sqlRequest.sql() - ); try { state = State.RUNNING; - final Sequence baseSequence = jdbcStatement.executor().submit(stmt::execute).get(); + final Sequence baseSequence = yielderOpenCloseExecutor.submit(stmt::execute).get(); // We can't apply limits greater than Integer.MAX_VALUE, ignore them. final Sequence retSequence = @@ -115,6 +112,13 @@ public class DruidJdbcResultSet implements Closeable : baseSequence; yielder = Yielders.each(retSequence); + signature = AbstractDruidJdbcStatement.createSignature( + stmt.prepareResult(), + stmt.sqlRequest().sql() + ); + } + catch (ExecutionException e) { + throw closeAndPropagateThrowable(e.getCause()); } catch (Throwable t) { throw closeAndPropagateThrowable(t); @@ -180,8 +184,9 @@ public class DruidJdbcResultSet implements Closeable { DruidMeta.logFailure(t); // Report a failure so that the failure is logged. + stmt.reporter().failed(t); try { - close(t); + close(); } catch (Throwable t1) { t.addSuppressed(t1); @@ -199,11 +204,6 @@ public class DruidJdbcResultSet implements Closeable @Override public synchronized void close() - { - close(null); - } - - private void close(Throwable error) { if (state == State.NEW) { state = State.CLOSED; @@ -218,7 +218,7 @@ public class DruidJdbcResultSet implements Closeable this.yielder = null; // Put the close last, so any exceptions it throws are after we did the other cleanup above. 
- jdbcStatement.executor().submit( + yielderOpenCloseExecutor.submit( () -> { theYielder.close(); // makes this a Callable instead of Runnable so we don't need to catch exceptions inside the lambda @@ -226,6 +226,7 @@ public class DruidJdbcResultSet implements Closeable } ).get(); + yielderOpenCloseExecutor.shutdownNow(); } } catch (RuntimeException e) { @@ -235,7 +236,8 @@ public class DruidJdbcResultSet implements Closeable throw new RuntimeException(t); } finally { - stmt.finalizeStateAndEmitLogsAndMetrics(error, null, -1); + // Closing the statement cause logs and metrics to be emitted. + stmt.close(); } } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcStatement.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcStatement.java index ebe64f5bdec..4d6fe45207c 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidJdbcStatement.java @@ -21,11 +21,10 @@ package org.apache.druid.sql.avatica; import com.google.common.base.Preconditions; import org.apache.calcite.avatica.Meta; -import org.apache.calcite.tools.RelConversionException; -import org.apache.druid.server.security.ForbiddenException; -import org.apache.druid.sql.SqlLifecycle; -import org.apache.druid.sql.SqlLifecycleFactory; +import org.apache.druid.query.QueryContext; +import org.apache.druid.sql.DirectStatement; import org.apache.druid.sql.SqlQueryPlus; +import org.apache.druid.sql.SqlStatementFactory; /** * Represents Druid's version of the JDBC {@code Statement} class: @@ -34,39 +33,32 @@ import org.apache.druid.sql.SqlQueryPlus; */ public class DruidJdbcStatement extends AbstractDruidJdbcStatement { - private final SqlLifecycleFactory lifecycleFactory; - protected boolean closed; + private final SqlStatementFactory lifecycleFactory; + protected final QueryContext queryContext; public DruidJdbcStatement( - final DruidConnection connection, + final String connectionId, final int 
statementId, - final SqlLifecycleFactory lifecycleFactory + final QueryContext queryContext, + final SqlStatementFactory lifecycleFactory ) { - super(connection, statementId); + super(connectionId, statementId); + this.queryContext = queryContext; this.lifecycleFactory = Preconditions.checkNotNull(lifecycleFactory, "lifecycleFactory"); } - public synchronized void execute(SqlQueryPlus sqlRequest, long maxRowCount) throws RelConversionException + public synchronized void execute(SqlQueryPlus queryPlus, long maxRowCount) { closeResultSet(); - SqlLifecycle stmt = lifecycleFactory.factorize(); - stmt.initialize(sqlRequest.sql(), connection.makeContext()); + queryPlus = queryPlus.withContext(queryContext); + DirectStatement stmt = lifecycleFactory.directStatement(queryPlus); + resultSet = new DruidJdbcResultSet(this, stmt, Long.MAX_VALUE); try { - stmt.validateAndAuthorize(sqlRequest.authResult()); - resultSet = new DruidJdbcResultSet(this, sqlRequest, stmt, Long.MAX_VALUE); resultSet.execute(); } - catch (ForbiddenException e) { - // Can't finalize statement in in this case. Call will fail with an - // assertion error. 
- resultSet = null; - DruidMeta.logFailure(e); - throw e; - } catch (Throwable t) { - stmt.finalizeStateAndEmitLogsAndMetrics(t, null, -1); - resultSet = null; + closeResultSet(); throw t; } } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java index d318bd4bf38..545e10901a1 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java @@ -29,24 +29,29 @@ import com.google.common.collect.Iterables; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.inject.Inject; import com.google.inject.Injector; +import org.apache.calcite.avatica.AvaticaSeverity; import org.apache.calcite.avatica.MetaImpl; import org.apache.calcite.avatica.MissingResultsException; import org.apache.calcite.avatica.NoSuchConnectionException; import org.apache.calcite.avatica.NoSuchStatementException; import org.apache.calcite.avatica.QueryState; +import org.apache.calcite.avatica.remote.AvaticaRuntimeException; +import org.apache.calcite.avatica.remote.Service.ErrorResponse; import org.apache.calcite.avatica.remote.TypedValue; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.UOE; import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.query.QueryContext; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.Authenticator; import org.apache.druid.server.security.AuthenticatorMapper; import org.apache.druid.server.security.ForbiddenException; -import org.apache.druid.sql.SqlLifecycleFactory; import org.apache.druid.sql.SqlQueryPlus; +import org.apache.druid.sql.SqlStatementFactory; import org.apache.druid.sql.calcite.planner.Calcites; +import 
org.apache.druid.sql.calcite.planner.PlannerContext; import org.joda.time.Interval; import javax.annotation.Nonnull; @@ -100,7 +105,7 @@ public class DruidMeta extends MetaImpl "user", "password" ); - private final SqlLifecycleFactory sqlLifecycleFactory; + private final SqlStatementFactory sqlLifecycleFactory; private final ScheduledExecutorService exec; private final AvaticaServerConfig config; private final List authenticators; @@ -119,7 +124,7 @@ public class DruidMeta extends MetaImpl @Inject public DruidMeta( - final SqlLifecycleFactory sqlLifecycleFactory, + final SqlStatementFactory sqlLifecycleFactory, final AvaticaServerConfig config, final ErrorHandler errorHandler, final Injector injector @@ -156,7 +161,10 @@ public class DruidMeta extends MetaImpl } } } - openDruidConnection(ch.id, secret, contextMap); + // we don't want to stringify arrays for JDBC ever because Avatica needs to handle this + final QueryContext context = new QueryContext(contextMap); + context.addSystemParam(PlannerContext.CTX_SQL_STRINGIFY_ARRAYS, false); + openDruidConnection(ch.id, secret, context); } catch (NoSuchConnectionException e) { throw e; @@ -164,7 +172,7 @@ public class DruidMeta extends MetaImpl catch (Throwable t) { // we want to avoid sanitizing Avatica specific exceptions as the Avatica code can rely on them to handle issues // differently - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -182,7 +190,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -198,7 +206,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -218,7 +226,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -242,10 +250,11 @@ public class DruidMeta extends MetaImpl null, // No parameters in this path 
doAuthenticate(druidConnection) ); - DruidJdbcPreparedStatement stmt = druidConnection.createPreparedStatement( + DruidJdbcPreparedStatement stmt = getDruidConnection(ch.id).createPreparedStatement( sqlLifecycleFactory, sqlReq, maxRowCount); + stmt.prepare(); LOG.debug("Successfully prepared statement [%s] for execution", stmt.getStatementId()); return new StatementHandle(ch.id, stmt.getStatementId(), stmt.getSignature()); } @@ -253,7 +262,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -299,9 +308,11 @@ public class DruidMeta extends MetaImpl // Ignore "callback", this class is designed for use with LocalService which doesn't use it. final DruidJdbcStatement druidStatement = getDruidStatement(statement, DruidJdbcStatement.class); final DruidConnection druidConnection = getDruidConnection(statement.connectionId); - // No parameters for a "regular" JDBC statement. - SqlQueryPlus sqlRequest = new SqlQueryPlus(sql, null, null, doAuthenticate(druidConnection)); - druidConnection.prepareAndExecute(druidStatement, sqlRequest, maxRowCount); + AuthenticationResult authenticationResult = doAuthenticate(druidConnection); + SqlQueryPlus sqlRequest = SqlQueryPlus.builder(sql) + .auth(authenticationResult) + .build(); + druidStatement.execute(sqlRequest, maxRowCount); ExecuteResult result = doFetch(druidStatement, maxRowsInFirstFrame); LOG.debug("Successfully prepared statement [%s] and started execution", druidStatement.getStatementId()); return result; @@ -311,10 +322,32 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } + /** + * Convert a Druid exception to an Avatica exception. Avatica can pass + * along things like an error code and SQL state. There are defined + * values for security failures, so map to those. 
+ */ + private RuntimeException mapException(Throwable t) + { + // BasicSecurityAuthenticationException is not visible here. + String className = t.getClass().getSimpleName(); + if (t instanceof ForbiddenException || + "BasicSecurityAuthenticationException".equals(className)) { + throw new AvaticaRuntimeException( + t.getMessage(), + ErrorResponse.UNAUTHORIZED_ERROR_CODE, + ErrorResponse.UNAUTHORIZED_SQL_STATE, + AvaticaSeverity.ERROR); + } + + // Let Avatica do its default mapping. + throw errorHandler.sanitize(t); + } + private ExecuteResult doFetch(AbstractDruidJdbcStatement druidStatement, int maxRows) { final Signature signature = druidStatement.getSignature(); @@ -326,7 +359,7 @@ public class DruidMeta extends MetaImpl return new ExecuteResult( ImmutableList.of( MetaResultSet.create( - druidStatement.getConnectionId(), + druidStatement.connectionId, druidStatement.statementId, false, signature, @@ -372,7 +405,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -409,7 +442,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -440,7 +473,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -468,7 +501,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -507,7 +540,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -544,7 +577,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -603,7 +636,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw 
mapException(t); } } @@ -673,7 +706,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -694,7 +727,7 @@ public class DruidMeta extends MetaImpl throw e; } catch (Throwable t) { - throw errorHandler.sanitize(t); + throw mapException(t); } } @@ -729,7 +762,7 @@ public class DruidMeta extends MetaImpl private DruidConnection openDruidConnection( final String connectionId, final Map userSecret, - final Map context + final QueryContext context ) { if (connectionCount.incrementAndGet() > config.getMaxConnections()) { diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/ErrorHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/ErrorHandler.java index 3a43d581a0c..a2daf4689ba 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/ErrorHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/ErrorHandler.java @@ -32,7 +32,7 @@ import org.apache.druid.server.security.ForbiddenException; /** - * ErrorHandler is a utilty class that is used to sanitize exceptions. + * ErrorHandler is a utility class that is used to sanitize exceptions. */ class ErrorHandler { @@ -61,6 +61,8 @@ class ErrorHandler if (error instanceof ForbiddenException) { return (ForbiddenException) errorResponseTransformStrategy.transformIfNeeded((ForbiddenException) error); } + // Should map BasicSecurityAuthenticationException also, but the class is not + // visible here. 
if (error instanceof ISE) { return (ISE) errorResponseTransformStrategy.transformIfNeeded((ISE) error); } @@ -74,7 +76,7 @@ class ErrorHandler // cannot check cause of the throwable because it cannot be cast back to the original's type // so this only checks runtime exceptions for causes if (error instanceof RuntimeException && error.getCause() instanceof SanitizableException) { - // could do `throw sanitize(error);` but just sanitizing immediatley avoids unnecessary going down multiple levels + // could do `throw sanitize(error);` but just sanitizing immediately avoids unnecessary going down multiple levels return new RuntimeException(errorResponseTransformStrategy.transformIfNeeded((SanitizableException) error.getCause())); } QueryInterruptedException wrappedError = QueryInterruptedException.wrapIfNeeded(error); @@ -84,7 +86,7 @@ class ErrorHandler /** * Check to see if something needs to be sanitized. *

- * This does this by checking to see if the ErrorResponse is different than a NoOp Error response transform strategy. + * Done by checking to see if the ErrorResponse is different than a NoOp Error response transform strategy. * * @return a boolean that returns true if error handler has an error response strategy other than the NoOp error * response strategy diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java index 0528eded385..c81843b2104 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java @@ -42,6 +42,7 @@ import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.math.expr.ExpressionProcessing; import org.apache.druid.query.ordering.StringComparator; import org.apache.druid.query.ordering.StringComparators; @@ -57,6 +58,7 @@ import org.joda.time.format.DateTimeFormatterBuilder; import org.joda.time.format.ISODateTimeFormat; import javax.annotation.Nullable; + import java.math.BigDecimal; import java.nio.charset.Charset; import java.sql.Date; @@ -69,9 +71,13 @@ import java.util.regex.Pattern; /** * Utility functions for Calcite. + *

+ * See also the file {@code saffron.properties} which holds the + * character set system properties formerly set in this file. */ public class Calcites { + private static final EmittingLogger log = new EmittingLogger(Calcites.class); private static final DateTimes.UtcFormatter CALCITE_DATE_PARSER = DateTimes.wrapFormatter(ISODateTimeFormat.dateParser()); private static final DateTimes.UtcFormatter CALCITE_TIMESTAMP_PARSER = DateTimes.wrapFormatter( new DateTimeFormatterBuilder() @@ -100,26 +106,6 @@ public class Calcites // No instantiation. } - public static void setSystemProperties() - { - // These properties control the charsets used for SQL literals. I don't see a way to change this except through - // system properties, so we'll have to set those... - - final String charset = ConversionUtil.NATIVE_UTF16_CHARSET_NAME; - - // Deprecated in Calcite 1.19. See: - // https://calcite.apache.org/javadocAggregate/org/apache/calcite/util/SaffronProperties.html - System.setProperty("saffron.default.charset", Calcites.defaultCharset().name()); - System.setProperty("saffron.default.nationalcharset", Calcites.defaultCharset().name()); - System.setProperty("saffron.default.collation.name", StringUtils.format("%s$en_US", charset)); - - // The following are the current names. See org.apache.calcite.config.CalciteSystemProperty - // https://github.com/apache/calcite/blob/master/core/src/main/java/org/apache/calcite/config/CalciteSystemProperty.java - System.setProperty("calcite.default.charset", Calcites.defaultCharset().name()); - System.setProperty("calcite.default.nationalcharset", Calcites.defaultCharset().name()); - System.setProperty("calcite.default.collation.name", StringUtils.format("%s$en_US", charset)); - } - public static Charset defaultCharset() { return DEFAULT_CHARSET; @@ -144,7 +130,6 @@ public class Calcites } builder.append("'"); return isPlainAscii ? 
builder.toString() : "U&" + builder; - } /** @@ -273,8 +258,6 @@ public class Calcites final boolean nullable ) { - - final RelDataType dataType = typeFactory.createArrayType( createSqlTypeWithNullability(typeFactory, elementTypeName, nullable), -1 diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index 33a96a46fbb..cd51800e467 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -132,6 +132,7 @@ public class DruidPlanner implements Closeable private ParsedNodes parsed; private SqlNode validatedQueryNode; private boolean authorized; + private PrepareResult prepareResult; private Set resourceActions; private RelRoot rootQueryRel; private RexBuilder rexBuilder; @@ -148,13 +149,6 @@ public class DruidPlanner implements Closeable this.queryMakerFactory = queryMakerFactory; } - private ParsedNodes parse() throws SqlParseException, ValidationException - { - resetPlanner(); - SqlNode root = planner.parse(plannerContext.getSql()); - return ParsedNodes.create(root, plannerContext.getTimeZone()); - } - /** * Validates a SQL query and populates {@link PlannerContext#getResourceActions()}. 
* @@ -164,7 +158,6 @@ public class DruidPlanner implements Closeable public void validate() throws SqlParseException, ValidationException { Preconditions.checkState(state == State.START); - resetPlanner(); SqlNode root = planner.parse(plannerContext.getSql()); parsed = ParsedNodes.create(root, plannerContext.getTimeZone()); @@ -244,7 +237,13 @@ public class DruidPlanner implements Closeable Preconditions.checkState(state == State.VALIDATED); rootQueryRel = planner.rel(validatedQueryNode); + doPrepare(null); + state = State.PREPARED; + return prepareResult; + } + private void doPrepare(@Nullable QueryMaker queryMaker) throws ValidationException + { final RelDataTypeFactory typeFactory = rootQueryRel.rel.getCluster().getTypeFactory(); final SqlValidator validator = planner.getValidator(); final RelDataType parameterTypes = validator.getParameterRowType(validatedQueryNode); @@ -253,11 +252,13 @@ public class DruidPlanner implements Closeable if (parsed.getExplainNode() != null) { returnedRowType = getExplainStructType(typeFactory); } else { - returnedRowType = buildQueryMaker(rootQueryRel, parsed.getInsertOrReplace()).getResultType(); + if (queryMaker == null) { + queryMaker = buildQueryMaker(rootQueryRel, parsed.getInsertOrReplace()); + } + returnedRowType = queryMaker.getResultType(); } - state = State.PREPARED; - return new PrepareResult(returnedRowType, parameterTypes); + prepareResult = new PrepareResult(returnedRowType, parameterTypes); } /** @@ -285,32 +286,30 @@ public class DruidPlanner implements Closeable * an authenticated request must be authorized for to process the * query. The actions will be {@code null} if the * planner has not yet advanced to the validation step. This may occur if - * validation fails and the caller ({@code SqlLifecycle}) accesses the resource + * validation fails and the caller accesses the resource * actions as part of clean-up. 
*/ public Set resourceActions(boolean includeContext) { - Set actions; if (includeContext) { - actions = new HashSet<>(resourceActions); + Set actions = new HashSet<>(resourceActions); plannerContext.getQueryContext().getUserParams().keySet().forEach(contextParam -> actions.add( new ResourceAction(new Resource(contextParam, ResourceType.QUERY_CONTEXT), Action.WRITE) )); + return actions; } else { - actions = resourceActions; + return resourceActions; } - return actions; } /** * Plan an SQL query for execution, returning a {@link PlannerResult} which can be used to actually execute the query. * * Ideally, the query can be planned into a native Druid query, using {@link #planWithDruidConvention}, but will - * fall back to {@link #planWithBindableConvention} if this is not possible. + * fall-back to {@link #planWithBindableConvention} if this is not possible. * * Planning reuses the validation done in `validate()` which must be called first. */ - @SuppressWarnings("RedundantThrows") public PlannerResult plan() throws ValidationException { Preconditions.checkState(state == State.VALIDATED || state == State.PREPARED); @@ -359,33 +358,17 @@ public class DruidPlanner implements Closeable return plannerContext; } + public PrepareResult prepareResult() + { + return prepareResult; + } + @Override public void close() { planner.close(); } - /** - * While the actual query might not have changed, if the druid planner is re-used, we still have the need to reset the - * {@link #planner} since we do not re-use artifacts or keep track of state between - * {@link #validate}, {@link #prepare}, and {@link #plan} and instead repeat parsing and validation - * for each step. 
- * - * Currently, that state tracking is done in {@link org.apache.druid.sql.SqlLifecycle}, which will create a new - * planner for each of the corresponding steps so this isn't strictly necessary at this time, this method is here as - * much to make this situation explicit and provide context for a future refactor as anything else (and some tests - * do re-use the planner between validate, prepare, and plan, which will run into this issue). - * - * This could be improved by tying {@link org.apache.druid.sql.SqlLifecycle} and {@link DruidPlanner} states more - * closely with the state of {@link #planner}, instead of repeating parsing and validation between each of these - * steps. - */ - private void resetPlanner() - { - planner.close(); - planner.reset(); - } - /** * Construct a {@link PlannerResult} for a {@link RelNode} that is directly translatable to a native Druid query. */ @@ -398,6 +381,9 @@ public class DruidPlanner implements Closeable final RelRoot possiblyLimitedRoot = possiblyWrapRootWithOuterLimitFromContext(root); final QueryMaker queryMaker = buildQueryMaker(possiblyLimitedRoot, insertOrReplace); plannerContext.setQueryMaker(queryMaker); + if (prepareResult == null) { + doPrepare(queryMaker); + } // Fall-back dynamic parameter substitution using {@link RelParameterizerShuttle} // in the event that {@link #rewriteDynamicParameters(SqlNode)} was unable to @@ -513,10 +499,7 @@ public class DruidPlanner implements Closeable @Override public Object[] next() { - // Avoids an Intellij IteratorNextCanNotThrowNoSuchElementException - // warning. 
- Object[] temp = (Object[]) enumerator.current(); - return temp; + return (Object[]) enumerator.current(); } }); } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java index f7ceaf51f6b..b450f777694 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java @@ -83,6 +83,8 @@ public class PlannerConfig @JsonProperty private int maxNumericInFilters = NUM_FILTER_NOT_USED; + private boolean serializeComplexValues = true; + public long getMetadataSegmentPollPeriod() { return metadataSegmentPollPeriod; @@ -98,8 +100,6 @@ public class PlannerConfig return metadataSegmentCacheEnable; } - private boolean serializeComplexValues = true; - public Period getMetadataRefreshPeriod() { return metadataRefreshPeriod; @@ -174,71 +174,9 @@ public class PlannerConfig if (queryContext.isEmpty()) { return this; } - - final PlannerConfig newConfig = new PlannerConfig(); - newConfig.metadataRefreshPeriod = getMetadataRefreshPeriod(); - newConfig.maxTopNLimit = getMaxTopNLimit(); - newConfig.useApproximateCountDistinct = queryContext.getAsBoolean( - CTX_KEY_USE_APPROXIMATE_COUNT_DISTINCT, - isUseApproximateCountDistinct() - ); - newConfig.useGroupingSetForExactDistinct = queryContext.getAsBoolean( - CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, - isUseGroupingSetForExactDistinct() - ); - newConfig.useApproximateTopN = queryContext.getAsBoolean( - CTX_KEY_USE_APPROXIMATE_TOPN, - isUseApproximateTopN() - ); - newConfig.computeInnerJoinCostAsFilter = queryContext.getAsBoolean( - CTX_COMPUTE_INNER_JOIN_COST_AS_FILTER, - computeInnerJoinCostAsFilter - ); - newConfig.useNativeQueryExplain = queryContext.getAsBoolean( - CTX_KEY_USE_NATIVE_QUERY_EXPLAIN, - isUseNativeQueryExplain() - ); - newConfig.forceExpressionVirtualColumns = queryContext.getAsBoolean( - 
CTX_KEY_FORCE_EXPRESSION_VIRTUAL_COLUMNS, - isForceExpressionVirtualColumns() - ); - final int systemConfigMaxNumericInFilters = getMaxNumericInFilters(); - final int queryContextMaxNumericInFilters = queryContext.getAsInt( - CTX_MAX_NUMERIC_IN_FILTERS, - getMaxNumericInFilters() - ); - newConfig.maxNumericInFilters = validateMaxNumericInFilters(queryContextMaxNumericInFilters, - systemConfigMaxNumericInFilters); - newConfig.requireTimeCondition = isRequireTimeCondition(); - newConfig.sqlTimeZone = getSqlTimeZone(); - newConfig.awaitInitializationOnStart = isAwaitInitializationOnStart(); - newConfig.metadataSegmentCacheEnable = isMetadataSegmentCacheEnable(); - newConfig.metadataSegmentPollPeriod = getMetadataSegmentPollPeriod(); - newConfig.serializeComplexValues = shouldSerializeComplexValues(); - newConfig.authorizeSystemTablesDirectly = isAuthorizeSystemTablesDirectly(); - return newConfig; - } - - private int validateMaxNumericInFilters(int queryContextMaxNumericInFilters, int systemConfigMaxNumericInFilters) - { - // if maxNumericInFIlters through context == 0 catch exception - // else if query context exceeds system set value throw error - if (queryContextMaxNumericInFilters == 0) { - throw new UOE("[%s] must be greater than 0", CTX_MAX_NUMERIC_IN_FILTERS); - } else if (queryContextMaxNumericInFilters > systemConfigMaxNumericInFilters - && systemConfigMaxNumericInFilters != NUM_FILTER_NOT_USED) { - throw new UOE( - "Expected parameter[%s] cannot exceed system set value of [%d]", - CTX_MAX_NUMERIC_IN_FILTERS, - systemConfigMaxNumericInFilters - ); - } - // if system set value is not present, thereby inferring default of -1 - if (systemConfigMaxNumericInFilters == NUM_FILTER_NOT_USED) { - return systemConfigMaxNumericInFilters; - } - // all other cases return the valid query context value - return queryContextMaxNumericInFilters; + return toBuilder() + .withOverrides(queryContext) + .build(); } @Override @@ -302,4 +240,215 @@ public class PlannerConfig ", 
useNativeQueryExplain=" + useNativeQueryExplain + '}'; } + + public static Builder builder() + { + return new PlannerConfig().toBuilder(); + } + + public Builder toBuilder() + { + return new Builder(this); + } + + /** + * Builder for {@link PlannerConfig}, primarily for use in tests to + * allow setting options programmatically rather than from the command + * line or a properties file. Starts with values from an existing + * (typically default) config. + */ + public static class Builder + { + private Period metadataRefreshPeriod; + private int maxTopNLimit; + private boolean useApproximateCountDistinct; + private boolean useApproximateTopN; + private boolean requireTimeCondition; + private boolean awaitInitializationOnStart; + private DateTimeZone sqlTimeZone; + private boolean metadataSegmentCacheEnable; + private long metadataSegmentPollPeriod; + private boolean useGroupingSetForExactDistinct; + private boolean computeInnerJoinCostAsFilter; + private boolean authorizeSystemTablesDirectly; + private boolean useNativeQueryExplain; + private boolean forceExpressionVirtualColumns; + private int maxNumericInFilters; + private boolean serializeComplexValues; + + public Builder(PlannerConfig base) + { + // Note: use accessors, not fields, since some tests change the + // config by defining a subclass. 
+ + metadataRefreshPeriod = base.getMetadataRefreshPeriod(); + maxTopNLimit = base.getMaxTopNLimit(); + useApproximateCountDistinct = base.isUseApproximateCountDistinct(); + useApproximateTopN = base.isUseApproximateTopN(); + requireTimeCondition = base.isRequireTimeCondition(); + awaitInitializationOnStart = base.isAwaitInitializationOnStart(); + sqlTimeZone = base.getSqlTimeZone(); + metadataSegmentCacheEnable = base.isMetadataSegmentCacheEnable(); + useGroupingSetForExactDistinct = base.isUseGroupingSetForExactDistinct(); + metadataSegmentPollPeriod = base.getMetadataSegmentPollPeriod(); + computeInnerJoinCostAsFilter = base.computeInnerJoinCostAsFilter; + authorizeSystemTablesDirectly = base.isAuthorizeSystemTablesDirectly(); + useNativeQueryExplain = base.isUseNativeQueryExplain(); + forceExpressionVirtualColumns = base.isForceExpressionVirtualColumns(); + maxNumericInFilters = base.getMaxNumericInFilters(); + serializeComplexValues = base.shouldSerializeComplexValues(); + } + + public Builder requireTimeCondition(boolean option) + { + this.requireTimeCondition = option; + return this; + } + + public Builder maxTopNLimit(int value) + { + this.maxTopNLimit = value; + return this; + } + + public Builder maxNumericInFilters(int value) + { + this.maxNumericInFilters = value; + return this; + } + + public Builder useApproximateCountDistinct(boolean option) + { + this.useApproximateCountDistinct = option; + return this; + } + + public Builder useApproximateTopN(boolean option) + { + this.useApproximateTopN = option; + return this; + } + + public Builder useGroupingSetForExactDistinct(boolean option) + { + this.useGroupingSetForExactDistinct = option; + return this; + } + + public Builder computeInnerJoinCostAsFilter(boolean option) + { + this.computeInnerJoinCostAsFilter = option; + return this; + } + + public Builder sqlTimeZone(DateTimeZone value) + { + this.sqlTimeZone = value; + return this; + } + + public Builder authorizeSystemTablesDirectly(boolean option) + 
{ + this.authorizeSystemTablesDirectly = option; + return this; + } + + public Builder serializeComplexValues(boolean option) + { + this.serializeComplexValues = option; + return this; + } + + public Builder useNativeQueryExplain(boolean option) + { + this.useNativeQueryExplain = option; + return this; + } + + public Builder metadataRefreshPeriod(String value) + { + this.metadataRefreshPeriod = new Period(value); + return this; + } + + public Builder withOverrides(final QueryContext queryContext) + { + useApproximateCountDistinct = queryContext.getAsBoolean( + CTX_KEY_USE_APPROXIMATE_COUNT_DISTINCT, + useApproximateCountDistinct + ); + useGroupingSetForExactDistinct = queryContext.getAsBoolean( + CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, + useGroupingSetForExactDistinct + ); + useApproximateTopN = queryContext.getAsBoolean( + CTX_KEY_USE_APPROXIMATE_TOPN, + useApproximateTopN + ); + computeInnerJoinCostAsFilter = queryContext.getAsBoolean( + CTX_COMPUTE_INNER_JOIN_COST_AS_FILTER, + computeInnerJoinCostAsFilter + ); + useNativeQueryExplain = queryContext.getAsBoolean( + CTX_KEY_USE_NATIVE_QUERY_EXPLAIN, + useNativeQueryExplain + ); + forceExpressionVirtualColumns = queryContext.getAsBoolean( + CTX_KEY_FORCE_EXPRESSION_VIRTUAL_COLUMNS, + forceExpressionVirtualColumns + ); + final int queryContextMaxNumericInFilters = queryContext.getAsInt( + CTX_MAX_NUMERIC_IN_FILTERS, + maxNumericInFilters + ); + maxNumericInFilters = validateMaxNumericInFilters( + queryContextMaxNumericInFilters, + maxNumericInFilters); + return this; + } + + private static int validateMaxNumericInFilters(int queryContextMaxNumericInFilters, int systemConfigMaxNumericInFilters) + { + // if maxNumericInFIlters through context == 0 catch exception + // else if query context exceeds system set value throw error + if (queryContextMaxNumericInFilters == 0) { + throw new UOE("[%s] must be greater than 0", CTX_MAX_NUMERIC_IN_FILTERS); + } else if (queryContextMaxNumericInFilters > 
systemConfigMaxNumericInFilters + && systemConfigMaxNumericInFilters != NUM_FILTER_NOT_USED) { + throw new UOE( + "Expected parameter[%s] cannot exceed system set value of [%d]", + CTX_MAX_NUMERIC_IN_FILTERS, + systemConfigMaxNumericInFilters + ); + } + // if system set value is not present, thereby inferring default of -1 + if (systemConfigMaxNumericInFilters == NUM_FILTER_NOT_USED) { + return systemConfigMaxNumericInFilters; + } + // all other cases return the valid query context value + return queryContextMaxNumericInFilters; + } + + public PlannerConfig build() + { + PlannerConfig config = new PlannerConfig(); + config.metadataRefreshPeriod = metadataRefreshPeriod; + config.maxTopNLimit = maxTopNLimit; + config.useApproximateCountDistinct = useApproximateCountDistinct; + config.useApproximateTopN = useApproximateTopN; + config.requireTimeCondition = requireTimeCondition; + config.awaitInitializationOnStart = awaitInitializationOnStart; + config.sqlTimeZone = sqlTimeZone; + config.metadataSegmentCacheEnable = metadataSegmentCacheEnable; + config.metadataSegmentPollPeriod = metadataSegmentPollPeriod; + config.useGroupingSetForExactDistinct = useGroupingSetForExactDistinct; + config.computeInnerJoinCostAsFilter = computeInnerJoinCostAsFilter; + config.authorizeSystemTablesDirectly = authorizeSystemTablesDirectly; + config.useNativeQueryExplain = useNativeQueryExplain; + config.maxNumericInFilters = maxNumericInFilters; + config.forceExpressionVirtualColumns = forceExpressionVirtualColumns; + config.serializeComplexValues = serializeComplexValues; + return config; + } + } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidRelToDruidRule.java b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidRelToDruidRule.java index bbc249c0544..8acc5c906b4 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidRelToDruidRule.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidRelToDruidRule.java @@ -22,11 +22,13 @@ 
package org.apache.druid.sql.calcite.rule; import org.apache.calcite.plan.Convention; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.convert.ConverterRule; +import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.sql.calcite.rel.DruidConvention; import org.apache.druid.sql.calcite.rel.DruidRel; public class DruidRelToDruidRule extends ConverterRule { + private static final Logger log = new Logger(DruidRelToDruidRule.class); private static final DruidRelToDruidRule INSTANCE = new DruidRelToDruidRule(); private DruidRelToDruidRule() @@ -47,6 +49,12 @@ public class DruidRelToDruidRule extends ConverterRule @Override public RelNode convert(RelNode rel) { - return ((DruidRel) rel).asDruidConvention(); + try { + return ((DruidRel) rel).asDruidConvention(); + } + catch (Exception e) { + log.error(e, "Conversion failed"); + throw e; + } } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java index 4b1b62b044b..4af0b934ef1 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java @@ -1153,7 +1153,7 @@ public class SystemSchema extends AbstractSchema authorizerMapper ); if (!stateAccess.isAllowed()) { - throw new ForbiddenException("Insufficient permission to view servers : " + stateAccess); + throw new ForbiddenException("Insufficient permission to view servers: " + stateAccess.toMessage()); } } } diff --git a/sql/src/main/java/org/apache/druid/sql/guice/SqlModule.java b/sql/src/main/java/org/apache/druid/sql/guice/SqlModule.java index 6e8acdd4e8d..589ed2575d4 100644 --- a/sql/src/main/java/org/apache/druid/sql/guice/SqlModule.java +++ b/sql/src/main/java/org/apache/druid/sql/guice/SqlModule.java @@ -31,7 +31,6 @@ import org.apache.druid.sql.avatica.AvaticaModule; import 
org.apache.druid.sql.calcite.aggregation.SqlAggregationModule; import org.apache.druid.sql.calcite.expression.builtin.QueryLookupOperatorConversion; import org.apache.druid.sql.calcite.planner.CalcitePlannerModule; -import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.schema.DruidCalciteSchemaModule; import org.apache.druid.sql.calcite.schema.DruidSchemaManager; import org.apache.druid.sql.calcite.schema.NoopDruidSchemaManager; @@ -56,7 +55,6 @@ public class SqlModule implements Module public SqlModule() { - } @VisibleForTesting @@ -70,48 +68,48 @@ public class SqlModule implements Module @Override public void configure(Binder binder) { - if (isEnabled()) { - Calcites.setSystemProperties(); + if (!isEnabled()) { + return; + } - PolyBind.optionBinder(binder, Key.get(ViewManager.class)) - .addBinding(NoopViewManager.TYPE) - .to(NoopViewManager.class) - .in(LazySingleton.class); + PolyBind.optionBinder(binder, Key.get(ViewManager.class)) + .addBinding(NoopViewManager.TYPE) + .to(NoopViewManager.class) + .in(LazySingleton.class); - PolyBind.createChoiceWithDefault( - binder, - PROPERTY_SQL_VIEW_MANAGER_TYPE, - Key.get(ViewManager.class), - NoopViewManager.TYPE - ); + PolyBind.createChoiceWithDefault( + binder, + PROPERTY_SQL_VIEW_MANAGER_TYPE, + Key.get(ViewManager.class), + NoopViewManager.TYPE + ); - PolyBind.optionBinder(binder, Key.get(DruidSchemaManager.class)) - .addBinding(NoopDruidSchemaManager.TYPE) - .to(NoopDruidSchemaManager.class) - .in(LazySingleton.class); + PolyBind.optionBinder(binder, Key.get(DruidSchemaManager.class)) + .addBinding(NoopDruidSchemaManager.TYPE) + .to(NoopDruidSchemaManager.class) + .in(LazySingleton.class); - PolyBind.createChoiceWithDefault( - binder, - PROPERTY_SQL_SCHEMA_MANAGER_TYPE, - Key.get(DruidSchemaManager.class), - NoopDruidSchemaManager.TYPE - ); + PolyBind.createChoiceWithDefault( + binder, + PROPERTY_SQL_SCHEMA_MANAGER_TYPE, + Key.get(DruidSchemaManager.class), + 
NoopDruidSchemaManager.TYPE + ); - binder.install(new DruidCalciteSchemaModule()); - binder.install(new CalcitePlannerModule()); - binder.install(new SqlAggregationModule()); - binder.install(new DruidViewModule()); + binder.install(new DruidCalciteSchemaModule()); + binder.install(new CalcitePlannerModule()); + binder.install(new SqlAggregationModule()); + binder.install(new DruidViewModule()); - // QueryLookupOperatorConversion isn't in DruidOperatorTable since it needs a LookupExtractorFactoryContainerProvider injected. - SqlBindings.addOperatorConversion(binder, QueryLookupOperatorConversion.class); + // QueryLookupOperatorConversion isn't in DruidOperatorTable since it needs a LookupExtractorFactoryContainerProvider injected. + SqlBindings.addOperatorConversion(binder, QueryLookupOperatorConversion.class); - if (isJsonOverHttpEnabled()) { - binder.install(new SqlHttpModule()); - } + if (isJsonOverHttpEnabled()) { + binder.install(new SqlHttpModule()); + } - if (isAvaticaEnabled()) { - binder.install(new AvaticaModule()); - } + if (isAvaticaEnabled()) { + binder.install(new AvaticaModule()); } } diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java index bc3fa82fe46..7c7b1c7c237 100644 --- a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java @@ -34,7 +34,6 @@ import org.apache.druid.java.util.common.guava.Yielders; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.query.BadQueryException; import org.apache.druid.query.QueryCapacityExceededException; -import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryInterruptedException; import org.apache.druid.query.QueryTimeoutException; import org.apache.druid.query.QueryUnsupportedException; @@ -44,11 +43,13 @@ import org.apache.druid.server.security.AuthorizationUtils; import 
org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.server.security.ForbiddenException; import org.apache.druid.server.security.ResourceAction; -import org.apache.druid.sql.SqlLifecycle; -import org.apache.druid.sql.SqlLifecycleFactory; +import org.apache.druid.sql.HttpStatement; +import org.apache.druid.sql.SqlExecutionReporter; import org.apache.druid.sql.SqlLifecycleManager; +import org.apache.druid.sql.SqlLifecycleManager.Cancelable; import org.apache.druid.sql.SqlPlanningException; import org.apache.druid.sql.SqlRowTransformer; +import org.apache.druid.sql.SqlStatementFactory; import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; @@ -63,6 +64,7 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.StreamingOutput; + import java.io.IOException; import java.util.List; import java.util.Set; @@ -78,7 +80,7 @@ public class SqlResource private final ObjectMapper jsonMapper; private final AuthorizerMapper authorizerMapper; - private final SqlLifecycleFactory sqlLifecycleFactory; + private final SqlStatementFactory sqlLifecycleFactory; private final SqlLifecycleManager sqlLifecycleManager; private final ServerConfig serverConfig; @@ -86,7 +88,7 @@ public class SqlResource public SqlResource( @Json ObjectMapper jsonMapper, AuthorizerMapper authorizerMapper, - SqlLifecycleFactory sqlLifecycleFactory, + SqlStatementFactory sqlLifecycleFactory, SqlLifecycleManager sqlLifecycleManager, ServerConfig serverConfig ) @@ -106,23 +108,14 @@ public class SqlResource @Context final HttpServletRequest req ) throws IOException { - final SqlLifecycle lifecycle = sqlLifecycleFactory.factorize(); - final String sqlQueryId = lifecycle.initialize(sqlQuery.getQuery(), new QueryContext(sqlQuery.getContext())); - final String remoteAddr = req.getRemoteAddr(); + final HttpStatement stmt = sqlLifecycleFactory.httpStatement(sqlQuery, req); + final String 
sqlQueryId = stmt.sqlQueryId(); final String currThreadName = Thread.currentThread().getName(); try { Thread.currentThread().setName(StringUtils.format("sql[%s]", sqlQueryId)); - - lifecycle.setParameters(sqlQuery.getParameterList()); - lifecycle.validateAndAuthorize(req); - // must add after lifecycle is authorized - sqlLifecycleManager.add(sqlQueryId, lifecycle); - - lifecycle.plan(); - - final SqlRowTransformer rowTransformer = lifecycle.createRowTransformer(); - final Sequence sequence = lifecycle.execute(); + final Sequence sequence = stmt.execute(); + final SqlRowTransformer rowTransformer = stmt.createRowTransformer(); final Yielder yielder0 = Yielders.each(sequence); try { @@ -165,7 +158,7 @@ public class SqlResource } finally { yielder.close(); - endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, os.getCount()); + endLifecycle(stmt, e, os.getCount()); } } ) @@ -184,36 +177,37 @@ public class SqlResource } } catch (QueryCapacityExceededException cap) { - endLifecycle(sqlQueryId, lifecycle, cap, remoteAddr, -1); + endLifecycle(stmt, cap, -1); return buildNonOkResponse(QueryCapacityExceededException.STATUS_CODE, cap, sqlQueryId); } catch (QueryUnsupportedException unsupported) { - endLifecycle(sqlQueryId, lifecycle, unsupported, remoteAddr, -1); + endLifecycle(stmt, unsupported, -1); return buildNonOkResponse(QueryUnsupportedException.STATUS_CODE, unsupported, sqlQueryId); } catch (QueryTimeoutException timeout) { - endLifecycle(sqlQueryId, lifecycle, timeout, remoteAddr, -1); + endLifecycle(stmt, timeout, -1); return buildNonOkResponse(QueryTimeoutException.STATUS_CODE, timeout, sqlQueryId); } catch (BadQueryException e) { - endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1); + endLifecycle(stmt, e, -1); return buildNonOkResponse(BadQueryException.STATUS_CODE, e, sqlQueryId); } catch (ForbiddenException e) { - endLifecycleWithoutEmittingMetrics(sqlQueryId, lifecycle); + endLifecycleWithoutEmittingMetrics(stmt); throw (ForbiddenException) 
serverConfig.getErrorResponseTransformStrategy() .transformIfNeeded(e); // let ForbiddenExceptionMapper handle this } catch (RelOptPlanner.CannotPlanException e) { - endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1); + endLifecycle(stmt, e, -1); SqlPlanningException spe = new SqlPlanningException(SqlPlanningException.PlanningError.UNSUPPORTED_SQL_ERROR, e.getMessage()); return buildNonOkResponse(BadQueryException.STATUS_CODE, spe, sqlQueryId); } - // calcite throws a java.lang.AssertionError which is type error not exception. using throwable will catch all + // Calcite throws a java.lang.AssertionError which is type error not exception. + // Using throwable will catch all. catch (Throwable e) { log.warn(e, "Failed to handle query: %s", sqlQuery); - endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1); + endLifecycle(stmt, e, -1); return buildNonOkResponse( Status.INTERNAL_SERVER_ERROR.getStatusCode(), @@ -227,23 +221,27 @@ public class SqlResource } private void endLifecycleWithoutEmittingMetrics( - String sqlQueryId, - SqlLifecycle lifecycle + HttpStatement stmt ) { - sqlLifecycleManager.remove(sqlQueryId, lifecycle); + sqlLifecycleManager.remove(stmt.sqlQueryId(), stmt); + stmt.closeQuietly(); } private void endLifecycle( - String sqlQueryId, - SqlLifecycle lifecycle, + HttpStatement stmt, @Nullable final Throwable e, - @Nullable final String remoteAddress, final long bytesWritten ) { - lifecycle.finalizeStateAndEmitLogsAndMetrics(e, remoteAddress, bytesWritten); - sqlLifecycleManager.remove(sqlQueryId, lifecycle); + SqlExecutionReporter reporter = stmt.reporter(); + if (e == null) { + reporter.succeeded(bytesWritten); + } else { + reporter.failed(e); + } + sqlLifecycleManager.remove(stmt.sqlQueryId(), stmt); + stmt.close(); } private Response buildNonOkResponse(int status, SanitizableException e, String sqlQueryId) @@ -270,13 +268,18 @@ public class SqlResource { log.debug("Received cancel request for query [%s]", sqlQueryId); - List lifecycles = 
sqlLifecycleManager.getAll(sqlQueryId); + List lifecycles = sqlLifecycleManager.getAll(sqlQueryId); if (lifecycles.isEmpty()) { return Response.status(Status.NOT_FOUND).build(); } + + // Considers only datasource and table resources; not context + // key resources when checking permissions. This means that a user's + // permission to cancel a query depends on the datasource, not the + // context variables used in the query. Set resources = lifecycles .stream() - .flatMap(lifecycle -> lifecycle.getRequiredResourceActions().stream()) + .flatMap(lifecycle -> lifecycle.resources().stream()) .collect(Collectors.toSet()); Access access = AuthorizationUtils.authorizeAllResourceActions( req, @@ -287,7 +290,7 @@ public class SqlResource if (access.isAllowed()) { // should remove only the lifecycles in the snapshot. sqlLifecycleManager.removeAll(sqlQueryId, lifecycles); - lifecycles.forEach(SqlLifecycle::cancel); + lifecycles.forEach(Cancelable::cancel); return Response.status(Status.ACCEPTED).build(); } else { return Response.status(Status.FORBIDDEN).build(); diff --git a/sql/src/main/resources/saffron.properties b/sql/src/main/resources/saffron.properties new file mode 100644 index 00000000000..ec5213c7da8 --- /dev/null +++ b/sql/src/main/resources/saffron.properties @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +#------------------------------------------------------------- +# +# Properties for Calcite (formerly known as "Saffron"). +# Set here to ensure that the properties are absolutely, +# positively read when Calcite first initializes. +# +# This file _should_ be redundant with Calcites, but +# there do seem to be race conditions at various times. + +calcite.default.charset=UTF-16LE +calcite.default.nationalcharset=UTF-16LE +calcite.default.collation.name=UTF-16LE$en_US diff --git a/sql/src/test/java/org/apache/druid/sql/SqlLifecycleManagerTest.java b/sql/src/test/java/org/apache/druid/sql/SqlLifecycleManagerTest.java index 8ddef9fc8d5..74d5439fcec 100644 --- a/sql/src/test/java/org/apache/druid/sql/SqlLifecycleManagerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/SqlLifecycleManagerTest.java @@ -20,13 +20,15 @@ package org.apache.druid.sql; import com.google.common.collect.ImmutableList; -import org.apache.druid.sql.SqlLifecycle.State; +import org.apache.druid.server.security.ResourceAction; +import org.apache.druid.sql.SqlLifecycleManager.Cancelable; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.mockito.Mockito; +import java.util.Collections; import java.util.List; +import java.util.Set; public class SqlLifecycleManagerTest { @@ -38,18 +40,11 @@ public class SqlLifecycleManagerTest lifecycleManager = new SqlLifecycleManager(); } - @Test - public void testAddNonAuthorizedLifeCycle() - { - SqlLifecycle lifecycle = mockLifecycle(State.INITIALIZED); - Assert.assertThrows(AssertionError.class, () -> lifecycleManager.add("sqlId", lifecycle)); - } - @Test public void testAddAuthorizedLifecycle() { final String sqlId = "sqlId"; - SqlLifecycle lifecycle = mockLifecycle(State.AUTHORIZED); + Cancelable lifecycle = mockLifecycle(); lifecycleManager.add(sqlId, lifecycle); Assert.assertEquals(ImmutableList.of(lifecycle), 
lifecycleManager.getAll(sqlId)); } @@ -58,7 +53,7 @@ public class SqlLifecycleManagerTest public void testRemoveValidLifecycle() { final String sqlId = "sqlId"; - SqlLifecycle lifecycle = mockLifecycle(State.AUTHORIZED); + Cancelable lifecycle = mockLifecycle(); lifecycleManager.add(sqlId, lifecycle); Assert.assertEquals(ImmutableList.of(lifecycle), lifecycleManager.getAll(sqlId)); lifecycleManager.remove(sqlId, lifecycle); @@ -69,7 +64,7 @@ public class SqlLifecycleManagerTest public void testRemoveInvalidSqlQueryId() { final String sqlId = "sqlId"; - SqlLifecycle lifecycle = mockLifecycle(State.AUTHORIZED); + Cancelable lifecycle = mockLifecycle(); lifecycleManager.add(sqlId, lifecycle); Assert.assertEquals(ImmutableList.of(lifecycle), lifecycleManager.getAll(sqlId)); lifecycleManager.remove("invalid", lifecycle); @@ -80,10 +75,10 @@ public class SqlLifecycleManagerTest public void testRemoveValidSqlQueryIdDifferntLifecycleObject() { final String sqlId = "sqlId"; - SqlLifecycle lifecycle = mockLifecycle(State.AUTHORIZED); + Cancelable lifecycle = mockLifecycle(); lifecycleManager.add(sqlId, lifecycle); Assert.assertEquals(ImmutableList.of(lifecycle), lifecycleManager.getAll(sqlId)); - lifecycleManager.remove(sqlId, mockLifecycle(State.AUTHORIZED)); + lifecycleManager.remove(sqlId, mockLifecycle()); Assert.assertEquals(ImmutableList.of(lifecycle), lifecycleManager.getAll(sqlId)); } @@ -91,10 +86,10 @@ public class SqlLifecycleManagerTest public void testRemoveAllValidSqlQueryIdSubsetOfLifecycles() { final String sqlId = "sqlId"; - final List lifecycles = ImmutableList.of( - mockLifecycle(State.AUTHORIZED), - mockLifecycle(State.AUTHORIZED), - mockLifecycle(State.AUTHORIZED) + final List lifecycles = ImmutableList.of( + mockLifecycle(), + mockLifecycle(), + mockLifecycle() ); lifecycles.forEach(lifecycle -> lifecycleManager.add(sqlId, lifecycle)); Assert.assertEquals(lifecycles, lifecycleManager.getAll(sqlId)); @@ -106,10 +101,10 @@ public class 
SqlLifecycleManagerTest public void testRemoveAllInvalidSqlQueryId() { final String sqlId = "sqlId"; - final List lifecycles = ImmutableList.of( - mockLifecycle(State.AUTHORIZED), - mockLifecycle(State.AUTHORIZED), - mockLifecycle(State.AUTHORIZED) + final List lifecycles = ImmutableList.of( + mockLifecycle(), + mockLifecycle(), + mockLifecycle() ); lifecycles.forEach(lifecycle -> lifecycleManager.add(sqlId, lifecycle)); Assert.assertEquals(lifecycles, lifecycleManager.getAll(sqlId)); @@ -121,22 +116,34 @@ public class SqlLifecycleManagerTest public void testGetAllReturnsListCopy() { final String sqlId = "sqlId"; - final List lifecycles = ImmutableList.of( - mockLifecycle(State.AUTHORIZED), - mockLifecycle(State.AUTHORIZED), - mockLifecycle(State.AUTHORIZED) + final List lifecycles = ImmutableList.of( + mockLifecycle(), + mockLifecycle(), + mockLifecycle() ); lifecycles.forEach(lifecycle -> lifecycleManager.add(sqlId, lifecycle)); - final List lifecyclesFromGetAll = lifecycleManager.getAll(sqlId); + final List lifecyclesFromGetAll = lifecycleManager.getAll(sqlId); lifecycleManager.removeAll(sqlId, lifecyclesFromGetAll); Assert.assertEquals(lifecycles, lifecyclesFromGetAll); Assert.assertTrue(lifecycleManager.getAll(sqlId).isEmpty()); } - private static SqlLifecycle mockLifecycle(State state) + private static Cancelable mockLifecycle() { - SqlLifecycle lifecycle = Mockito.mock(SqlLifecycle.class); - Mockito.when(lifecycle.getState()).thenReturn(state); - return lifecycle; + return new MockCancellable(); + } + + private static class MockCancellable implements Cancelable + { + @Override + public Set resources() + { + return Collections.emptySet(); + } + + @Override + public void cancel() + { + } } } diff --git a/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java b/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java deleted file mode 100644 index b30772dc450..00000000000 --- a/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java +++ /dev/null 
@@ -1,316 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.sql; - -import com.google.common.base.Suppliers; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import org.apache.calcite.avatica.SqlType; -import org.apache.calcite.avatica.remote.TypedValue; -import org.apache.calcite.sql.parser.SqlParseException; -import org.apache.calcite.tools.RelConversionException; -import org.apache.calcite.tools.ValidationException; -import org.apache.druid.java.util.common.guava.Sequences; -import org.apache.druid.java.util.emitter.service.ServiceEmitter; -import org.apache.druid.java.util.emitter.service.ServiceEventBuilder; -import org.apache.druid.query.DefaultQueryConfig; -import org.apache.druid.query.QueryContext; -import org.apache.druid.query.QueryContexts; -import org.apache.druid.server.QueryStackTests; -import org.apache.druid.server.log.RequestLogger; -import org.apache.druid.server.security.Access; -import org.apache.druid.server.security.Action; -import org.apache.druid.server.security.AuthConfig; -import org.apache.druid.server.security.Resource; -import org.apache.druid.server.security.ResourceAction; -import 
org.apache.druid.server.security.ResourceType; -import org.apache.druid.sql.calcite.planner.DruidPlanner; -import org.apache.druid.sql.calcite.planner.PlannerContext; -import org.apache.druid.sql.calcite.planner.PlannerFactory; -import org.apache.druid.sql.calcite.planner.PlannerResult; -import org.apache.druid.sql.calcite.planner.PrepareResult; -import org.apache.druid.sql.calcite.util.CalciteTests; -import org.apache.druid.sql.http.SqlParameter; -import org.easymock.EasyMock; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import javax.servlet.http.HttpServletRequest; - -import java.io.IOException; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.CopyOnWriteArrayList; - -public class SqlLifecycleTest -{ - private PlannerFactory plannerFactory; - private ServiceEmitter serviceEmitter; - private RequestLogger requestLogger; - private SqlLifecycleFactory sqlLifecycleFactory; - private DefaultQueryConfig defaultQueryConfig; - - @Before - public void setup() - { - this.plannerFactory = EasyMock.createMock(PlannerFactory.class); - this.serviceEmitter = EasyMock.createMock(ServiceEmitter.class); - this.requestLogger = EasyMock.createMock(RequestLogger.class); - this.defaultQueryConfig = new DefaultQueryConfig(ImmutableMap.of("DEFAULT_KEY", "DEFAULT_VALUE")); - - this.sqlLifecycleFactory = new SqlLifecycleFactory( - plannerFactory, - serviceEmitter, - requestLogger, - QueryStackTests.DEFAULT_NOOP_SCHEDULER, - new AuthConfig(), - Suppliers.ofInstance(defaultQueryConfig) - ); - } - - @Test - public void testIgnoredQueryContextParametersAreIgnored() - { - SqlLifecycle lifecycle = sqlLifecycleFactory.factorize(); - final String sql = "select 1 + ?"; - final Map queryContext = ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true"); - lifecycle.initialize(sql, new QueryContext(queryContext)); - Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState()); - 
Assert.assertEquals(2, lifecycle.getQueryContext().getMergedParams().size()); - // should contain only query id, not bySegment since it is not valid for SQL - Assert.assertTrue(lifecycle.getQueryContext().getMergedParams().containsKey(PlannerContext.CTX_SQL_QUERY_ID)); - } - - @Test - public void testDefaultQueryContextIsApplied() - { - SqlLifecycle lifecycle = sqlLifecycleFactory.factorize(); - // lifecycle should not have a query context is there on it when created/factorized - Assert.assertNull(lifecycle.getQueryContext()); - final String sql = "select 1 + ?"; - final Map queryContext = ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true"); - QueryContext testQueryContext = new QueryContext(queryContext); - // default query context isn't applied to query context until lifecycle is initialized - for (String defaultContextKey : defaultQueryConfig.getContext().keySet()) { - Assert.assertFalse(testQueryContext.getMergedParams().containsKey(defaultContextKey)); - } - lifecycle.initialize(sql, testQueryContext); - Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState()); - Assert.assertEquals(2, lifecycle.getQueryContext().getMergedParams().size()); - // should lifecycle should contain default query context values after initialization - for (String defaultContextKey : defaultQueryConfig.getContext().keySet()) { - Assert.assertTrue(lifecycle.getQueryContext().getMergedParams().containsKey(defaultContextKey)); - } - } - - @Test - public void testStateTransition() - throws ValidationException, SqlParseException, RelConversionException, IOException - { - SqlLifecycle lifecycle = sqlLifecycleFactory.factorize(); - final String sql = "select 1 + ?"; - Assert.assertEquals(SqlLifecycle.State.NEW, lifecycle.getState()); - - // test initialize - lifecycle.initialize(sql, new QueryContext()); - Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState()); - List parameters = ImmutableList.of(new SqlParameter(SqlType.BIGINT, 1L).getTypedValue()); - 
lifecycle.setParameters(parameters); - // setting parameters should not change the state - Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState()); - - // test authorization - DruidPlanner mockPlanner = EasyMock.createMock(DruidPlanner.class); - PlannerContext mockPlannerContext = EasyMock.createMock(PlannerContext.class); - EasyMock.expect(plannerFactory.createPlanner(EasyMock.eq(sql), EasyMock.anyObject())).andReturn(mockPlanner).once(); - EasyMock.expect(mockPlanner.getPlannerContext()).andReturn(mockPlannerContext).once(); - mockPlannerContext.setAuthenticationResult(CalciteTests.REGULAR_USER_AUTH_RESULT); - EasyMock.expectLastCall(); - mockPlannerContext.setParameters(parameters); - EasyMock.expectLastCall(); - mockPlanner.validate(); - EasyMock.expectLastCall(); - Set mockActions = new HashSet<>(); - mockActions.add(new ResourceAction(new Resource("dummy", ResourceType.DATASOURCE), Action.READ)); - EasyMock.expect(mockPlanner.resourceActions(EasyMock.eq(false))).andReturn(mockActions).once(); - EasyMock.expectLastCall(); - EasyMock.expect(mockPlanner.authorize(EasyMock.anyObject(), EasyMock.eq(false))).andReturn(Access.OK).once(); - EasyMock.expectLastCall(); - - EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext); - - lifecycle.validateAndAuthorize(CalciteTests.REGULAR_USER_AUTH_RESULT); - Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState()); - EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext); - EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext); - - // test prepare - PrepareResult mockPrepareResult = EasyMock.createMock(PrepareResult.class); - EasyMock.expect(mockPlanner.prepare()).andReturn(mockPrepareResult).once(); - EasyMock.expectLastCall(); - EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult); - lifecycle.prepare(); - 
// prepare doens't change lifecycle state - Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState()); - EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult); - EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult); - - // test plan - PlannerResult mockPlanResult = EasyMock.createMock(PlannerResult.class); - EasyMock.expect(mockPlanner.plan()).andReturn(mockPlanResult).once(); - mockPlanner.close(); - EasyMock.expectLastCall(); - EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - lifecycle.plan(); - Assert.assertEquals(mockPlannerContext, lifecycle.getPlannerContext()); - Assert.assertEquals(SqlLifecycle.State.PLANNED, lifecycle.getState()); - EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - - // test execute - EasyMock.expect(mockPlanResult.run()).andReturn(Sequences.simple(ImmutableList.of(new Object[]{2L}))).once(); - EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - lifecycle.execute(); - Assert.assertEquals(SqlLifecycle.State.EXECUTING, lifecycle.getState()); - EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - - // test emit - EasyMock.expect(mockPlannerContext.getSqlQueryId()).andReturn("id").once(); - CopyOnWriteArrayList nativeQueryIds = new CopyOnWriteArrayList<>(ImmutableList.of("id")); - 
EasyMock.expect(mockPlannerContext.getNativeQueryIds()).andReturn(nativeQueryIds).times(2); - EasyMock.expect(mockPlannerContext.getAuthenticationResult()).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).once(); - - serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class)); - EasyMock.expectLastCall(); - serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class)); - EasyMock.expectLastCall(); - requestLogger.logSqlQuery(EasyMock.anyObject()); - EasyMock.expectLastCall(); - EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - - lifecycle.finalizeStateAndEmitLogsAndMetrics(null, null, 10); - Assert.assertEquals(SqlLifecycle.State.DONE, lifecycle.getState()); - EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - } - - @Test - public void testStateTransitionHttpRequest() - throws ValidationException, SqlParseException, RelConversionException, IOException - { - // this test is a duplicate of testStateTransition except with a slight - // variation of how validate and authorize is run - SqlLifecycle lifecycle = sqlLifecycleFactory.factorize(); - final String sql = "select 1 + ?"; - Assert.assertEquals(SqlLifecycle.State.NEW, lifecycle.getState()); - - // test initialize - lifecycle.initialize(sql, new QueryContext()); - Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState()); - List parameters = ImmutableList.of(new SqlParameter(SqlType.BIGINT, 1L).getTypedValue()); - lifecycle.setParameters(parameters); - // setting parameters should not change the state - Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState()); - - // test authorization - DruidPlanner mockPlanner = EasyMock.createMock(DruidPlanner.class); - PlannerContext 
mockPlannerContext = EasyMock.createMock(PlannerContext.class); - EasyMock.expect(plannerFactory.createPlanner(EasyMock.eq(sql), EasyMock.anyObject())).andReturn(mockPlanner).once(); - EasyMock.expect(mockPlanner.getPlannerContext()).andReturn(mockPlannerContext).once(); - mockPlannerContext.setAuthenticationResult(CalciteTests.REGULAR_USER_AUTH_RESULT); - EasyMock.expectLastCall(); - mockPlannerContext.setParameters(parameters); - EasyMock.expectLastCall(); - mockPlanner.validate(); - EasyMock.expectLastCall(); - Set mockActions = new HashSet<>(); - mockActions.add(new ResourceAction(new Resource("dummy", ResourceType.DATASOURCE), Action.READ)); - EasyMock.expect(mockPlanner.resourceActions(EasyMock.eq(false))).andReturn(mockActions).once(); - EasyMock.expectLastCall(); - EasyMock.expect(mockPlanner.authorize(EasyMock.anyObject(), EasyMock.eq(false))).andReturn(Access.OK).once(); - EasyMock.expectLastCall(); - - // Note: can't check the request usage with mocks: the code is run - // in a function which the mock doesn't actually call. 
- HttpServletRequest request = EasyMock.createMock(HttpServletRequest.class); - EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).once(); - EasyMock.expectLastCall(); - EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request); - - lifecycle.validateAndAuthorize(request); - Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState()); - EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request); - EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request); - - // test prepare - PrepareResult mockPrepareResult = EasyMock.createMock(PrepareResult.class); - EasyMock.expect(mockPlanner.prepare()).andReturn(mockPrepareResult).once(); - EasyMock.expectLastCall(); - EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult); - lifecycle.prepare(); - // prepare doens't change lifecycle state - Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState()); - EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult); - EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult); - - // test plan - PlannerResult mockPlanResult = EasyMock.createMock(PlannerResult.class); - EasyMock.expect(mockPlanner.plan()).andReturn(mockPlanResult).once(); - mockPlanner.close(); - EasyMock.expectLastCall(); - EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - lifecycle.plan(); - Assert.assertEquals(mockPlannerContext, lifecycle.getPlannerContext()); - Assert.assertEquals(SqlLifecycle.State.PLANNED, lifecycle.getState()); - EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, 
mockPlannerContext, mockPrepareResult, mockPlanResult); - EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - - // test execute - EasyMock.expect(mockPlanResult.run()).andReturn(Sequences.simple(ImmutableList.of(new Object[]{2L}))).once(); - EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - lifecycle.execute(); - Assert.assertEquals(SqlLifecycle.State.EXECUTING, lifecycle.getState()); - EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - - // test emit - EasyMock.expect(mockPlannerContext.getSqlQueryId()).andReturn("id").once(); - CopyOnWriteArrayList nativeQueryIds = new CopyOnWriteArrayList<>(ImmutableList.of("id")); - EasyMock.expect(mockPlannerContext.getNativeQueryIds()).andReturn(nativeQueryIds).times(2); - EasyMock.expect(mockPlannerContext.getAuthenticationResult()).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).once(); - - serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class)); - EasyMock.expectLastCall(); - serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class)); - EasyMock.expectLastCall(); - requestLogger.logSqlQuery(EasyMock.anyObject()); - EasyMock.expectLastCall(); - EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - - lifecycle.finalizeStateAndEmitLogsAndMetrics(null, null, 10); - Assert.assertEquals(SqlLifecycle.State.DONE, lifecycle.getState()); - EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult); - EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, 
mockPlannerContext, mockPrepareResult, mockPlanResult); - } -} diff --git a/sql/src/test/java/org/apache/druid/sql/SqlStatementTest.java b/sql/src/test/java/org/apache/druid/sql/SqlStatementTest.java new file mode 100644 index 00000000000..dff9c52e330 --- /dev/null +++ b/sql/src/test/java/org/apache/druid/sql/SqlStatementTest.java @@ -0,0 +1,469 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql; + +import com.google.common.base.Suppliers; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rel.type.RelDataTypeField; +import org.apache.druid.java.util.common.concurrent.Execs; +import org.apache.druid.java.util.common.guava.LazySequence; +import org.apache.druid.java.util.common.guava.Sequence; +import org.apache.druid.java.util.common.io.Closer; +import org.apache.druid.math.expr.ExprMacroTable; +import org.apache.druid.query.DefaultQueryConfig; +import org.apache.druid.query.Query; +import org.apache.druid.query.QueryContexts; +import org.apache.druid.query.QueryRunnerFactoryConglomerate; +import org.apache.druid.server.QueryScheduler; +import org.apache.druid.server.QueryStackTests; +import org.apache.druid.server.initialization.ServerConfig; +import org.apache.druid.server.log.TestRequestLogger; +import org.apache.druid.server.metrics.NoopServiceEmitter; +import org.apache.druid.server.scheduling.HiLoQueryLaningStrategy; +import org.apache.druid.server.scheduling.ManualQueryPrioritizationStrategy; +import org.apache.druid.server.security.AuthConfig; +import org.apache.druid.server.security.AuthenticationResult; +import org.apache.druid.server.security.ForbiddenException; +import org.apache.druid.sql.SqlPlanningException.PlanningError; +import org.apache.druid.sql.calcite.planner.CalciteRulesManager; +import org.apache.druid.sql.calcite.planner.DruidOperatorTable; +import org.apache.druid.sql.calcite.planner.PlannerConfig; +import org.apache.druid.sql.calcite.planner.PlannerContext; +import org.apache.druid.sql.calcite.planner.PlannerFactory; +import org.apache.druid.sql.calcite.planner.PrepareResult; +import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; +import 
org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.calcite.util.QueryLogHook; +import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; +import org.apache.druid.sql.http.SqlQuery; +import org.easymock.EasyMock; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import javax.servlet.http.HttpServletRequest; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +public class SqlStatementTest +{ + private static QueryRunnerFactoryConglomerate conglomerate; + private static Closer resourceCloser; + @Rule + public TemporaryFolder temporaryFolder = new TemporaryFolder(); + @Rule + public QueryLogHook queryLogHook = QueryLogHook.create(); + private SpecificSegmentsQuerySegmentWalker walker = null; + private TestRequestLogger testRequestLogger; + private ListeningExecutorService executorService; + private SqlStatementFactory sqlLifecycleFactory; + private final DefaultQueryConfig defaultQueryConfig = new DefaultQueryConfig( + ImmutableMap.of("DEFAULT_KEY", "DEFAULT_VALUE")); + + @BeforeClass + public static void setUpClass() + { + resourceCloser = Closer.create(); + conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(resourceCloser); + } + + @AfterClass + public static void tearDownClass() throws IOException + { + resourceCloser.close(); + } + + @Before + public void setUp() throws Exception + { + final QueryScheduler scheduler = new QueryScheduler( + 5, + ManualQueryPrioritizationStrategy.INSTANCE, + new HiLoQueryLaningStrategy(40), + new ServerConfig() + ) + { + @Override + public Sequence run(Query query, Sequence resultSequence) + { + return super.run( + 
query, + new LazySequence(() -> { + return resultSequence; + }) + ); + } + }; + + executorService = MoreExecutors.listeningDecorator(Execs.multiThreaded(8, "test_sql_resource_%s")); + walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder(), scheduler); + + final PlannerConfig plannerConfig = PlannerConfig.builder().serializeComplexValues(false).build(); + final DruidSchemaCatalog rootSchema = CalciteTests.createMockRootSchema( + conglomerate, + walker, + plannerConfig, + CalciteTests.TEST_AUTHORIZER_MAPPER + ); + final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable(); + final ExprMacroTable macroTable = CalciteTests.createExprMacroTable(); + + testRequestLogger = new TestRequestLogger(); + + final PlannerFactory plannerFactory = new PlannerFactory( + rootSchema, + CalciteTests.createMockQueryMakerFactory(walker, conglomerate), + operatorTable, + macroTable, + plannerConfig, + CalciteTests.TEST_AUTHORIZER_MAPPER, + CalciteTests.getJsonMapper(), + CalciteTests.DRUID_SCHEMA_NAME, + new CalciteRulesManager(ImmutableSet.of()) + ); + + this.sqlLifecycleFactory = new SqlStatementFactory( + plannerFactory, + new NoopServiceEmitter(), + testRequestLogger, + QueryStackTests.DEFAULT_NOOP_SCHEDULER, + new AuthConfig(), + Suppliers.ofInstance(defaultQueryConfig), + new SqlLifecycleManager() + ); + } + + @After + public void tearDown() throws Exception + { + walker.close(); + walker = null; + executorService.shutdownNow(); + executorService.awaitTermination(2, TimeUnit.SECONDS); + } + + HttpServletRequest request(boolean ok) + { + HttpServletRequest req = EasyMock.createStrictMock(HttpServletRequest.class); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT) + .anyTimes(); + EasyMock.expect(req.getRemoteAddr()).andReturn(null).once(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)) + .andReturn(null) + .anyTimes(); + 
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) + .andReturn(null) + .anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT) + .anyTimes(); + req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, ok); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT) + .anyTimes(); + EasyMock.replay(req); + return req; + } + + //----------------------------------------------------------------- + // Direct statements: using an auth result for verification. + + private SqlQueryPlus queryPlus(final String sql, final AuthenticationResult authResult) + { + return SqlQueryPlus.builder(sql).auth(authResult).build(); + } + + @Test + public void testDirectHappyPath() + { + SqlQueryPlus sqlReq = queryPlus( + "SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.foo", + CalciteTests.REGULAR_USER_AUTH_RESULT); + DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq); + List results = stmt.execute().toList(); + assertEquals(1, results.size()); + assertEquals(6L, results.get(0)[0]); + assertEquals("foo", results.get(0)[1]); + } + + @Test + public void testDirectSyntaxError() + { + SqlQueryPlus sqlReq = queryPlus( + "SELECT COUNT(*) AS cnt, 'foo' AS", + CalciteTests.REGULAR_USER_AUTH_RESULT); + DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq); + try { + stmt.execute(); + fail(); + } + catch (SqlPlanningException e) { + // Expected + assertEquals(PlanningError.SQL_PARSE_ERROR.getErrorCode(), e.getErrorCode()); + } + } + + @Test + public void testDirectValidationError() + { + SqlQueryPlus sqlReq = queryPlus( + "SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.bogus", + CalciteTests.REGULAR_USER_AUTH_RESULT); + DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq); + try { + stmt.execute(); + fail(); + } + catch 
(SqlPlanningException e) { + // Expected + assertEquals(PlanningError.VALIDATION_ERROR.getErrorCode(), e.getErrorCode()); + } + } + + @Test + public void testDirectPermissionError() + { + SqlQueryPlus sqlReq = queryPlus( + "select count(*) from forbiddenDatasource", + CalciteTests.REGULAR_USER_AUTH_RESULT); + DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq); + try { + stmt.execute(); + fail(); + } + catch (ForbiddenException e) { + // Expected + } + } + + //----------------------------------------------------------------- + // HTTP statements: using a servlet request for verification. + + private SqlQuery makeQuery(String sql) + { + return new SqlQuery( + sql, + null, + false, + false, + false, + null, + null + ); + } + + @Test + public void testHttpHappyPath() + { + HttpStatement stmt = sqlLifecycleFactory.httpStatement( + makeQuery("SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.foo"), + request(true) + ); + List results = stmt.execute().toList(); + assertEquals(1, results.size()); + assertEquals(6L, results.get(0)[0]); + assertEquals("foo", results.get(0)[1]); + } + + @Test + public void testHttpSyntaxError() + { + HttpStatement stmt = sqlLifecycleFactory.httpStatement( + makeQuery("SELECT COUNT(*) AS cnt, 'foo' AS"), + request(true) + ); + try { + stmt.execute(); + fail(); + } + catch (SqlPlanningException e) { + // Expected + assertEquals(PlanningError.SQL_PARSE_ERROR.getErrorCode(), e.getErrorCode()); + } + } + + @Test + public void testHttpValidationError() + { + HttpStatement stmt = sqlLifecycleFactory.httpStatement( + makeQuery("SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.bogus"), + request(true) + ); + try { + stmt.execute(); + fail(); + } + catch (SqlPlanningException e) { + // Expected + assertEquals(PlanningError.VALIDATION_ERROR.getErrorCode(), e.getErrorCode()); + } + } + + @Test + public void testHttpPermissionError() + { + HttpStatement stmt = sqlLifecycleFactory.httpStatement( + makeQuery("select count(*) from 
forbiddenDatasource"), + request(false) + ); + try { + stmt.execute(); + fail(); + } + catch (ForbiddenException e) { + // Expected + } + } + + //----------------------------------------------------------------- + // Prepared statements: using a prepare/execute model. + + @Test + public void testJdbcHappyPath() + { + SqlQueryPlus sqlReq = queryPlus( + "SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.foo", + CalciteTests.REGULAR_USER_AUTH_RESULT); + PreparedStatement stmt = sqlLifecycleFactory.preparedStatement(sqlReq); + + PrepareResult prepareResult = stmt.prepare(); + RelDataType rowType = prepareResult.getRowType(); + assertEquals(2, rowType.getFieldCount()); + List fields = rowType.getFieldList(); + assertEquals("cnt", fields.get(0).getName()); + assertEquals("BIGINT", fields.get(0).getType().toString()); + assertEquals("TheFoo", fields.get(1).getName()); + assertEquals("CHAR(3)", fields.get(1).getType().toString()); + + // JDBC supports a prepare once, execute many model + for (int i = 0; i < 3; i++) { + List results = stmt + .execute(Collections.emptyList()) + .execute() + .toList(); + assertEquals(1, results.size()); + assertEquals(6L, results.get(0)[0]); + assertEquals("foo", results.get(0)[1]); + } + } + + @Test + public void testJdbcSyntaxError() + { + SqlQueryPlus sqlReq = queryPlus( + "SELECT COUNT(*) AS cnt, 'foo' AS", + CalciteTests.REGULAR_USER_AUTH_RESULT); + PreparedStatement stmt = sqlLifecycleFactory.preparedStatement(sqlReq); + try { + stmt.prepare(); + fail(); + } + catch (SqlPlanningException e) { + // Expected + assertEquals(PlanningError.SQL_PARSE_ERROR.getErrorCode(), e.getErrorCode()); + } + } + + @Test + public void testJdbcValidationError() + { + SqlQueryPlus sqlReq = queryPlus( + "SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.bogus", + CalciteTests.REGULAR_USER_AUTH_RESULT); + PreparedStatement stmt = sqlLifecycleFactory.preparedStatement(sqlReq); + try { + stmt.prepare(); + fail(); + } + catch (SqlPlanningException e) { + // 
Expected + assertEquals(PlanningError.VALIDATION_ERROR.getErrorCode(), e.getErrorCode()); + } + } + + @Test + public void testJdbcPermissionError() + { + SqlQueryPlus sqlReq = queryPlus( + "select count(*) from forbiddenDatasource", + CalciteTests.REGULAR_USER_AUTH_RESULT); + PreparedStatement stmt = sqlLifecycleFactory.preparedStatement(sqlReq); + try { + stmt.prepare(); + fail(); + } + catch (ForbiddenException e) { + // Expected + } + } + + //----------------------------------------------------------------- + // Generic tests. + + @Test + public void testIgnoredQueryContextParametersAreIgnored() + { + SqlQueryPlus sqlReq = SqlQueryPlus + .builder("select 1 + ?") + .context(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true")) + .auth(CalciteTests.REGULAR_USER_AUTH_RESULT) + .build(); + DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq); + Map context = stmt.sqlRequest().context().getMergedParams(); + Assert.assertEquals(2, context.size()); + // should contain only query id, not bySegment since it is not valid for SQL + Assert.assertTrue(context.containsKey(PlannerContext.CTX_SQL_QUERY_ID)); + } + + @Test + public void testDefaultQueryContextIsApplied() + { + SqlQueryPlus sqlReq = SqlQueryPlus + .builder("select 1 + ?") + .context(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true")) + .auth(CalciteTests.REGULAR_USER_AUTH_RESULT) + .build(); + DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq); + Map context = stmt.sqlRequest().context().getMergedParams(); + Assert.assertEquals(2, context.size()); + // Statement should contain default query context values + for (String defaultContextKey : defaultQueryConfig.getContext().keySet()) { + Assert.assertTrue(context.containsKey(defaultContextKey)); + } + } +} diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index 81167d919f2..858a2773711 100644 --- 
a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -29,9 +29,7 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.google.inject.Binder; import com.google.inject.Injector; -import com.google.inject.Module; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.Multibinder; import com.google.inject.name.Names; @@ -42,9 +40,9 @@ import org.apache.calcite.avatica.MissingResultsException; import org.apache.calcite.avatica.NoSuchStatementException; import org.apache.calcite.avatica.server.AbstractAvaticaHandler; import org.apache.druid.common.config.NullHandling; -import org.apache.druid.guice.GuiceInjectors; import org.apache.druid.guice.LazySingleton; -import org.apache.druid.initialization.Initialization; +import org.apache.druid.guice.StartupInjectorBuilder; +import org.apache.druid.initialization.CoreInjectorBuilder; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.StringUtils; @@ -194,46 +192,39 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, CalciteTests.TEST_AUTHORIZER_MAPPER); testRequestLogger = new TestRequestLogger(); - injector = Initialization.makeInjectorWithModules( - GuiceInjectors.makeStartupInjector(), - ImmutableList.of( - new Module() - { - @Override - public void configure(Binder binder) - { - binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); - 
binder.bind(AuthenticatorMapper.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_MAPPER); - binder.bind(AuthorizerMapper.class).toInstance(CalciteTests.TEST_AUTHORIZER_MAPPER); - binder.bind(Escalator.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_ESCALATOR); - binder.bind(RequestLogger.class).toInstance(testRequestLogger); - binder.bind(DruidSchemaCatalog.class).toInstance(rootSchema); - for (NamedSchema schema : rootSchema.getNamedSchemas().values()) { - Multibinder.newSetBinder(binder, NamedSchema.class).addBinding().toInstance(schema); - } - binder.bind(QueryLifecycleFactory.class) - .toInstance(CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate)); - binder.bind(DruidOperatorTable.class).toInstance(operatorTable); - binder.bind(ExprMacroTable.class).toInstance(macroTable); - binder.bind(PlannerConfig.class).toInstance(plannerConfig); - binder.bind(String.class) - .annotatedWith(DruidSchemaName.class) - .toInstance(CalciteTests.DRUID_SCHEMA_NAME); - binder.bind(AvaticaServerConfig.class).toInstance(AVATICA_CONFIG); - binder.bind(ServiceEmitter.class).to(NoopServiceEmitter.class); - binder.bind(QuerySchedulerProvider.class).in(LazySingleton.class); - binder.bind(QueryScheduler.class) - .toProvider(QuerySchedulerProvider.class) - .in(LazySingleton.class); - binder.bind(QueryMakerFactory.class).to(NativeQueryMakerFactory.class); - binder.bind(new TypeLiteral>(){}).toInstance(Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of()))); - binder.bind(CalciteRulesManager.class).toInstance(new CalciteRulesManager(ImmutableSet.of())); - } + injector = new CoreInjectorBuilder(new StartupInjectorBuilder().build()) + .addModule(binder -> { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); + 
binder.bind(AuthenticatorMapper.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_MAPPER); + binder.bind(AuthorizerMapper.class).toInstance(CalciteTests.TEST_AUTHORIZER_MAPPER); + binder.bind(Escalator.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_ESCALATOR); + binder.bind(RequestLogger.class).toInstance(testRequestLogger); + binder.bind(DruidSchemaCatalog.class).toInstance(rootSchema); + for (NamedSchema schema : rootSchema.getNamedSchemas().values()) { + Multibinder.newSetBinder(binder, NamedSchema.class).addBinding().toInstance(schema); } - ) - ); + binder.bind(QueryLifecycleFactory.class) + .toInstance(CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate)); + binder.bind(DruidOperatorTable.class).toInstance(operatorTable); + binder.bind(ExprMacroTable.class).toInstance(macroTable); + binder.bind(PlannerConfig.class).toInstance(plannerConfig); + binder.bind(String.class) + .annotatedWith(DruidSchemaName.class) + .toInstance(CalciteTests.DRUID_SCHEMA_NAME); + binder.bind(AvaticaServerConfig.class).toInstance(AVATICA_CONFIG); + binder.bind(ServiceEmitter.class).to(NoopServiceEmitter.class); + binder.bind(QuerySchedulerProvider.class).in(LazySingleton.class); + binder.bind(QueryScheduler.class) + .toProvider(QuerySchedulerProvider.class) + .in(LazySingleton.class); + binder.bind(QueryMakerFactory.class).to(NativeQueryMakerFactory.class); + binder.bind(new TypeLiteral>(){}).toInstance(Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of()))); + binder.bind(CalciteRulesManager.class).toInstance(new CalciteRulesManager(ImmutableSet.of())); + } + ) + .build(); druidMeta = injector.getInstance(DruidMeta.class); final AbstractAvaticaHandler handler = this.getAvaticaHandler(druidMeta); @@ -779,10 +770,9 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase @Test public void testTooManyStatements() throws SQLException { - client.createStatement(); - client.createStatement(); - client.createStatement(); - client.createStatement(); + for 
(int i = 0; i < 4; i++) { + client.createStatement(); + } expectedException.expect(AvaticaClientRuntimeException.class); expectedException.expectMessage("Too many open statements, limit is [4]"); @@ -792,16 +782,9 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase @Test public void testNotTooManyStatementsWhenYouCloseThem() throws SQLException { - client.createStatement().close(); - client.createStatement().close(); - client.createStatement().close(); - client.createStatement().close(); - client.createStatement().close(); - client.createStatement().close(); - client.createStatement().close(); - client.createStatement().close(); - client.createStatement().close(); - client.createStatement().close(); + for (int i = 0; i < 10; i++) { + client.createStatement().close(); + } } /** @@ -1133,8 +1116,13 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase catch (SQLException e) { // Expected } - // SqlLifecycle does not allow logging for security failures. - Assert.assertEquals(0, testRequestLogger.getSqlQueryLogs().size()); + Assert.assertEquals(1, testRequestLogger.getSqlQueryLogs().size()); + { + final Map stats = testRequestLogger.getSqlQueryLogs().get(0).getQueryStats().getStats(); + Assert.assertEquals(false, stats.get("success")); + Assert.assertEquals("regularUser", stats.get("identity")); + Assert.assertTrue(stats.containsKey("exception")); + } } @Test @@ -1180,8 +1168,13 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase catch (SQLException e) { // Expected } - // SqlLifecycle does not allow logging for security failures. 
- Assert.assertEquals(0, testRequestLogger.getSqlQueryLogs().size()); + Assert.assertEquals(1, testRequestLogger.getSqlQueryLogs().size()); + { + final Map stats = testRequestLogger.getSqlQueryLogs().get(0).getQueryStats().getStats(); + Assert.assertEquals(false, stats.get("success")); + Assert.assertEquals("regularUser", stats.get("identity")); + Assert.assertTrue(stats.containsKey("exception")); + } } @Test @@ -1519,6 +1512,28 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase } } + /** + * Verify that a security exception is mapped to the correct Avatica SQL error codes. + */ + @Test + public void testUnauthorizedTable() + { + final String query = "SELECT * FROM " + CalciteTests.FORBIDDEN_DATASOURCE; + final String expectedError = "Error 2 (00002) : Error while executing SQL \"" + + query + "\": Remote driver error: Unauthorized"; + try (Statement statement = client.createStatement()) { + statement.executeQuery(query); + } + catch (SQLException e) { + Assert.assertEquals( + e.getMessage(), + expectedError + ); + return; + } + Assert.fail("Test failed, did not get SQLException"); + } + // Default implementation is for JSON to allow debugging of tests. 
protected String getJdbcConnectionString(final int port) { diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java index fba413d9909..0528e6cf9f3 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java @@ -20,23 +20,22 @@ package org.apache.druid.sql.avatica; import com.google.common.base.Function; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import org.apache.calcite.avatica.ColumnMetaData; import org.apache.calcite.avatica.Meta; import org.apache.calcite.avatica.remote.TypedValue; -import org.apache.calcite.tools.RelConversionException; import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.math.expr.ExprMacroTable; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.security.AllowAllAuthenticator; import org.apache.druid.server.security.AuthTestUtils; -import org.apache.druid.sql.SqlLifecycleFactory; import org.apache.druid.sql.SqlQueryPlus; +import org.apache.druid.sql.SqlStatementFactory; import org.apache.druid.sql.calcite.planner.CalciteRulesManager; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; import org.apache.druid.sql.calcite.planner.PlannerConfig; @@ -93,8 +92,7 @@ public class DruidStatementTest extends CalciteTestBase } private SpecificSegmentsQuerySegmentWalker walker; - private SqlLifecycleFactory sqlLifecycleFactory; - private DruidConnection conn; + private SqlStatementFactory sqlLifecycleFactory; @Before public void setUp() throws Exception @@ -116,14 +114,12 @@ public class 
DruidStatementTest extends CalciteTestBase CalciteTests.DRUID_SCHEMA_NAME, new CalciteRulesManager(ImmutableSet.of()) ); - sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(plannerFactory); - conn = new DruidConnection("dummy", 4, ImmutableMap.of(), ImmutableMap.of()); + this.sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(plannerFactory); } @After public void tearDown() throws Exception { - conn.close(); walker.close(); walker = null; } @@ -139,14 +135,15 @@ public class DruidStatementTest extends CalciteTestBase private DruidJdbcStatement jdbcStatement() { return new DruidJdbcStatement( - conn, + "", 0, + new QueryContext(), sqlLifecycleFactory ); } @Test - public void testSubQueryWithOrderByDirect() throws RelConversionException + public void testSubQueryWithOrderByDirect() { SqlQueryPlus queryPlus = new SqlQueryPlus( SUB_QUERY_WITH_ORDER_BY, @@ -167,7 +164,7 @@ public class DruidStatementTest extends CalciteTestBase } @Test - public void testFetchPastEOFDirect() throws RelConversionException + public void testFetchPastEOFDirect() { SqlQueryPlus queryPlus = new SqlQueryPlus( SUB_QUERY_WITH_ORDER_BY, @@ -210,22 +207,6 @@ public class DruidStatementTest extends CalciteTestBase } } - @Test - public void testSignatureDirect() throws RelConversionException - { - SqlQueryPlus queryPlus = new SqlQueryPlus( - SELECT_STAR_FROM_FOO, - null, - null, - AllowAllAuthenticator.ALLOW_ALL_RESULT - ); - try (final DruidJdbcStatement statement = jdbcStatement()) { - // Check signature. - statement.execute(queryPlus, -1); - verifySignature(statement.getSignature()); - } - } - /** * Ensure an error is thrown if the client attempts to fetch from a * statement after its result set is closed. 
@@ -253,7 +234,7 @@ public class DruidStatementTest extends CalciteTestBase } @Test - public void testSubQueryWithOrderByDirectTwice() throws RelConversionException + public void testSubQueryWithOrderByDirectTwice() { SqlQueryPlus queryPlus = new SqlQueryPlus( SUB_QUERY_WITH_ORDER_BY, @@ -298,7 +279,7 @@ public class DruidStatementTest extends CalciteTestBase } @Test - public void testSelectAllInFirstFrameDirect() throws RelConversionException + public void testSelectAllInFirstFrameDirect() { SqlQueryPlus queryPlus = new SqlQueryPlus( SELECT_FROM_FOO, @@ -338,10 +319,9 @@ public class DruidStatementTest extends CalciteTestBase /** * Test results spread over two frames. Also checks various state-related * methods. - * @throws RelConversionException */ @Test - public void testSelectSplitOverTwoFramesDirect() throws RelConversionException + public void testSelectSplitOverTwoFramesDirect() { SqlQueryPlus queryPlus = new SqlQueryPlus( SELECT_FROM_FOO, @@ -376,10 +356,9 @@ public class DruidStatementTest extends CalciteTestBase /** * Verify that JDBC automatically closes the first result set when we * open a second for the same statement. - * @throws RelConversionException */ @Test - public void testTwoFramesAutoCloseDirect() throws RelConversionException + public void testTwoFramesAutoCloseDirect() { SqlQueryPlus queryPlus = new SqlQueryPlus( SELECT_FROM_FOO, @@ -419,10 +398,9 @@ public class DruidStatementTest extends CalciteTestBase /** * Test that closing a statement with pending results automatically * closes the underlying result set. 
- * @throws RelConversionException */ @Test - public void testTwoFramesCloseWithResultSetDirect() throws RelConversionException + public void testTwoFramesCloseWithResultSetDirect() { SqlQueryPlus queryPlus = new SqlQueryPlus( SELECT_FROM_FOO, @@ -476,6 +454,22 @@ public class DruidStatementTest extends CalciteTestBase ); } + @Test + public void testSignatureDirect() + { + SqlQueryPlus queryPlus = new SqlQueryPlus( + SELECT_STAR_FROM_FOO, + null, + null, + AllowAllAuthenticator.ALLOW_ALL_RESULT + ); + try (final DruidJdbcStatement statement = jdbcStatement()) { + // Check signature. + statement.execute(queryPlus, -1); + verifySignature(statement.getSignature()); + } + } + @SuppressWarnings("unchecked") private void verifySignature(Meta.Signature signature) { @@ -520,10 +514,9 @@ public class DruidStatementTest extends CalciteTestBase private DruidJdbcPreparedStatement jdbcPreparedStatement(SqlQueryPlus queryPlus) { return new DruidJdbcPreparedStatement( - conn, + "", 0, - queryPlus, - sqlLifecycleFactory, + sqlLifecycleFactory.preparedStatement(queryPlus), Long.MAX_VALUE ); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java index bcd33dad138..1049228f7b9 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java @@ -28,6 +28,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.apache.calcite.plan.RelOptPlanner; +import org.apache.calcite.rel.type.RelDataType; import org.apache.druid.annotations.UsedByJUnitParamsRunner; import org.apache.druid.common.config.NullHandling; import org.apache.druid.hll.VersionOneHyperLogLogCollector; @@ -45,7 +46,6 @@ import org.apache.druid.query.DataSource; import org.apache.druid.query.Druids; import 
org.apache.druid.query.JoinDataSource; import org.apache.druid.query.Query; -import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryContexts; import org.apache.druid.query.QueryDataSource; import org.apache.druid.query.QueryRunnerFactoryConglomerate; @@ -84,8 +84,10 @@ import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.server.security.ForbiddenException; import org.apache.druid.server.security.ResourceAction; -import org.apache.druid.sql.SqlLifecycle; -import org.apache.druid.sql.SqlLifecycleFactory; +import org.apache.druid.sql.DirectStatement; +import org.apache.druid.sql.PreparedStatement; +import org.apache.druid.sql.SqlQueryPlus; +import org.apache.druid.sql.SqlStatementFactory; import org.apache.druid.sql.calcite.expression.DruidExpression; import org.apache.druid.sql.calcite.external.ExternalDataSource; import org.apache.druid.sql.calcite.planner.CalciteRulesManager; @@ -96,6 +98,7 @@ import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.schema.NoopDruidSchemaManager; +import org.apache.druid.sql.calcite.table.RowSignatures; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; import org.apache.druid.sql.calcite.util.QueryLogHook; @@ -117,6 +120,7 @@ import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import javax.annotation.Nullable; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -149,77 +153,36 @@ public class BaseCalciteQueryTest extends CalciteTestBase public static final Logger log = new Logger(BaseCalciteQueryTest.class); public static final PlannerConfig PLANNER_CONFIG_DEFAULT = new PlannerConfig(); - public static final PlannerConfig 
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE = new PlannerConfig() - { - @Override - public boolean shouldSerializeComplexValues() - { - return false; - } - }; - public static final PlannerConfig PLANNER_CONFIG_REQUIRE_TIME_CONDITION = new PlannerConfig() - { - @Override - public boolean isRequireTimeCondition() - { - return true; - } - }; - public static final PlannerConfig PLANNER_CONFIG_NO_TOPN = new PlannerConfig() - { - @Override - public int getMaxTopNLimit() - { - return 0; - } - }; - public static final PlannerConfig PLANNER_CONFIG_NO_HLL = new PlannerConfig() - { - @Override - public boolean isUseApproximateCountDistinct() - { - return false; - } - }; - public static final PlannerConfig PLANNER_CONFIG_LOS_ANGELES = new PlannerConfig() - { - @Override - public DateTimeZone getSqlTimeZone() - { - return DateTimes.inferTzFromString("America/Los_Angeles"); - } - }; + public static final PlannerConfig PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE = + PlannerConfig.builder().serializeComplexValues(false).build(); - public static final PlannerConfig PLANNER_CONFIG_AUTHORIZE_SYS_TABLES = new PlannerConfig() - { - @Override - public boolean isAuthorizeSystemTablesDirectly() - { - return true; - } - }; + public static final PlannerConfig PLANNER_CONFIG_REQUIRE_TIME_CONDITION = + PlannerConfig.builder().requireTimeCondition(true).build(); - public static final PlannerConfig PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN = new PlannerConfig() - { - @Override - public boolean isUseNativeQueryExplain() - { - return true; - } - }; + public static final PlannerConfig PLANNER_CONFIG_NO_TOPN = + PlannerConfig.builder().maxTopNLimit(0).build(); + + public static final PlannerConfig PLANNER_CONFIG_NO_HLL = + PlannerConfig.builder().useApproximateCountDistinct(false).build(); + + public static final String LOS_ANGELES = "America/Los_Angeles"; + public static final PlannerConfig PLANNER_CONFIG_LOS_ANGELES = + PlannerConfig + .builder() + .sqlTimeZone(DateTimes.inferTzFromString(LOS_ANGELES)) + 
.build(); + + public static final PlannerConfig PLANNER_CONFIG_AUTHORIZE_SYS_TABLES = + PlannerConfig.builder().authorizeSystemTablesDirectly(true).build(); + + public static final PlannerConfig PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN = + PlannerConfig.builder().useNativeQueryExplain(true).build(); public static final int MAX_NUM_IN_FILTERS = 100; - public static final PlannerConfig PLANNER_CONFIG_MAX_NUMERIC_IN_FILTER = new PlannerConfig() - { - @Override - public int getMaxNumericInFilters() - { - return MAX_NUM_IN_FILTERS; - } - }; + public static final PlannerConfig PLANNER_CONFIG_MAX_NUMERIC_IN_FILTER = + PlannerConfig.builder().maxNumericInFilters(MAX_NUM_IN_FILTERS).build(); public static final String DUMMY_SQL_ID = "dummy"; - public static final String LOS_ANGELES = "America/Los_Angeles"; private static final ImmutableMap.Builder DEFAULT_QUERY_CONTEXT_BUILDER = ImmutableMap.builder() @@ -644,9 +607,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase public void testQuery( final String sql, - final List expectedQueries, + final List> expectedQueries, final List expectedResults - ) throws Exception + ) { testQuery( PLANNER_CONFIG_DEFAULT, @@ -662,10 +625,10 @@ public class BaseCalciteQueryTest extends CalciteTestBase public void testQuery( final String sql, - final List expectedQueries, + final List> expectedQueries, final List expectedResults, final RowSignature expectedResultRowSignature - ) throws Exception + ) { testQuery( PLANNER_CONFIG_DEFAULT, @@ -682,9 +645,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase public void testQuery( final String sql, final Map context, - final List expectedQueries, + final List> expectedQueries, final List expectedResults - ) throws Exception + ) { testQuery( PLANNER_CONFIG_DEFAULT, @@ -700,10 +663,10 @@ public class BaseCalciteQueryTest extends CalciteTestBase public void testQuery( final String sql, - final List expectedQueries, + final List> expectedQueries, final List expectedResults, final List 
parameters - ) throws Exception + ) { testQuery( PLANNER_CONFIG_DEFAULT, @@ -721,9 +684,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase final PlannerConfig plannerConfig, final String sql, final AuthenticationResult authenticationResult, - final List expectedQueries, + final List> expectedQueries, final List expectedResults - ) throws Exception + ) { testQuery( plannerConfig, @@ -740,9 +703,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase public void testQuery( final String sql, final Map context, - final List expectedQueries, + final List> expectedQueries, final ResultsVerifier expectedResultsVerifier - ) throws Exception + ) { testQuery( PLANNER_CONFIG_DEFAULT, @@ -761,9 +724,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase final Map queryContext, final String sql, final AuthenticationResult authenticationResult, - final List expectedQueries, + final List> expectedQueries, final List expectedResults - ) throws Exception + ) { log.info("SQL: %s", sql); queryLogHook.clearRecordedQueries(); @@ -778,9 +741,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase final List parameters, final String sql, final AuthenticationResult authenticationResult, - final List expectedQueries, + final List> expectedQueries, final List expectedResults - ) throws Exception + ) { testQuery( plannerConfig, @@ -800,10 +763,10 @@ public class BaseCalciteQueryTest extends CalciteTestBase final List parameters, final String sql, final AuthenticationResult authenticationResult, - final List expectedQueries, + final List> expectedQueries, final List expectedResults, final RowSignature expectedResultSignature - ) throws Exception + ) { testQuery( plannerConfig, @@ -823,10 +786,10 @@ public class BaseCalciteQueryTest extends CalciteTestBase final List parameters, final String sql, final AuthenticationResult authenticationResult, - final List expectedQueries, + final List> expectedQueries, final ResultsVerifier expectedResultsVerifier, @Nullable 
final Consumer expectedExceptionInitializer - ) throws Exception + ) { log.info("SQL: %s", sql); @@ -849,8 +812,8 @@ public class BaseCalciteQueryTest extends CalciteTestBase theQueryContext.put(QueryContexts.VECTOR_SIZE_KEY, 2); // Small vector size to ensure we use more than one. } - final List theQueries = new ArrayList<>(); - for (Query query : expectedQueries) { + final List> theQueries = new ArrayList<>(); + for (Query query : expectedQueries) { theQueries.add(recursivelyOverrideContext(query, theQueryContext)); } @@ -872,7 +835,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase final List parameters, final String sql, final AuthenticationResult authenticationResult - ) throws Exception + ) { return getResults( plannerConfig, @@ -897,9 +860,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase final ExprMacroTable macroTable, final AuthorizerMapper authorizerMapper, final ObjectMapper objectMapper - ) throws Exception + ) { - final SqlLifecycleFactory sqlLifecycleFactory = getSqlLifecycleFactory( + final SqlStatementFactory sqlLifecycleFactory = getSqlLifecycleFactory( plannerConfig, new AuthConfig(), operatorTable, @@ -907,23 +870,24 @@ public class BaseCalciteQueryTest extends CalciteTestBase authorizerMapper, objectMapper ); - SqlLifecycle lifecycle = sqlLifecycleFactory.factorize(); - - Pair> result = lifecycle.runSimple( - sql, - queryContext, - parameters, - authenticationResult + final DirectStatement stmt = sqlLifecycleFactory.directStatement( + SqlQueryPlus.builder(sql) + .context(queryContext) + .sqlParameters(parameters) + .auth(authenticationResult) + .build() ); + Sequence results = stmt.execute(); + RelDataType rowType = stmt.prepareResult().getRowType(); return new Pair<>( - result.lhs, - result.rhs.toList() + RowSignatures.fromRelDataType(rowType.getFieldNames(), rowType), + results.toList() ); } public void verifyResults( final String sql, - final List expectedQueries, + final List> expectedQueries, final List 
expectedResults, final Pair> results ) @@ -933,7 +897,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase public void verifyResults( final String sql, - final List expectedQueries, + final List> expectedQueries, final Pair> results, final ResultsVerifier expectedResultsVerifier ) @@ -950,7 +914,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase private void verifyQueries( final String sql, - @Nullable final List expectedQueries + @Nullable final List> expectedQueries ) { if (expectedQueries != null) { @@ -997,7 +961,6 @@ public class BaseCalciteQueryTest extends CalciteTestBase } public void testQueryThrows(final String sql, Consumer expectedExceptionInitializer) - throws Exception { testQueryThrows(sql, new HashMap<>(QUERY_CONTEXT_DEFAULT), ImmutableList.of(), expectedExceptionInitializer); } @@ -1005,9 +968,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase public void testQueryThrows( final String sql, final Map queryContext, - final List expectedQueries, + final List> expectedQueries, final Consumer expectedExceptionInitializer - ) throws Exception + ) { testQuery( PLANNER_CONFIG_DEFAULT, @@ -1038,7 +1001,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase AuthenticationResult authenticationResult ) { - SqlLifecycleFactory lifecycleFactory = getSqlLifecycleFactory( + SqlStatementFactory lifecycleFactory = getSqlLifecycleFactory( plannerConfig, authConfig, createOperatorTable(), @@ -1047,12 +1010,16 @@ public class BaseCalciteQueryTest extends CalciteTestBase queryJsonMapper ); - SqlLifecycle lifecycle = lifecycleFactory.factorize(); - lifecycle.initialize(sql, new QueryContext(contexts)); - return lifecycle.runAnalyzeResources(authenticationResult); + PreparedStatement stmt = lifecycleFactory.preparedStatement(SqlQueryPlus.builder(sql) + .context(contexts) + .auth(authenticationResult) + .build() + ); + stmt.prepare(); + return stmt.allResources(); } - public SqlLifecycleFactory getSqlLifecycleFactory( + public 
SqlStatementFactory getSqlLifecycleFactory( PlannerConfig plannerConfig, AuthConfig authConfig, DruidOperatorTable operatorTable, @@ -1085,7 +1052,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase CalciteTests.DRUID_SCHEMA_NAME, new CalciteRulesManager(ImmutableSet.of()) ); - final SqlLifecycleFactory sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(plannerFactory, authConfig); + final SqlStatementFactory sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(plannerFactory, authConfig); viewManager.createView( plannerFactory, @@ -1166,7 +1133,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase private static DataSource recursivelyOverrideContext(final DataSource dataSource, final Map context) { if (dataSource instanceof QueryDataSource) { - final Query subquery = ((QueryDataSource) dataSource).getQuery(); + final Query subquery = ((QueryDataSource) dataSource).getQuery(); return new QueryDataSource(recursivelyOverrideContext(subquery, context)); } else { return dataSource.withChildren( @@ -1236,13 +1203,6 @@ public class BaseCalciteQueryTest extends CalciteTestBase .build(), }; } - - public static Map withOverrides(Map originalContext, Map overrides) - { - Map contextWithOverrides = new HashMap<>(originalContext); - contextWithOverrides.putAll(overrides); - return contextWithOverrides; - } } protected Map withLeftDirectAccessEnabled(Map context) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteArraysQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteArraysQueryTest.java index fd9915d67d4..453a8f4ee73 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteArraysQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteArraysQueryTest.java @@ -58,6 +58,7 @@ import org.apache.druid.sql.calcite.util.CalciteTests; import org.junit.Assert; import org.junit.Test; +import java.io.IOException; import java.util.Arrays; import java.util.Collections; import 
java.util.List; @@ -70,7 +71,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest // test some query stuffs, sort of limited since no native array column types so either need to use constructor or // array aggregator @Test - public void testSelectConstantArrayExpressionFromTable() throws Exception + public void testSelectConstantArrayExpressionFromTable() { testQuery( "SELECT ARRAY[1,2] as arr, dim1 FROM foo LIMIT 1", @@ -92,7 +93,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByArrayFromCase() throws Exception + public void testGroupByArrayFromCase() { cannotVectorize(); testQuery( @@ -121,7 +122,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectNonConstantArrayExpressionFromTable() throws Exception + public void testSelectNonConstantArrayExpressionFromTable() { testQuery( "SELECT ARRAY[CONCAT(dim1, 'word'),'up'] as arr, dim1 FROM foo LIMIT 5", @@ -151,7 +152,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectNonConstantArrayExpressionFromTableForMultival() throws Exception + public void testSelectNonConstantArrayExpressionFromTableForMultival() { final String sql = "SELECT ARRAY[CONCAT(dim3, 'word'),'up'] as arr, dim1 FROM foo LIMIT 5"; final Query scanQuery = newScanQueryBuilder() @@ -204,7 +205,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testSomeArrayFunctionsWithScanQuery() throws Exception + public void testSomeArrayFunctionsWithScanQuery() { // Yes these outputs are strange sometimes, arrays are in a partial state of existence so end up a bit // stringy for now this is because virtual column selectors are coercing values back to stringish so that @@ -356,7 +357,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testSomeArrayFunctionsWithScanQueryNoStringify() throws Exception + public void 
testSomeArrayFunctionsWithScanQueryNoStringify() { // when not stringifying arrays, some things are still stringified, because they are inferred to be typed as strings // the planner context which controls stringification of arrays does not apply to multi-valued string columns, @@ -474,7 +475,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayOverlapFilter() throws Exception + public void testArrayOverlapFilter() { testQuery( "SELECT dim3 FROM druid.numfoo WHERE ARRAY_OVERLAP(dim3, ARRAY['a','b']) LIMIT 5", @@ -497,7 +498,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayOverlapFilterNonLiteral() throws Exception + public void testArrayOverlapFilterNonLiteral() { testQuery( "SELECT dim3 FROM druid.numfoo WHERE ARRAY_OVERLAP(dim3, ARRAY[dim2]) LIMIT 5", @@ -519,7 +520,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayContainsFilter() throws Exception + public void testArrayContainsFilter() { testQuery( "SELECT dim3 FROM druid.numfoo WHERE ARRAY_CONTAINS(dim3, ARRAY['a','b']) LIMIT 5", @@ -546,7 +547,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayContainsArrayOfOneElement() throws Exception + public void testArrayContainsArrayOfOneElement() { testQuery( "SELECT dim3 FROM druid.numfoo WHERE ARRAY_CONTAINS(dim3, ARRAY['a']) LIMIT 5", @@ -568,7 +569,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayContainsArrayOfNonLiteral() throws Exception + public void testArrayContainsArrayOfNonLiteral() { testQuery( "SELECT dim3 FROM druid.numfoo WHERE ARRAY_CONTAINS(dim3, ARRAY[dim2]) LIMIT 5", @@ -591,7 +592,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArraySlice() throws Exception + public void testArraySlice() { testQuery( "SELECT ARRAY_SLICE(dim3, 1) FROM druid.numfoo", @@ 
-619,7 +620,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayLength() throws Exception + public void testArrayLength() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -662,7 +663,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAppend() throws Exception + public void testArrayAppend() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -719,7 +720,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayPrepend() throws Exception + public void testArrayPrepend() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -776,7 +777,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayPrependAppend() throws Exception + public void testArrayPrependAppend() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -840,7 +841,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayConcat() throws Exception + public void testArrayConcat() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -897,7 +898,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayOffset() throws Exception + public void testArrayOffset() { // Cannot vectorize due to usage of expressions. 
cannotVectorize(); @@ -936,7 +937,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayGroupAsLongArray() throws Exception + public void testArrayGroupAsLongArray() { // Cannot vectorize as we donot have support in native query subsytem for grouping on arrays cannotVectorize(); @@ -985,7 +986,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest @Test - public void testArrayGroupAsDoubleArray() throws Exception + public void testArrayGroupAsDoubleArray() { // Cannot vectorize as we donot have support in native query subsytem for grouping on arrays as keys cannotVectorize(); @@ -1034,7 +1035,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayGroupAsFloatArray() throws Exception + public void testArrayGroupAsFloatArray() { // Cannot vectorize as we donot have support in native query subsytem for grouping on arrays as keys cannotVectorize(); @@ -1083,7 +1084,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayGroupAsArrayWithFunction() throws Exception + public void testArrayGroupAsArrayWithFunction() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1127,7 +1128,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayOrdinal() throws Exception + public void testArrayOrdinal() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1170,7 +1171,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayOffsetOf() throws Exception + public void testArrayOffsetOf() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1219,7 +1220,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayOrdinalOf() throws Exception + public void testArrayOrdinalOf() { // Cannot vectorize due to usage of expressions. 
cannotVectorize(); @@ -1269,7 +1270,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayToString() throws Exception + public void testArrayToString() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1325,7 +1326,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayToStringToMultiValueString() throws Exception + public void testArrayToStringToMultiValueString() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1384,7 +1385,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAgg() throws Exception + public void testArrayAgg() { cannotVectorize(); testQuery( @@ -1466,7 +1467,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggMultiValue() throws Exception + public void testArrayAggMultiValue() { cannotVectorize(); testQuery( @@ -1524,7 +1525,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggNumeric() throws Exception + public void testArrayAggNumeric() { cannotVectorize(); testQuery( @@ -1660,7 +1661,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggArrays() throws Exception + public void testArrayAggArrays() { try { ExpressionProcessing.initializeForTests(true); @@ -1762,7 +1763,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggArraysNoNest() throws Exception + public void testArrayAggArraysNoNest() { cannotVectorize(); testQueryThrows( @@ -1823,7 +1824,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayConcatAggArrays() throws Exception + public void testArrayConcatAggArrays() { cannotVectorize(); testQuery( @@ -1884,7 +1885,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void 
testArrayAggToString() throws Exception + public void testArrayAggToString() { cannotVectorize(); testQuery( @@ -1926,7 +1927,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggExpression() throws Exception + public void testArrayAggExpression() { cannotVectorize(); testQuery( @@ -1970,7 +1971,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggMaxBytes() throws Exception + public void testArrayAggMaxBytes() { cannotVectorize(); testQuery( @@ -2028,7 +2029,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggAsArrayFromJoin() throws Exception + public void testArrayAggAsArrayFromJoin() { cannotVectorize(); List expectedResults; @@ -2110,7 +2111,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggGroupByArrayAggFromSubquery() throws Exception + public void testArrayAggGroupByArrayAggFromSubquery() { cannotVectorize(); @@ -2178,7 +2179,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggGroupByArrayAggOfLongsFromSubquery() throws Exception + public void testArrayAggGroupByArrayAggOfLongsFromSubquery() throws IOException { requireMergeBuffers(3); cannotVectorize(); @@ -2251,7 +2252,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggGroupByArrayAggOfStringsFromSubquery() throws Exception + public void testArrayAggGroupByArrayAggOfStringsFromSubquery() throws IOException { requireMergeBuffers(3); cannotVectorize(); @@ -2317,7 +2318,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggGroupByArrayAggOfDoubleFromSubquery() throws Exception + public void testArrayAggGroupByArrayAggOfDoubleFromSubquery() throws IOException { requireMergeBuffers(3); cannotVectorize(); @@ -2384,7 +2385,7 @@ public class 
CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggArrayContainsSubquery() throws Exception + public void testArrayAggArrayContainsSubquery() { cannotVectorize(); List expectedResults; @@ -2467,7 +2468,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggGroupByArrayContainsSubquery() throws Exception + public void testArrayAggGroupByArrayContainsSubquery() { cannotVectorize(); List expectedResults; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteCorrelatedQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteCorrelatedQueryTest.java index 9a2dca42f49..b019ad090ba 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteCorrelatedQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteCorrelatedQueryTest.java @@ -58,7 +58,7 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testCorrelatedSubquery(Map queryContext) throws Exception + public void testCorrelatedSubquery(Map queryContext) { cannotVectorize(); queryContext = withLeftDirectAccessEnabled(queryContext); @@ -175,7 +175,7 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testCorrelatedSubqueryWithLeftFilter(Map queryContext) throws Exception + public void testCorrelatedSubqueryWithLeftFilter(Map queryContext) { cannotVectorize(); queryContext = withLeftDirectAccessEnabled(queryContext); @@ -264,7 +264,7 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testCorrelatedSubqueryWithLeftFilter_leftDirectAccessDisabled(Map queryContext) throws Exception + public void testCorrelatedSubqueryWithLeftFilter_leftDirectAccessDisabled(Map queryContext) { cannotVectorize(); @@ -355,7 
+355,7 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testCorrelatedSubqueryWithCorrelatedQueryFilter(Map queryContext) throws Exception + public void testCorrelatedSubqueryWithCorrelatedQueryFilter(Map queryContext) { cannotVectorize(); queryContext = withLeftDirectAccessEnabled(queryContext); @@ -449,7 +449,7 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testCorrelatedSubqueryWithCorrelatedQueryFilter_Scan(Map queryContext) throws Exception + public void testCorrelatedSubqueryWithCorrelatedQueryFilter_Scan(Map queryContext) { cannotVectorize(); queryContext = withLeftDirectAccessEnabled(queryContext); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java index 31246b4bca1..a19dd3d0380 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java @@ -33,7 +33,7 @@ import java.util.Map; public class CalciteExplainQueryTest extends BaseCalciteQueryTest { @Test - public void testExplainCountStarOnView() throws Exception + public void testExplainCountStarOnView() { // Skip vectorization since otherwise the "context" will change for each subtest. 
skipVectorize(); @@ -71,7 +71,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest } @Test - public void testExplainInformationSchemaColumns() throws Exception + public void testExplainInformationSchemaColumns() { final String explanation = "BindableProject(COLUMN_NAME=[$3], DATA_TYPE=[$7])\n" @@ -93,7 +93,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest } @Test - public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception + public void testExplainExactCountDistinctOfSemiJoinResult() { // Skip vectorization since otherwise the "context" will change for each subtest. skipVectorize(); @@ -142,7 +142,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest // This testcase has been added here and not in CalciteSelectQueryTests since this checks if the overrides are working // properly when displaying the output of "EXPLAIN PLAN FOR ..." queries @Test - public void testExplainSelectStarWithOverrides() throws Exception + public void testExplainSelectStarWithOverrides() { Map useRegularExplainContext = new HashMap<>(QUERY_CONTEXT_DEFAULT); useRegularExplainContext.put(PlannerConfig.CTX_KEY_USE_NATIVE_QUERY_EXPLAIN, false); @@ -213,7 +213,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest } @Test - public void testExplainMultipleTopLevelUnionAllQueries() throws Exception + public void testExplainMultipleTopLevelUnionAllQueries() { // Skip vectorization since otherwise the "context" will change for each subtest. skipVectorize(); @@ -260,7 +260,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest } @Test - public void testExplainSelectMvfilterExpressions() throws Exception + public void testExplainSelectMvfilterExpressions() { // Skip vectorization since otherwise the "context" will change for each subtest. 
skipVectorize(); @@ -328,7 +328,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest } @Test - public void testExplainSelectTimestampExpression() throws Exception + public void testExplainSelectTimestampExpression() { // Skip vectorization since otherwise the "context" will change for each subtest. skipVectorize(); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java index 64c094923a1..c862721dcb2 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java @@ -31,7 +31,6 @@ import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.granularity.Granularity; import org.apache.druid.query.Query; -import org.apache.druid.query.QueryContext; import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; @@ -41,8 +40,9 @@ import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.Resource; import org.apache.druid.server.security.ResourceAction; import org.apache.druid.server.security.ResourceType; -import org.apache.druid.sql.SqlLifecycle; -import org.apache.druid.sql.SqlLifecycleFactory; +import org.apache.druid.sql.DirectStatement; +import org.apache.druid.sql.SqlQueryPlus; +import org.apache.druid.sql.SqlStatementFactory; import org.apache.druid.sql.calcite.external.ExternalDataSource; import org.apache.druid.sql.calcite.parser.DruidSqlInsert; import org.apache.druid.sql.calcite.planner.Calcites; @@ -145,7 +145,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest private String expectedTargetDataSource; private RowSignature expectedTargetSignature; private List 
expectedResources; - private Query expectedQuery; + private Query expectedQuery; private Matcher validationErrorMatcher; private IngestionDmlTester() @@ -267,7 +267,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest throw new ISE("Test must not have expectedQuery"); } - final SqlLifecycleFactory sqlLifecycleFactory = getSqlLifecycleFactory( + final SqlStatementFactory sqlLifecycleFactory = getSqlLifecycleFactory( plannerConfig, new AuthConfig(), createOperatorTable(), @@ -276,14 +276,18 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest queryJsonMapper ); - final SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); - sqlLifecycle.initialize(sql, new QueryContext(queryContext)); + DirectStatement stmt = sqlLifecycleFactory.directStatement( + SqlQueryPlus + .builder(sql) + .context(queryContext) + .auth(authenticationResult) + .build() + ); final Throwable e = Assert.assertThrows( Throwable.class, () -> { - sqlLifecycle.validateAndAuthorize(authenticationResult); - sqlLifecycle.plan(); + stmt.execute(); } ); @@ -291,7 +295,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest Assert.assertTrue(queryLogHook.getRecordedQueries().isEmpty()); } - private void verifySuccess() throws Exception + private void verifySuccess() { if (expectedTargetDataSource == null) { throw new ISE("Test must have expectedTargetDataSource"); @@ -301,7 +305,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest throw new ISE("Test must have expectedResources"); } - final List expectedQueries = + final List> expectedQueries = expectedQuery == null ? 
Collections.emptyList() : Collections.singletonList(recursivelyOverrideContext(expectedQuery, queryContext)); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java index 1470b102915..2dc4ea31428 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java @@ -50,6 +50,7 @@ import org.junit.Assert; import org.junit.Test; import org.junit.internal.matchers.ThrowableMessageMatcher; +import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -429,7 +430,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest } @Test - public void testInsertWithClusteredByAndOrderBy() throws Exception + public void testInsertWithClusteredByAndOrderBy() { try { testQuery( @@ -452,7 +453,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest } @Test - public void testInsertWithPartitionedByContainingInvalidGranularity() throws Exception + public void testInsertWithPartitionedByContainingInvalidGranularity() { // Throws a ValidationException, which gets converted to a SqlPlanningException before throwing to end user try { @@ -473,7 +474,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest } @Test - public void testInsertWithOrderBy() throws Exception + public void testInsertWithOrderBy() { try { testQuery( @@ -514,7 +515,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest } @Test - public void testExplainInsertFromExternal() throws Exception + public void testExplainInsertFromExternal() throws IOException { // Skip vectorization since otherwise the "context" will change for each subtest. 
skipVectorize(); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteJoinQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteJoinQueryTest.java index aad48cff19c..2bd1dc2a8aa 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteJoinQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteJoinQueryTest.java @@ -105,9 +105,8 @@ import static org.apache.druid.query.QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KE @RunWith(JUnitParamsRunner.class) public class CalciteJoinQueryTest extends BaseCalciteQueryTest { - @Test - public void testInnerJoinWithLimitAndAlias() throws Exception + public void testInnerJoinWithLimitAndAlias() { minTopNThreshold = 1; Map context = new HashMap<>(QUERY_CONTEXT_DEFAULT); @@ -164,7 +163,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test - public void testExactTopNOnInnerJoinWithLimit() throws Exception + public void testExactTopNOnInnerJoinWithLimit() { // Adjust topN threshold, so that the topN engine keeps only 1 slot for aggregates, which should be enough // to compute the query with limit 1. @@ -214,7 +213,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest } @Test - public void testJoinOuterGroupByAndSubqueryHasLimit() throws Exception + public void testJoinOuterGroupByAndSubqueryHasLimit() { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -301,7 +300,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testJoinOuterGroupByAndSubqueryNoLimit(Map queryContext) throws Exception + public void testJoinOuterGroupByAndSubqueryNoLimit(Map queryContext) { // Fully removing the join allows this query to vectorize. 
if (!isRewriteJoinToFilter(queryContext)) { @@ -384,7 +383,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest } @Test - public void testJoinWithLimitBeforeJoining() throws Exception + public void testJoinWithLimitBeforeJoining() { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -470,7 +469,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest } @Test - public void testJoinOnTimeseriesWithFloorOnTime() throws Exception + public void testJoinOnTimeseriesWithFloorOnTime() { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -524,7 +523,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest } @Test - public void testJoinOnGroupByInsteadOfTimeseriesWithFloorOnTime() throws Exception + public void testJoinOnGroupByInsteadOfTimeseriesWithFloorOnTime() { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -593,7 +592,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) public void testFilterAndGroupByLookupUsingJoinOperatorWithValueFilterPushdownMatchesNothig(Map queryContext) - throws Exception + { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -629,7 +628,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testFilterAndGroupByLookupUsingJoinOperatorAllowNulls(Map queryContext) throws Exception + public void testFilterAndGroupByLookupUsingJoinOperatorAllowNulls(Map queryContext) { // Cannot vectorize JOIN operator. 
cannotVectorize(); @@ -668,7 +667,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testFilterAndGroupByLookupUsingJoinOperatorBackwards(Map queryContext) throws Exception + public void testFilterAndGroupByLookupUsingJoinOperatorBackwards(Map queryContext) { // Like "testFilterAndGroupByLookupUsingJoinOperator", but with the table and lookup reversed. @@ -717,7 +716,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) public void testFilterAndGroupByLookupUsingJoinOperatorWithNotFilter(Map queryContext) - throws Exception + { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -756,7 +755,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testJoinUnionTablesOnLookup(Map queryContext) throws Exception + public void testJoinUnionTablesOnLookup(Map queryContext) { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -802,7 +801,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testFilterAndGroupByLookupUsingJoinOperator(Map queryContext) throws Exception + public void testFilterAndGroupByLookupUsingJoinOperator(Map queryContext) { // Cannot vectorize JOIN operator. 
cannotVectorize(); @@ -841,7 +840,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) public void testFilterAndGroupByLookupUsingPostAggregationJoinOperator(Map queryContext) - throws Exception + { testQuery( "SELECT base.dim2, lookyloo.v, base.cnt FROM (\n" @@ -891,7 +890,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testGroupByInnerJoinOnLookupUsingJoinOperator(Map queryContext) throws Exception + public void testGroupByInnerJoinOnLookupUsingJoinOperator(Map queryContext) { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -927,7 +926,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testSelectOnLookupUsingInnerJoinOperator(Map queryContext) throws Exception + public void testSelectOnLookupUsingInnerJoinOperator(Map queryContext) { testQuery( "SELECT dim2, lookyloo.*\n" @@ -959,7 +958,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testLeftJoinTwoLookupsUsingJoinOperator(Map queryContext) throws Exception + public void testLeftJoinTwoLookupsUsingJoinOperator(Map queryContext) { testQuery( "SELECT dim1, dim2, l1.v, l2.v\n" @@ -1004,7 +1003,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinTableLookupLookupWithFilterWithOuterLimit(Map queryContext) throws Exception + public void testInnerJoinTableLookupLookupWithFilterWithOuterLimit(Map queryContext) { testQuery( "SELECT dim1\n" @@ -1047,7 +1046,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinTableLookupLookupWithFilterWithoutLimit(Map 
queryContext) throws Exception + public void testInnerJoinTableLookupLookupWithFilterWithoutLimit(Map queryContext) { testQuery( "SELECT dim1\n" @@ -1089,7 +1088,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) public void testInnerJoinTableLookupLookupWithFilterWithOuterLimitWithAllColumns(Map queryContext) - throws Exception + { testQuery( "SELECT __time, cnt, dim1, dim2, dim3, m1, m2, unique_dim1\n" @@ -1133,7 +1132,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) public void testInnerJoinTableLookupLookupWithFilterWithoutLimitWithAllColumns(Map queryContext) - throws Exception + { testQuery( "SELECT __time, cnt, dim1, dim2, dim3, m1, m2, unique_dim1\n" @@ -1174,7 +1173,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testManyManyInnerJoinOnManyManyLookup(Map queryContext) throws Exception + public void testManyManyInnerJoinOnManyManyLookup(Map queryContext) { testQuery( "SELECT dim1\n" @@ -1403,7 +1402,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinQueryOfLookup(Map queryContext) throws Exception + public void testInnerJoinQueryOfLookup(Map queryContext) { // Cannot vectorize the subquery. cannotVectorize(); @@ -1455,7 +1454,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinQueryOfLookupRemovable(Map queryContext) throws Exception + public void testInnerJoinQueryOfLookupRemovable(Map queryContext) { // Like "testInnerJoinQueryOfLookup", but the subquery is removable. 
@@ -1493,7 +1492,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinTwoLookupsToTableUsingNumericColumn(Map queryContext) throws Exception + public void testInnerJoinTwoLookupsToTableUsingNumericColumn(Map queryContext) { // Regression test for https://github.com/apache/druid/issues/9646. @@ -1555,7 +1554,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) public void testInnerJoinTwoLookupsToTableUsingNumericColumnInReverse(Map queryContext) - throws Exception + { // Like "testInnerJoinTwoLookupsToTableUsingNumericColumn", but the tables are specified backwards. @@ -1611,7 +1610,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinLookupTableTable(Map queryContext) throws Exception + public void testInnerJoinLookupTableTable(Map queryContext) { // Regression test for https://github.com/apache/druid/issues/9646. @@ -1693,7 +1692,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinLookupTableTableChained(Map queryContext) throws Exception + public void testInnerJoinLookupTableTableChained(Map queryContext) { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -1773,7 +1772,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test - public void testWhereInSelectNullFromLookup() throws Exception + public void testWhereInSelectNullFromLookup() { // Regression test for https://github.com/apache/druid/issues/9646. 
cannotVectorize(); @@ -1815,7 +1814,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest } @Test - public void testCommaJoinLeftFunction() throws Exception + public void testCommaJoinLeftFunction() { testQuery( "SELECT foo.dim1, foo.dim2, l.k, l.v\n" @@ -1853,7 +1852,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest // Hence, comma join will result in a cross join with filter on outermost @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testCommaJoinTableLookupTableMismatchedTypes(Map queryContext) throws Exception + public void testCommaJoinTableLookupTableMismatchedTypes(Map queryContext) { // Regression test for https://github.com/apache/druid/issues/9646. @@ -1908,7 +1907,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testJoinTableLookupTableMismatchedTypesWithoutComma(Map queryContext) throws Exception + public void testJoinTableLookupTableMismatchedTypesWithoutComma(Map queryContext) { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -1975,7 +1974,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinCastLeft(Map queryContext) throws Exception + public void testInnerJoinCastLeft(Map queryContext) { // foo.m1 is FLOAT, l.k is STRING. @@ -2009,7 +2008,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinCastRight(Map queryContext) throws Exception + public void testInnerJoinCastRight(Map queryContext) { // foo.m1 is FLOAT, l.k is STRING. 
@@ -2053,7 +2052,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinMismatchedTypes(Map queryContext) throws Exception + public void testInnerJoinMismatchedTypes(Map queryContext) { // foo.m1 is FLOAT, l.k is STRING. Comparing them generates a CAST. @@ -2097,7 +2096,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinLeftFunction(Map queryContext) throws Exception + public void testInnerJoinLeftFunction(Map queryContext) { testQuery( "SELECT foo.dim1, foo.dim2, l.k, l.v\n" @@ -2133,7 +2132,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinRightFunction(Map queryContext) throws Exception + public void testInnerJoinRightFunction(Map queryContext) { testQuery( "SELECT foo.dim1, foo.dim2, l.k, l.v\n" @@ -2176,7 +2175,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testLeftJoinLookupOntoLookupUsingJoinOperator(Map queryContext) throws Exception + public void testLeftJoinLookupOntoLookupUsingJoinOperator(Map queryContext) { testQuery( "SELECT dim2, l1.v, l2.v\n" @@ -2219,7 +2218,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testLeftJoinThreeLookupsUsingJoinOperator(Map queryContext) throws Exception + public void testLeftJoinThreeLookupsUsingJoinOperator(Map queryContext) { testQuery( "SELECT dim1, dim2, l1.v, l2.v, l3.v\n" @@ -2269,7 +2268,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testSelectOnLookupUsingLeftJoinOperator(Map queryContext) throws Exception + public void 
testSelectOnLookupUsingLeftJoinOperator(Map queryContext) { testQuery( "SELECT dim1, lookyloo.*\n" @@ -2306,7 +2305,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testSelectOnLookupUsingRightJoinOperator(Map queryContext) throws Exception + public void testSelectOnLookupUsingRightJoinOperator(Map queryContext) { testQuery( "SELECT dim1, lookyloo.*\n" @@ -2341,7 +2340,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testSelectOnLookupUsingFullJoinOperator(Map queryContext) throws Exception + public void testSelectOnLookupUsingFullJoinOperator(Map queryContext) { testQuery( "SELECT dim1, m1, cnt, lookyloo.*\n" @@ -2381,7 +2380,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInAggregationSubquery(Map queryContext) throws Exception + public void testInAggregationSubquery(Map queryContext) { // Fully removing the join allows this query to vectorize. if (!isRewriteJoinToFilter(queryContext)) { @@ -2431,7 +2430,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testNotInAggregationSubquery(Map queryContext) throws Exception + public void testNotInAggregationSubquery(Map queryContext) { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -2517,7 +2516,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testUsingSubqueryWithExtractionFns(Map queryContext) throws Exception + public void testUsingSubqueryWithExtractionFns(Map queryContext) { // Cannot vectorize JOIN operator. 
cannotVectorize(); @@ -2575,7 +2574,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinWithIsNullFilter(Map queryContext) throws Exception + public void testInnerJoinWithIsNullFilter(Map queryContext) { testQuery( "SELECT dim1, l.v from druid.foo f inner join lookup.lookyloo l on f.dim1 = l.k where f.dim2 is null", @@ -2608,7 +2607,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) @Ignore // regression test for https://github.com/apache/druid/issues/9924 - public void testInnerJoinOnMultiValueColumn(Map queryContext) throws Exception + public void testInnerJoinOnMultiValueColumn(Map queryContext) { cannotVectorize(); testQuery( @@ -2648,7 +2647,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testLeftJoinOnTwoInlineDataSourcesWithTimeFilter(Map queryContext) throws Exception + public void testLeftJoinOnTwoInlineDataSourcesWithTimeFilter(Map queryContext) { testQuery( "with abc as\n" @@ -2718,7 +2717,6 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) public void testLeftJoinOnTwoInlineDataSourcesWithTimeFilter_withLeftDirectAccess(Map queryContext) - throws Exception { queryContext = withLeftDirectAccessEnabled(queryContext); testQuery( @@ -2776,7 +2774,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testLeftJoinOnTwoInlineDataSourcesWithOuterWhere(Map queryContext) throws Exception + public void testLeftJoinOnTwoInlineDataSourcesWithOuterWhere(Map queryContext) { testQuery( "with abc as\n" @@ -2831,7 +2829,6 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = 
QueryContextForJoinProvider.class) public void testLeftJoinOnTwoInlineDataSourcesWithOuterWhere_withLeftDirectAccess(Map queryContext) - throws Exception { queryContext = withLeftDirectAccessEnabled(queryContext); testQuery( @@ -2879,7 +2876,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testLeftJoinOnTwoInlineDataSources(Map queryContext) throws Exception + public void testLeftJoinOnTwoInlineDataSources(Map queryContext) { testQuery( "with abc as\n" @@ -2932,7 +2929,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testLeftJoinOnTwoInlineDataSources_withLeftDirectAccess(Map queryContext) throws Exception + public void testLeftJoinOnTwoInlineDataSources_withLeftDirectAccess(Map queryContext) { queryContext = withLeftDirectAccessEnabled(queryContext); testQuery( @@ -2980,7 +2977,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinOnTwoInlineDataSourcesWithOuterWhere(Map queryContext) throws Exception + public void testInnerJoinOnTwoInlineDataSourcesWithOuterWhere(Map queryContext) { Druids.ScanQueryBuilder baseScanBuilder = newScanQueryBuilder() .dataSource( @@ -3035,7 +3032,6 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) public void testInnerJoinOnTwoInlineDataSourcesWithOuterWhere_withLeftDirectAccess(Map queryContext) - throws Exception { queryContext = withLeftDirectAccessEnabled(queryContext); testQuery( @@ -3083,7 +3079,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinOnTwoInlineDataSources(Map queryContext) throws Exception + public void testInnerJoinOnTwoInlineDataSources(Map 
queryContext) { testQuery( "with abc as\n" @@ -3137,7 +3133,6 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) public void testInnerJoinOnTwoInlineDataSources_withLeftDirectAccess(Map queryContext) - throws Exception { queryContext = withLeftDirectAccessEnabled(queryContext); testQuery( @@ -3195,7 +3190,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest } @Test - public void testLeftJoinRightTableCanBeEmpty() throws Exception + public void testLeftJoinRightTableCanBeEmpty() { // HashJoinSegmentStorageAdapter is not vectorizable cannotVectorize(); @@ -3283,7 +3278,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testLeftJoinSubqueryWithNullKeyFilter(Map queryContext) throws Exception + public void testLeftJoinSubqueryWithNullKeyFilter(Map queryContext) { // Cannot vectorize due to 'concat' expression. cannotVectorize(); @@ -3366,14 +3361,14 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testLeftJoinSubqueryWithSelectorFilter(Map queryContext) throws Exception + public void testLeftJoinSubqueryWithSelectorFilter(Map queryContext) { // Cannot vectorize due to 'concat' expression. 
cannotVectorize(); // disable the cost model where inner join is treated like a filter // this leads to cost(left join) < cost(converted inner join) for the below query - queryContext = QueryContextForJoinProvider.withOverrides( + queryContext = QueryContexts.override( queryContext, ImmutableMap.of("computeInnerJoinCostAsFilter", "false") ); @@ -3419,7 +3414,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testLeftJoinWithNotNullFilter(Map queryContext) throws Exception + public void testLeftJoinWithNotNullFilter(Map queryContext) { testQuery( "SELECT s.dim1, t.dim1\n" @@ -3461,11 +3456,9 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest ); } - - @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinSubqueryWithSelectorFilter(Map queryContext) throws Exception + public void testInnerJoinSubqueryWithSelectorFilter(Map queryContext) { // Cannot vectorize due to 'concat' expression. cannotVectorize(); @@ -3519,7 +3512,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest } @Test - public void testSemiJoinWithOuterTimeExtractScan() throws Exception + public void testSemiJoinWithOuterTimeExtractScan() { testQuery( "SELECT dim1, EXTRACT(MONTH FROM __time) FROM druid.foo\n" @@ -3565,7 +3558,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testTwoSemiJoinsSimultaneously(Map queryContext) throws Exception + public void testTwoSemiJoinsSimultaneously(Map queryContext) { // Fully removing the join allows this query to vectorize. 
if (!isRewriteJoinToFilter(queryContext)) { @@ -3630,7 +3623,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testSemiAndAntiJoinSimultaneouslyUsingWhereInSubquery(Map queryContext) throws Exception + public void testSemiAndAntiJoinSimultaneouslyUsingWhereInSubquery(Map queryContext) { cannotVectorize(); @@ -3736,7 +3729,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testSemiAndAntiJoinSimultaneouslyUsingExplicitJoins(Map queryContext) throws Exception + public void testSemiAndAntiJoinSimultaneouslyUsingExplicitJoins(Map queryContext) { cannotVectorize(); @@ -3803,7 +3796,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest } @Test - public void testSemiJoinWithOuterTimeExtractAggregateWithOrderBy() throws Exception + public void testSemiJoinWithOuterTimeExtractAggregateWithOrderBy() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -3885,7 +3878,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest // TODO: Remove expected Exception when https://github.com/apache/druid/issues/9924 is fixed @Test(expected = QueryException.class) @Parameters(source = QueryContextForJoinProvider.class) - public void testJoinOnMultiValuedColumnShouldThrowException(Map queryContext) throws Exception + public void testJoinOnMultiValuedColumnShouldThrowException(Map queryContext) { final String query = "SELECT dim3, l.v from druid.foo f inner join lookup.lookyloo l on f.dim3 = l.k\n"; @@ -3899,7 +3892,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testUnionAllTwoQueriesLeftQueryIsJoin(Map queryContext) throws Exception + public void testUnionAllTwoQueriesLeftQueryIsJoin(Map queryContext) { // Fully removing the join allows this query to vectorize. 
if (!isRewriteJoinToFilter(queryContext)) { @@ -3940,7 +3933,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testUnionAllTwoQueriesRightQueryIsJoin(Map queryContext) throws Exception + public void testUnionAllTwoQueriesRightQueryIsJoin(Map queryContext) { // Fully removing the join allows this query to vectorize. if (!isRewriteJoinToFilter(queryContext)) { @@ -3980,7 +3973,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllTwoQueriesBothQueriesAreJoin() throws Exception + public void testUnionAllTwoQueriesBothQueriesAreJoin() { cannotVectorize(); @@ -4027,7 +4020,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testTopNFilterJoin(Map queryContext) throws Exception + public void testTopNFilterJoin(Map queryContext) { // Fully removing the join allows this query to vectorize. if (!isRewriteJoinToFilter(queryContext)) { @@ -4103,7 +4096,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testTopNFilterJoinWithProjection(Map queryContext) throws Exception + public void testTopNFilterJoinWithProjection(Map queryContext) { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -4175,7 +4168,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) @Ignore("Stopped working after the ability to join on subqueries was added to DruidJoinRule") - public void testRemovableLeftJoin(Map queryContext) throws Exception + public void testRemovableLeftJoin(Map queryContext) { // LEFT JOIN where the right-hand side can be ignored. 
@@ -4229,7 +4222,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testCountDistinctOfLookupUsingJoinOperator(Map queryContext) throws Exception + public void testCountDistinctOfLookupUsingJoinOperator(Map queryContext) { // Cannot yet vectorize the JOIN operator. cannotVectorize(); @@ -4271,7 +4264,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testUsingSubqueryAsPartOfAndFilter(Map queryContext) throws Exception + public void testUsingSubqueryAsPartOfAndFilter(Map queryContext) { // Fully removing the join allows this query to vectorize. if (!isRewriteJoinToFilter(queryContext)) { @@ -4334,7 +4327,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testUsingSubqueryAsPartOfOrFilter(Map queryContext) throws Exception + public void testUsingSubqueryAsPartOfOrFilter(Map queryContext) { // Cannot vectorize JOIN operator. cannotVectorize(); @@ -4424,7 +4417,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testNestedGroupByOnInlineDataSourceWithFilter(Map queryContext) throws Exception + public void testNestedGroupByOnInlineDataSourceWithFilter(Map queryContext) { // Cannot vectorize due to virtual columns. 
cannotVectorize(); @@ -4576,7 +4569,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testCountOnSemiJoinSingleColumn(Map queryContext) throws Exception + public void testCountOnSemiJoinSingleColumn(Map queryContext) { testQuery( "SELECT dim1 FROM foo WHERE dim1 IN (SELECT dim1 FROM foo WHERE dim1 = '10.1')\n", @@ -4617,7 +4610,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testTopNOnStringWithNonSortedOrUniqueDictionary(Map queryContext) throws Exception + public void testTopNOnStringWithNonSortedOrUniqueDictionary(Map queryContext) { testQuery( "SELECT druid.broadcast.dim4, COUNT(*)\n" @@ -4658,7 +4651,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) public void testTopNOnStringWithNonSortedOrUniqueDictionaryOrderByDim(Map queryContext) - throws Exception + { testQuery( "SELECT druid.broadcast.dim4, COUNT(*)\n" @@ -4697,7 +4690,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testVirtualColumnOnMVFilterJoinExpression(Map queryContext) throws Exception + public void testVirtualColumnOnMVFilterJoinExpression(Map queryContext) { testQuery( "SELECT foo1.dim3, foo2.dim3 FROM druid.numfoo as foo1 INNER JOIN druid.numfoo as foo2 " @@ -4746,7 +4739,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testVirtualColumnOnMVFilterMultiJoinExpression(Map queryContext) throws Exception + public void testVirtualColumnOnMVFilterMultiJoinExpression(Map queryContext) { testQuery( "SELECT foo1.dim3, foo2.dim3 FROM druid.numfoo as foo1 INNER JOIN " @@ -4818,7 +4811,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest 
@Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinWithFilterPushdownAndManyFiltersEmptyResults(Map queryContext) throws Exception + public void testInnerJoinWithFilterPushdownAndManyFiltersEmptyResults(Map queryContext) { // create the query we expect ScanQuery query = newScanQueryBuilder() @@ -4937,7 +4930,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest @Test @Parameters(source = QueryContextForJoinProvider.class) - public void testInnerJoinWithFilterPushdownAndManyFiltersNonEmptyResults(Map queryContext) throws Exception + public void testInnerJoinWithFilterPushdownAndManyFiltersNonEmptyResults(Map queryContext) { // create the query we expect ScanQuery query = newScanQueryBuilder() @@ -5062,7 +5055,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest } @Test - public void testPlanWithInFilterMoreThanInSubQueryThreshold() throws Exception + public void testPlanWithInFilterMoreThanInSubQueryThreshold() { String query = "SELECT l1 FROM numfoo WHERE l1 IN (4842, 4844, 4845, 14905, 4853, 29064)"; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteMultiValueStringQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteMultiValueStringQueryTest.java index a62402cba40..59b437fca8a 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteMultiValueStringQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteMultiValueStringQueryTest.java @@ -55,7 +55,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest { // various queries on multi-valued string dimensions using them like strings @Test - public void testMultiValueStringWorksLikeStringGroupBy() throws Exception + public void testMultiValueStringWorksLikeStringGroupBy() { // Cannot vectorize due to usage of expressions. 
cannotVectorize(); @@ -112,7 +112,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringGroupByDoesNotWork() throws Exception + public void testMultiValueStringGroupByDoesNotWork() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -136,7 +136,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringWorksLikeStringGroupByWithFilter() throws Exception + public void testMultiValueStringWorksLikeStringGroupByWithFilter() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -176,7 +176,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringWorksLikeStringScan() throws Exception + public void testMultiValueStringWorksLikeStringScan() { final String nullVal = NullHandling.replaceWithDefault() ? "[\"foo\"]" : "[null]"; testQuery( @@ -204,7 +204,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringWorksLikeStringSelfConcatScan() throws Exception + public void testMultiValueStringWorksLikeStringSelfConcatScan() { final String nullVal = NullHandling.replaceWithDefault() ? 
"[\"-lol-\"]" : "[null]"; testQuery( @@ -232,7 +232,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringWorksLikeStringScanWithFilter() throws Exception + public void testMultiValueStringWorksLikeStringScanWithFilter() { testQuery( "SELECT concat(dim3, 'foo') FROM druid.numfoo where concat(dim3, 'foo') = 'bfoo'", @@ -257,7 +257,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest // these are a copy of the ARRAY functions tests in CalciteArraysQueryTest @Test - public void testMultiValueStringOverlapFilter() throws Exception + public void testMultiValueStringOverlapFilter() { testQuery( "SELECT dim3 FROM druid.numfoo WHERE MV_OVERLAP(dim3, ARRAY['a','b']) LIMIT 5", @@ -280,7 +280,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringOverlapFilterNonLiteral() throws Exception + public void testMultiValueStringOverlapFilterNonLiteral() { testQuery( "SELECT dim3 FROM druid.numfoo WHERE MV_OVERLAP(dim3, ARRAY[dim2]) LIMIT 5", @@ -300,7 +300,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringContainsFilter() throws Exception + public void testMultiValueStringContainsFilter() { testQuery( "SELECT dim3 FROM druid.numfoo WHERE MV_CONTAINS(dim3, ARRAY['a','b']) LIMIT 5", @@ -327,7 +327,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringContainsArrayOfOneElement() throws Exception + public void testMultiValueStringContainsArrayOfOneElement() { testQuery( "SELECT dim3 FROM druid.numfoo WHERE MV_CONTAINS(dim3, ARRAY['a']) LIMIT 5", @@ -349,7 +349,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringContainsArrayOfNonLiteral() throws Exception + public void testMultiValueStringContainsArrayOfNonLiteral() { 
testQuery( "SELECT dim3 FROM druid.numfoo WHERE MV_CONTAINS(dim3, ARRAY[dim2]) LIMIT 5", @@ -371,7 +371,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringSlice() throws Exception + public void testMultiValueStringSlice() { testQuery( "SELECT MV_SLICE(dim3, 1) FROM druid.numfoo", @@ -398,7 +398,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringLength() throws Exception + public void testMultiValueStringLength() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -441,7 +441,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringAppend() throws Exception + public void testMultiValueStringAppend() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -501,7 +501,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringPrepend() throws Exception + public void testMultiValueStringPrepend() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -561,7 +561,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringPrependAppend() throws Exception + public void testMultiValueStringPrependAppend() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -625,7 +625,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringConcat() throws Exception + public void testMultiValueStringConcat() { // Cannot vectorize due to usage of expressions. 
cannotVectorize(); @@ -683,7 +683,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringConcatBackwardsCompat0dot22andOlder() throws Exception + public void testMultiValueStringConcatBackwardsCompat0dot22andOlder() { try { ExpressionProcessing.initializeForHomogenizeNullMultiValueStrings(); @@ -747,7 +747,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringOffset() throws Exception + public void testMultiValueStringOffset() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -786,7 +786,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringOrdinal() throws Exception + public void testMultiValueStringOrdinal() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -825,7 +825,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringOffsetOf() throws Exception + public void testMultiValueStringOffsetOf() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -874,7 +874,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringOrdinalOf() throws Exception + public void testMultiValueStringOrdinalOf() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -924,7 +924,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringToString() throws Exception + public void testMultiValueStringToString() { // Cannot vectorize due to usage of expressions. 
cannotVectorize(); @@ -980,7 +980,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueStringToStringToMultiValueString() throws Exception + public void testMultiValueStringToStringToMultiValueString() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1041,7 +1041,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest @Test - public void testMultiValueListFilter() throws Exception + public void testMultiValueListFilter() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1086,7 +1086,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueListFilterDeny() throws Exception + public void testMultiValueListFilterDeny() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1141,7 +1141,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueListFilterComposed() throws Exception + public void testMultiValueListFilterComposed() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1198,7 +1198,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueListFilterComposedNested() throws Exception + public void testMultiValueListFilterComposedNested() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1265,7 +1265,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueListFilterComposedNested2Input() throws Exception + public void testMultiValueListFilterComposedNested2Input() { // Cannot vectorize due to usage of expressions. 
cannotVectorize(); @@ -1320,7 +1320,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueListFilterComposedNestedNullLiteral() throws Exception + public void testMultiValueListFilterComposedNestedNullLiteral() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1380,7 +1380,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueListFilterComposedDeny() throws Exception + public void testMultiValueListFilterComposedDeny() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1429,7 +1429,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueListFilterComposedMultipleExpressions() throws Exception + public void testMultiValueListFilterComposedMultipleExpressions() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1490,7 +1490,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnMultiValueListFilterNoMatch() throws Exception + public void testFilterOnMultiValueListFilterNoMatch() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1533,7 +1533,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnMultiValueListFilterMatch() throws Exception + public void testFilterOnMultiValueListFilterMatch() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1580,7 +1580,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnMultiValueListFilterMatchLike() throws Exception + public void testFilterOnMultiValueListFilterMatchLike() { // Cannot vectorize due to usage of expressions. 
cannotVectorize(); @@ -1627,7 +1627,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueToArrayGroupAsArrayWithMultiValueDimension() throws Exception + public void testMultiValueToArrayGroupAsArrayWithMultiValueDimension() { // Cannot vectorize as we donot have support in native query subsytem for grouping on arrays as keys cannotVectorize(); @@ -1679,7 +1679,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest @Test - public void testMultiValueToArrayGroupAsArrayWithSingleValueDim() throws Exception + public void testMultiValueToArrayGroupAsArrayWithSingleValueDim() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -1733,7 +1733,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueToArrayGroupAsArrayWithSingleValueDimIsNotConvertedToTopN() throws Exception + public void testMultiValueToArrayGroupAsArrayWithSingleValueDimIsNotConvertedToTopN() { // Cannot vectorize due to usage of expressions. 
cannotVectorize(); @@ -1789,7 +1789,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueToArrayMoreArgs() throws Exception + public void testMultiValueToArrayMoreArgs() { testQueryThrows( "SELECT MV_TO_ARRAY(dim3,dim3) FROM druid.numfoo", @@ -1801,7 +1801,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueToArrayNoArgs() throws Exception + public void testMultiValueToArrayNoArgs() { testQueryThrows( "SELECT MV_TO_ARRAY() FROM druid.numfoo", @@ -1813,7 +1813,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueToArrayArgsWithMultiValueDimFunc() throws Exception + public void testMultiValueToArrayArgsWithMultiValueDimFunc() { testQueryThrows( "SELECT MV_TO_ARRAY(concat(dim3,'c')) FROM druid.numfoo", @@ -1822,7 +1822,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueToArrayArgsWithSingleDimFunc() throws Exception + public void testMultiValueToArrayArgsWithSingleDimFunc() { testQueryThrows( "SELECT MV_TO_ARRAY(concat(dim1,'c')) FROM druid.numfoo", @@ -1831,7 +1831,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueToArrayArgsWithConstant() throws Exception + public void testMultiValueToArrayArgsWithConstant() { testQueryThrows( "SELECT MV_TO_ARRAY(concat(dim1,'c')) FROM druid.numfoo", @@ -1840,7 +1840,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest } @Test - public void testMultiValueToArrayArgsWithArray() throws Exception + public void testMultiValueToArrayArgsWithArray() { testQueryThrows( "SELECT MV_TO_ARRAY(Array[1,2]) FROM druid.numfoo", diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java index 
1001604c6da..52ffa3d0ad1 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java @@ -203,7 +203,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPath() throws Exception + public void testGroupByPath() { testQuery( "SELECT " @@ -240,7 +240,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByJsonValue() throws Exception + public void testGroupByJsonValue() { testQuery( "SELECT " @@ -277,7 +277,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testTopNPath() throws Exception + public void testTopNPath() { testQuery( "SELECT " @@ -314,7 +314,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByRootPath() throws Exception + public void testGroupByRootPath() { testQuery( "SELECT " @@ -351,7 +351,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByGetPaths() throws Exception + public void testGroupByGetPaths() { testQuery( "SELECT " @@ -399,7 +399,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByJsonGetPaths() throws Exception + public void testGroupByJsonGetPaths() { testQuery( "SELECT " @@ -447,7 +447,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByJsonValues() throws Exception + public void testGroupByJsonValues() { testQuery( "SELECT " @@ -487,7 +487,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathSelectorFilter() throws Exception + public void testGroupByPathSelectorFilter() { testQuery( "SELECT " @@ -526,7 +526,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void 
testGroupByPathSelectorFilterLong() throws Exception + public void testGroupByPathSelectorFilterLong() { testQuery( "SELECT " @@ -566,7 +566,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathSelectorFilterDouble() throws Exception + public void testGroupByPathSelectorFilterDouble() { testQuery( "SELECT " @@ -606,7 +606,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathSelectorFilterString() throws Exception + public void testGroupByPathSelectorFilterString() { testQuery( "SELECT " @@ -646,7 +646,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathSelectorFilterVariant() throws Exception + public void testGroupByPathSelectorFilterVariant() { testQuery( "SELECT " @@ -681,7 +681,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathSelectorFilterVariant2() throws Exception + public void testGroupByPathSelectorFilterVariant2() { testQuery( "SELECT " @@ -719,7 +719,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathSelectorFilterVariant3() throws Exception + public void testGroupByPathSelectorFilterVariant3() { testQuery( "SELECT " @@ -757,7 +757,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathSelectorFilterNonExistent() throws Exception + public void testGroupByPathSelectorFilterNonExistent() { testQuery( "SELECT " @@ -792,7 +792,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathSelectorFilterNull() throws Exception + public void testGroupByPathSelectorFilterNull() { testQuery( "SELECT " @@ -830,7 +830,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterLong() throws Exception + public 
void testGroupByPathBoundFilterLong() { testQuery( "SELECT " @@ -867,7 +867,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterLongNoUpper() throws Exception + public void testGroupByPathBoundFilterLongNoUpper() { testQuery( "SELECT " @@ -904,7 +904,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterLongNoLower() throws Exception + public void testGroupByPathBoundFilterLongNoLower() { testQuery( "SELECT " @@ -940,7 +940,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterLongNumeric() throws Exception + public void testGroupByPathBoundFilterLongNumeric() { testQuery( "SELECT " @@ -978,7 +978,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterLongNoUpperNumeric() throws Exception + public void testGroupByPathBoundFilterLongNoUpperNumeric() { testQuery( "SELECT " @@ -1016,7 +1016,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathNumericBoundFilterLongNoUpperNumeric() throws Exception + public void testGroupByPathNumericBoundFilterLongNoUpperNumeric() { testQuery( "SELECT " @@ -1053,7 +1053,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterLongNoLowerNumeric() throws Exception + public void testGroupByPathBoundFilterLongNoLowerNumeric() { testQuery( "SELECT " @@ -1091,7 +1091,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterDouble() throws Exception + public void testGroupByPathBoundFilterDouble() { testQuery( "SELECT " @@ -1128,7 +1128,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterDoubleNoUpper() throws Exception 
+ public void testGroupByPathBoundFilterDoubleNoUpper() { testQuery( "SELECT " @@ -1165,7 +1165,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterDoubleNoLower() throws Exception + public void testGroupByPathBoundFilterDoubleNoLower() { testQuery( "SELECT " @@ -1201,7 +1201,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundDoubleFilterNumeric() throws Exception + public void testGroupByPathBoundDoubleFilterNumeric() { testQuery( "SELECT " @@ -1239,7 +1239,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterDoubleNoUpperNumeric() throws Exception + public void testGroupByPathBoundFilterDoubleNoUpperNumeric() { testQuery( "SELECT " @@ -1277,7 +1277,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterDoubleNoLowerNumeric() throws Exception + public void testGroupByPathBoundFilterDoubleNoLowerNumeric() { testQuery( "SELECT " @@ -1315,7 +1315,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterString() throws Exception + public void testGroupByPathBoundFilterString() { testQuery( "SELECT " @@ -1352,7 +1352,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterStringNoUpper() throws Exception + public void testGroupByPathBoundFilterStringNoUpper() { testQuery( "SELECT " @@ -1390,7 +1390,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathBoundFilterStringNoLower() throws Exception + public void testGroupByPathBoundFilterStringNoLower() { testQuery( "SELECT " @@ -1428,7 +1428,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathLikeFilter() throws 
Exception + public void testGroupByPathLikeFilter() { testQuery( "SELECT " @@ -1464,7 +1464,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathLikeFilterStringPrefix() throws Exception + public void testGroupByPathLikeFilterStringPrefix() { testQuery( "SELECT " @@ -1501,7 +1501,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathLikeFilterString() throws Exception + public void testGroupByPathLikeFilterString() { testQuery( "SELECT " @@ -1538,7 +1538,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathLikeFilterVariant() throws Exception + public void testGroupByPathLikeFilterVariant() { testQuery( "SELECT " @@ -1575,7 +1575,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathInFilter() throws Exception + public void testGroupByPathInFilter() { testQuery( "SELECT " @@ -1613,7 +1613,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathInFilterDouble() throws Exception + public void testGroupByPathInFilterDouble() { testQuery( "SELECT " @@ -1651,7 +1651,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathInFilterString() throws Exception + public void testGroupByPathInFilterString() { testQuery( "SELECT " @@ -1689,7 +1689,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByPathInFilterVariant() throws Exception + public void testGroupByPathInFilterVariant() { testQuery( "SELECT " @@ -1726,7 +1726,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testSumPath() throws Exception + public void testSumPath() { testQuery( "SELECT " @@ -1753,7 +1753,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest @Test 
- public void testSumPathFilteredAggDouble() throws Exception + public void testSumPathFilteredAggDouble() { // this one actually equals 2.1 because the filter is a long so double is cast and is 1 so both rows match testQuery( @@ -1791,7 +1791,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testSumPathFilteredAggString() throws Exception + public void testSumPathFilteredAggString() { testQuery( "SELECT " @@ -1828,7 +1828,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testSumPathMixed() throws Exception + public void testSumPathMixed() { // throws a "Cannot make vector value selector for variant typed nested field [[LONG, DOUBLE]]" skipVectorize(); @@ -1856,7 +1856,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testSumPathMixedFilteredAggLong() throws Exception + public void testSumPathMixedFilteredAggLong() { // throws a "Cannot make vector value selector for variant typed nested field [[LONG, DOUBLE]]" skipVectorize(); @@ -1896,7 +1896,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testSumPathMixedFilteredAggDouble() throws Exception + public void testSumPathMixedFilteredAggDouble() { // throws a "Cannot make vector value selector for variant typed nested field [[LONG, DOUBLE]]" skipVectorize(); @@ -1932,7 +1932,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testCastAndSumPath() throws Exception + public void testCastAndSumPath() { testQuery( "SELECT " @@ -1959,7 +1959,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest @Test - public void testCastAndSumPathStrings() throws Exception + public void testCastAndSumPathStrings() { testQuery( "SELECT " @@ -1985,7 +1985,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testReturningAndSumPath() throws Exception + 
public void testReturningAndSumPath() { testQuery( "SELECT " @@ -2012,7 +2012,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest @Test - public void testReturningAndSumPathStrings() throws Exception + public void testReturningAndSumPathStrings() { testQuery( "SELECT " @@ -2038,7 +2038,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByRootKeys() throws Exception + public void testGroupByRootKeys() { cannotVectorize(); testQuery( @@ -2080,7 +2080,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByRootKeysJsonPath() throws Exception + public void testGroupByRootKeysJsonPath() { cannotVectorize(); testQuery( @@ -2122,7 +2122,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByRootKeys2() throws Exception + public void testGroupByRootKeys2() { cannotVectorize(); testQuery( @@ -2165,7 +2165,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByAllPaths() throws Exception + public void testGroupByAllPaths() { cannotVectorize(); testQuery( @@ -2207,7 +2207,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByNestedArrayPath() throws Exception + public void testGroupByNestedArrayPath() { testQuery( "SELECT " @@ -2243,7 +2243,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByInvalidPath() throws Exception + public void testGroupByInvalidPath() { testQueryThrows( "SELECT " @@ -2259,7 +2259,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testJsonQuery() throws Exception + public void testJsonQuery() { testQuery( "SELECT JSON_QUERY(nester, '$.n'), JSON_QUERY(nester, '$')\n" @@ -2311,7 +2311,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public 
void testJsonQueryAndJsonObject() throws Exception + public void testJsonQueryAndJsonObject() { testQuery( "SELECT JSON_OBJECT(KEY 'n' VALUE JSON_QUERY(nester, '$.n'), KEY 'x' VALUE JSON_VALUE(nest, '$.x'))\n" @@ -2359,7 +2359,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest } @Test - public void testToJsonAndParseJson() throws Exception + public void testToJsonAndParseJson() { testQuery( "SELECT string, TO_JSON(string), PARSE_JSON(string), PARSE_JSON('{\"foo\":1}'), PARSE_JSON(TO_JSON_STRING(nester))\n" diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java index 179e690c1b1..bfdec4128e8 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java @@ -56,7 +56,7 @@ import java.util.List; public class CalciteParameterQueryTest extends BaseCalciteQueryTest { @Test - public void testSelectConstantParamGetsConstant() throws Exception + public void testSelectConstantParamGetsConstant() { testQuery( "SELECT 1 + ?", @@ -82,7 +82,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testParamsGetOptimizedIntoConstant() throws Exception + public void testParamsGetOptimizedIntoConstant() { testQuery( "SELECT 1 + ?, dim1 FROM foo LIMIT ?", @@ -108,7 +108,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testParametersInSelectAndFilter() throws Exception + public void testParametersInSelectAndFilter() { testQuery( PLANNER_CONFIG_DEFAULT, @@ -140,7 +140,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectTrimFamilyWithParameters() throws Exception + public void testSelectTrimFamilyWithParameters() { // TRIM has some whacky parsing. 
Abuse this to test a bunch of parameters @@ -207,7 +207,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testParamsInInformationSchema() throws Exception + public void testParamsInInformationSchema() { // Not including COUNT DISTINCT, since it isn't supported by BindableAggregate, and so it can't work. testQuery( @@ -231,7 +231,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testParamsInSelectExpressionAndLimit() throws Exception + public void testParamsInSelectExpressionAndLimit() { testQuery( "SELECT SUBSTRING(dim2, ?, ?) FROM druid.foo LIMIT ?", @@ -261,7 +261,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testParamsTuckedInACast() throws Exception + public void testParamsTuckedInACast() { testQuery( "SELECT dim1, m1, COUNT(*) FROM druid.foo WHERE m1 - CAST(? as INT) = dim1 GROUP BY dim1, m1", @@ -294,7 +294,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testParametersInStrangePlaces() throws Exception + public void testParametersInStrangePlaces() { testQuery( "SELECT\n" @@ -338,7 +338,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testParametersInCases() throws Exception + public void testParametersInCases() { testQuery( "SELECT\n" @@ -372,7 +372,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest @Test - public void testTimestamp() throws Exception + public void testTimestamp() { // with millis testQuery( @@ -409,7 +409,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testTimestampString() throws Exception + public void testTimestampString() { // with timestampstring testQuery( @@ -445,7 +445,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testDate() throws Exception + public void testDate() { // with date from 
millis @@ -482,7 +482,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testDoubles() throws Exception + public void testDoubles() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE cnt > ? and cnt < ?", @@ -531,7 +531,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testFloats() throws Exception + public void testFloats() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE cnt = ?", @@ -553,7 +553,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testLongs() throws Exception + public void testLongs() { testQuery( "SELECT COUNT(*)\n" @@ -575,7 +575,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testMissingParameter() throws Exception + public void testMissingParameter() { expectedException.expect(SqlPlanningException.class); expectedException.expectMessage("Parameter at position [0] is not bound"); @@ -590,7 +590,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testPartiallyMissingParameter() throws Exception + public void testPartiallyMissingParameter() { expectedException.expect(SqlPlanningException.class); expectedException.expectMessage("Parameter at position [1] is not bound"); @@ -605,7 +605,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testPartiallyMissingParameterInTheMiddle() throws Exception + public void testPartiallyMissingParameterInTheMiddle() { List params = new ArrayList<>(); params.add(null); @@ -621,7 +621,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testWrongTypeParameter() throws Exception + public void testWrongTypeParameter() { if (!useDefault) { // cannot vectorize inline datasource @@ -666,7 +666,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest } @Test - public void testNullParameter() throws 
Exception + public void testNullParameter() { cannotVectorize(); // contrived example of using null as an sql parameter to at least test the codepath because lots of things dont diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index 1b3f2ce21bc..8bdda17d27a 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -127,6 +127,7 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.internal.matchers.ThrowableMessageMatcher; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -138,7 +139,7 @@ import java.util.stream.Collectors; public class CalciteQueryTest extends BaseCalciteQueryTest { @Test - public void testGroupByWithPostAggregatorReferencingTimeFloorColumnOnTimeseries() throws Exception + public void testGroupByWithPostAggregatorReferencingTimeFloorColumnOnTimeseries() { cannotVectorize(); @@ -184,7 +185,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testInformationSchemaSchemata() throws Exception + public void testInformationSchemaSchemata() { testQuery( "SELECT DISTINCT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA", @@ -200,7 +201,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testInformationSchemaTables() throws Exception + public void testInformationSchemaTables() { testQuery( "SELECT TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE, IS_JOINABLE, IS_BROADCAST\n" @@ -274,7 +275,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testInformationSchemaColumnsOnTable() throws Exception + public void testInformationSchemaColumnsOnTable() { testQuery( "SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE\n" @@ -295,7 +296,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - 
public void testInformationSchemaColumnsOnForbiddenTable() throws Exception + public void testInformationSchemaColumnsOnForbiddenTable() { testQuery( "SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE\n" @@ -325,7 +326,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testInformationSchemaColumnsOnView() throws Exception + public void testInformationSchemaColumnsOnView() { testQuery( "SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE\n" @@ -339,7 +340,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testInformationSchemaColumnsOnAnotherView() throws Exception + public void testInformationSchemaColumnsOnAnotherView() { testQuery( "SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE\n" @@ -355,7 +356,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testAggregatorsOnInformationSchemaColumns() throws Exception + public void testAggregatorsOnInformationSchemaColumns() { // Not including COUNT DISTINCT, since it isn't supported by BindableAggregate, and so it can't work. 
testQuery( @@ -375,7 +376,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTopNLimitWrapping() throws Exception + public void testTopNLimitWrapping() { List expected; if (NullHandling.replaceWithDefault()) { @@ -412,7 +413,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTopNLimitWrappingOrderByAgg() throws Exception + public void testTopNLimitWrappingOrderByAgg() { testQuery( "SELECT dim1, COUNT(*) FROM druid.foo GROUP BY 1 ORDER BY 2 DESC", @@ -433,7 +434,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByLimitWrapping() throws Exception + public void testGroupByLimitWrapping() { List expected; if (NullHandling.replaceWithDefault()) { @@ -475,7 +476,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithForceLimitPushDown() throws Exception + public void testGroupByWithForceLimitPushDown() { final Map context = new HashMap<>(QUERY_CONTEXT_DEFAULT); context.put(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true); @@ -507,7 +508,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByLimitWrappingOrderByAgg() throws Exception + public void testGroupByLimitWrappingOrderByAgg() { testQuery( "SELECT dim1, dim2, COUNT(*) FROM druid.foo GROUP BY 1, 2 ORDER BY 3 DESC", @@ -540,7 +541,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupBySingleColumnDescendingNoTopN() throws Exception + public void testGroupBySingleColumnDescendingNoTopN() { testQuery( PLANNER_CONFIG_DEFAULT, @@ -579,7 +580,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testEarliestAggregators() throws Exception + public void testEarliestAggregators() { // Cannot vectorize EARLIEST aggregator. 
skipVectorize(); @@ -627,7 +628,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testLatestVectorAggregators() throws Exception + public void testLatestVectorAggregators() { testQuery( "SELECT " @@ -660,7 +661,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testLatestAggregators() throws Exception + public void testLatestAggregators() { testQuery( @@ -706,7 +707,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testEarliestByInvalidTimestamp() throws Exception + public void testEarliestByInvalidTimestamp() { expectedException.expect(SqlPlanningException.class); expectedException.expectMessage("Cannot apply 'EARLIEST_BY' to arguments of type 'EARLIEST_BY(, )"); @@ -719,7 +720,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testLatestByInvalidTimestamp() throws Exception + public void testLatestByInvalidTimestamp() { expectedException.expect(SqlPlanningException.class); expectedException.expectMessage("Cannot apply 'LATEST_BY' to arguments of type 'LATEST_BY(, )"); @@ -733,7 +734,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest // This test the on-heap version of the AnyAggregator (Double/Float/Long/String) @Test - public void testAnyAggregator() throws Exception + public void testAnyAggregator() { // Cannot vectorize virtual expressions. 
skipVectorize(); @@ -776,7 +777,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest // This test the on-heap version of the AnyAggregator (Double/Float/Long) against numeric columns // that have null values (when run in SQL compatible null mode) @Test - public void testAnyAggregatorsOnHeapNumericNulls() throws Exception + public void testAnyAggregatorsOnHeapNumericNulls() { testQuery( "SELECT ANY_VALUE(l1), ANY_VALUE(d1), ANY_VALUE(f1) FROM druid.numfoo", @@ -804,7 +805,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest // This test the off-heap (buffer) version of the AnyAggregator (Double/Float/Long) against numeric columns // that have null values (when run in SQL compatible null mode) @Test - public void testAnyAggregatorsOffHeapNumericNulls() throws Exception + public void testAnyAggregatorsOffHeapNumericNulls() { testQuery( "SELECT ANY_VALUE(l1), ANY_VALUE(d1), ANY_VALUE(f1) FROM druid.numfoo GROUP BY dim2", @@ -842,7 +843,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest // This test the off-heap (buffer) version of the LatestAggregator (Double/Float/Long) @Test - public void testPrimitiveLatestInSubquery() throws Exception + public void testPrimitiveLatestInSubquery() { testQuery( "SELECT SUM(val1), SUM(val2), SUM(val3) FROM (SELECT dim2, LATEST(m1) AS val1, LATEST(cnt) AS val2, LATEST(m2) AS val3 FROM foo GROUP BY dim2)", @@ -889,7 +890,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testPrimitiveLatestInSubqueryGroupBy() throws Exception + public void testPrimitiveLatestInSubqueryGroupBy() { testQuery( "SELECT dim2, LATEST(m1) AS val1 FROM foo GROUP BY dim2", @@ -923,7 +924,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testStringLatestGroupBy() throws Exception + public void testStringLatestGroupBy() { testQuery( "SELECT dim2, LATEST(dim4,10) AS val1 FROM druid.numfoo GROUP BY dim2", @@ -957,7 +958,7 @@ public class CalciteQueryTest extends 
BaseCalciteQueryTest // This test the off-heap (buffer) version of the EarliestAggregator (Double/Float/Long) @Test - public void testPrimitiveEarliestInSubquery() throws Exception + public void testPrimitiveEarliestInSubquery() { // Cannot vectorize EARLIEST aggregator. skipVectorize(); @@ -1008,7 +1009,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest // This test the off-heap (buffer) version of the LatestAggregator (String) @Test - public void testStringLatestInSubquery() throws Exception + public void testStringLatestInSubquery() { testQuery( "SELECT SUM(val) FROM (SELECT dim2, LATEST(dim1, 10) AS val FROM foo GROUP BY dim2)", @@ -1056,7 +1057,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest // This test the off-heap (buffer) version of the EarliestAggregator (String) @Test - public void testStringEarliestInSubquery() throws Exception + public void testStringEarliestInSubquery() { // Cannot vectorize EARLIEST aggregator. skipVectorize(); @@ -1116,7 +1117,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest // This test the off-heap (buffer) version of the AnyAggregator (Double/Float/Long) @Test - public void testPrimitiveAnyInSubquery() throws Exception + public void testPrimitiveAnyInSubquery() { // The grouping works like this // dim2 -> m1 | m2 @@ -1170,7 +1171,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest // This test the off-heap (buffer) version of the AnyAggregator (String) @Test - public void testStringAnyInSubquery() throws Exception + public void testStringAnyInSubquery() { testQuery( "SELECT SUM(val) FROM (SELECT dim2, ANY_VALUE(dim1, 10) AS val FROM foo GROUP BY dim2)", @@ -1216,7 +1217,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testEarliestAggregatorsNumericNulls() throws Exception + public void testEarliestAggregatorsNumericNulls() { // Cannot vectorize EARLIEST aggregator. 
skipVectorize(); @@ -1245,7 +1246,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testLatestAggregatorsNumericNull() throws Exception + public void testLatestAggregatorsNumericNull() { testQuery( "SELECT LATEST(l1), LATEST(d1), LATEST(f1) FROM druid.numfoo", @@ -1275,7 +1276,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFirstLatestAggregatorsSkipNulls() throws Exception + public void testFirstLatestAggregatorsSkipNulls() { // Cannot vectorize EARLIEST aggregator. skipVectorize(); @@ -1320,7 +1321,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testAnyAggregatorsDoesNotSkipNulls() throws Exception + public void testAnyAggregatorsDoesNotSkipNulls() { testQuery( "SELECT ANY_VALUE(dim1, 32), ANY_VALUE(l2), ANY_VALUE(d2), ANY_VALUE(f2) FROM druid.numfoo", @@ -1348,7 +1349,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testAnyAggregatorsSkipNullsWithFilter() throws Exception + public void testAnyAggregatorsSkipNullsWithFilter() { final DimFilter filter; if (useDefault) { @@ -1390,7 +1391,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByEarliestFloat() throws Exception + public void testOrderByEarliestFloat() { // Cannot vectorize EARLIEST aggregator. skipVectorize(); @@ -1437,7 +1438,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByEarliestDouble() throws Exception + public void testOrderByEarliestDouble() { // Cannot vectorize EARLIEST aggregator. skipVectorize(); @@ -1484,7 +1485,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByEarliestLong() throws Exception + public void testOrderByEarliestLong() { // Cannot vectorize EARLIEST aggregator. 
skipVectorize(); @@ -1531,7 +1532,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByLatestFloat() throws Exception + public void testOrderByLatestFloat() { List expected; if (NullHandling.replaceWithDefault()) { @@ -1577,7 +1578,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByLatestDouble() throws Exception + public void testOrderByLatestDouble() { List expected; if (NullHandling.replaceWithDefault()) { @@ -1622,7 +1623,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByLatestLong() throws Exception + public void testOrderByLatestLong() { List expected; if (NullHandling.replaceWithDefault()) { @@ -1667,7 +1668,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByAnyFloat() throws Exception + public void testOrderByAnyFloat() { List expected; if (NullHandling.replaceWithDefault()) { @@ -1715,7 +1716,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByAnyDouble() throws Exception + public void testOrderByAnyDouble() { List expected; if (NullHandling.replaceWithDefault()) { @@ -1762,7 +1763,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByAnyLong() throws Exception + public void testOrderByAnyLong() { List expected; if (NullHandling.replaceWithDefault()) { @@ -1809,7 +1810,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByLong() throws Exception + public void testGroupByLong() { testQuery( "SELECT cnt, COUNT(*) FROM druid.foo GROUP BY cnt", @@ -1830,7 +1831,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByOrdinal() throws Exception + public void testGroupByOrdinal() { testQuery( "SELECT cnt, COUNT(*) FROM druid.foo GROUP BY 1", @@ -1852,7 +1853,7 @@ public class CalciteQueryTest extends 
BaseCalciteQueryTest @Test @Ignore("Disabled since GROUP BY alias can confuse the validator; see DruidConformance::isGroupByAlias") - public void testGroupByAndOrderByAlias() throws Exception + public void testGroupByAndOrderByAlias() { testQuery( "SELECT cnt AS theCnt, COUNT(*) FROM druid.foo GROUP BY theCnt ORDER BY theCnt ASC", @@ -1885,7 +1886,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByExpressionAliasedAsOriginalColumnName() throws Exception + public void testGroupByExpressionAliasedAsOriginalColumnName() { testQuery( "SELECT\n" @@ -1910,7 +1911,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByAndOrderByOrdinalOfAlias() throws Exception + public void testGroupByAndOrderByOrdinalOfAlias() { testQuery( "SELECT cnt as theCnt, COUNT(*) FROM druid.foo GROUP BY 1 ORDER BY 1 ASC", @@ -1943,7 +1944,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByFloat() throws Exception + public void testGroupByFloat() { testQuery( "SELECT m1, COUNT(*) FROM druid.foo GROUP BY m1", @@ -1969,7 +1970,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByDouble() throws Exception + public void testGroupByDouble() { testQuery( "SELECT m2, COUNT(*) FROM druid.foo GROUP BY m2", @@ -1995,7 +1996,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnFloat() throws Exception + public void testFilterOnFloat() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE m1 = 1.0", @@ -2016,7 +2017,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnDouble() throws Exception + public void testFilterOnDouble() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE m2 = 1.0", @@ -2037,7 +2038,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHavingOnGrandTotal() throws Exception + public void 
testHavingOnGrandTotal() { testQuery( "SELECT SUM(m1) AS m1_sum FROM foo HAVING m1_sum = 21", @@ -2058,7 +2059,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHavingOnDoubleSum() throws Exception + public void testHavingOnDoubleSum() { testQuery( "SELECT dim1, SUM(m1) AS m1_sum FROM druid.foo GROUP BY dim1 HAVING SUM(m1) > 1", @@ -2097,7 +2098,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHavingOnApproximateCountDistinct() throws Exception + public void testHavingOnApproximateCountDistinct() { testQuery( "SELECT dim2, COUNT(DISTINCT m1) FROM druid.foo GROUP BY dim2 HAVING COUNT(DISTINCT m1) > 1", @@ -2149,7 +2150,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHavingOnExactCountDistinct() throws Exception + public void testHavingOnExactCountDistinct() { testQuery( PLANNER_CONFIG_NO_HLL, @@ -2215,7 +2216,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExactCountDistinctWithFilter() throws Exception + public void testExactCountDistinctWithFilter() throws IOException { final String sqlQuery = "SELECT COUNT(DISTINCT foo.dim1) FILTER(WHERE foo.cnt = 1), SUM(foo.cnt) FROM druid.foo"; @@ -2316,7 +2317,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHavingOnFloatSum() throws Exception + public void testHavingOnFloatSum() { testQuery( "SELECT dim1, CAST(SUM(m1) AS FLOAT) AS m1_sum FROM druid.foo GROUP BY dim1 HAVING CAST(SUM(m1) AS FLOAT) > 1", @@ -2355,7 +2356,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testColumnComparison() throws Exception + public void testColumnComparison() { testQuery( "SELECT dim1, m1, COUNT(*) FROM druid.foo WHERE m1 - 1 = dim1 GROUP BY dim1, m1", @@ -2385,7 +2386,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHavingOnRatio() throws Exception + public void 
testHavingOnRatio() { // Test for https://github.com/apache/druid/issues/4264 @@ -2426,7 +2427,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithSelectProjections() throws Exception + public void testGroupByWithSelectProjections() { testQuery( "SELECT\n" @@ -2458,7 +2459,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithSelectAndOrderByProjections() throws Exception + public void testGroupByWithSelectAndOrderByProjections() { testQuery( "SELECT\n" @@ -2509,7 +2510,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTopNWithSelectProjections() throws Exception + public void testTopNWithSelectProjections() { testQuery( "SELECT\n" @@ -2542,7 +2543,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTopNWithSelectAndOrderByProjections() throws Exception + public void testTopNWithSelectAndOrderByProjections() { testQuery( "SELECT\n" @@ -2579,7 +2580,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllQueries() throws Exception + public void testUnionAllQueries() { testQuery( "SELECT COUNT(*) FROM foo UNION ALL SELECT SUM(cnt) FROM foo UNION ALL SELECT COUNT(*) FROM foo", @@ -2611,7 +2612,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllQueriesWithLimit() throws Exception + public void testUnionAllQueriesWithLimit() { testQuery( "SELECT * FROM (" @@ -2638,7 +2639,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllDifferentTablesWithMapping() throws Exception + public void testUnionAllDifferentTablesWithMapping() { testQuery( "SELECT\n" @@ -2680,7 +2681,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testJoinUnionAllDifferentTablesWithMapping() throws Exception + public void testJoinUnionAllDifferentTablesWithMapping() { 
testQuery( "SELECT\n" @@ -2722,7 +2723,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllTablesColumnCountMismatch() throws Exception + public void testUnionAllTablesColumnCountMismatch() { try { testQuery( @@ -2746,7 +2747,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllTablesColumnTypeMismatchFloatLong() throws Exception + public void testUnionAllTablesColumnTypeMismatchFloatLong() { // "m1" has a different type in foo and foo2 (float vs long), but this query is OK anyway because they can both // be implicitly cast to double. @@ -2851,7 +2852,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllSameTableTwice() throws Exception + public void testUnionAllSameTableTwice() { testQuery( "SELECT\n" @@ -2893,7 +2894,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllSameTableTwiceWithSameMapping() throws Exception + public void testUnionAllSameTableTwiceWithSameMapping() { testQuery( "SELECT\n" @@ -2950,7 +2951,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllSameTableThreeTimes() throws Exception + public void testUnionAllSameTableThreeTimes() { testQuery( "SELECT\n" @@ -2993,7 +2994,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllThreeTablesColumnCountMismatch1() throws Exception + public void testUnionAllThreeTablesColumnCountMismatch1() { try { testQuery( @@ -3017,7 +3018,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllThreeTablesColumnCountMismatch2() throws Exception + public void testUnionAllThreeTablesColumnCountMismatch2() { try { testQuery( @@ -3041,7 +3042,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllThreeTablesColumnCountMismatch3() throws Exception + public void 
testUnionAllThreeTablesColumnCountMismatch3() { try { testQuery( @@ -3065,7 +3066,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnionAllSameTableThreeTimesWithSameMapping() throws Exception + public void testUnionAllSameTableThreeTimesWithSameMapping() { testQuery( "SELECT\n" @@ -3108,7 +3109,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testPruneDeadAggregators() throws Exception + public void testPruneDeadAggregators() { // Test for ProjectAggregatePruneUnusedCallRule. @@ -3134,7 +3135,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testPruneDeadAggregatorsThroughPostProjection() throws Exception + public void testPruneDeadAggregatorsThroughPostProjection() { // Test for ProjectAggregatePruneUnusedCallRule. @@ -3161,7 +3162,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testPruneDeadAggregatorsThroughHaving() throws Exception + public void testPruneDeadAggregatorsThroughHaving() { // Test for ProjectAggregatePruneUnusedCallRule. @@ -3189,7 +3190,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByCaseWhen() throws Exception + public void testGroupByCaseWhen() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -3242,7 +3243,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByCaseWhenOfTripleAnd() throws Exception + public void testGroupByCaseWhenOfTripleAnd() { // Cannot vectorize due to virtual columns. 
cannotVectorize(); @@ -3278,7 +3279,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testNullEmptyStringEquality() throws Exception + public void testNullEmptyStringEquality() { testQuery( "SELECT COUNT(*)\n" @@ -3315,7 +3316,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testNullLongFilter() throws Exception + public void testNullLongFilter() { testQuery( "SELECT COUNT(*)\n" @@ -3353,7 +3354,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testNullDoubleFilter() throws Exception + public void testNullDoubleFilter() { testQuery( "SELECT COUNT(*)\n" @@ -3391,7 +3392,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testNullFloatFilter() throws Exception + public void testNullFloatFilter() { testQuery( "SELECT COUNT(*)\n" @@ -3429,7 +3430,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testNullDoubleTopN() throws Exception + public void testNullDoubleTopN() { List expected; if (useDefault) { @@ -3469,7 +3470,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testNullFloatTopN() throws Exception + public void testNullFloatTopN() { List expected; if (useDefault) { @@ -3509,7 +3510,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testNullLongTopN() throws Exception + public void testNullLongTopN() { List expected; if (useDefault) { @@ -3549,7 +3550,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testLongPredicateIsNull() throws Exception + public void testLongPredicateIsNull() { testQuery( "SELECT l1 is null FROM druid.numfoo", @@ -3589,7 +3590,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testLongPredicateFilterNulls() throws Exception + public void testLongPredicateFilterNulls() { testQuery( "SELECT COUNT(*)\n" @@ -3610,7 +3611,7 @@ 
public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testDoublePredicateFilterNulls() throws Exception + public void testDoublePredicateFilterNulls() { testQuery( "SELECT COUNT(*)\n" @@ -3631,7 +3632,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFloatPredicateFilterNulls() throws Exception + public void testFloatPredicateFilterNulls() { testQuery( "SELECT COUNT(*)\n" @@ -3652,7 +3653,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testEmptyStringEquality() throws Exception + public void testEmptyStringEquality() { if (NullHandling.replaceWithDefault()) { testQuery( @@ -3698,7 +3699,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testNullStringEquality() throws Exception + public void testNullStringEquality() { testQuery( "SELECT COUNT(*)\n" @@ -3723,7 +3724,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCoalesceColumns() throws Exception + public void testCoalesceColumns() { // Doesn't conform to the SQL standard, but it's how we do it. // This example is used in the sql.md doc. @@ -3767,7 +3768,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testColumnIsNull() throws Exception + public void testColumnIsNull() { // Doesn't conform to the SQL standard, but it's how we do it. // This example is used in the sql.md doc. @@ -3791,7 +3792,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSelfJoin() throws Exception + public void testSelfJoin() { // Cannot vectorize due to virtual columns. 
cannotVectorize(); @@ -3841,7 +3842,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingWithNullInFilter() throws Exception + public void testGroupingWithNullInFilter() { testQuery( "SELECT COUNT(*) FROM foo WHERE dim1 IN (NULL)", @@ -3872,7 +3873,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTwoExactCountDistincts() throws Exception + public void testTwoExactCountDistincts() { testQuery( PLANNER_CONFIG_NO_HLL, @@ -3948,7 +3949,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByNothingWithLiterallyFalseFilter() throws Exception + public void testGroupByNothingWithLiterallyFalseFilter() { testQuery( "SELECT COUNT(*), MAX(cnt) FROM druid.foo WHERE 1 = 0", @@ -3973,7 +3974,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByNothingWithImpossibleTimeFilter() throws Exception + public void testGroupByNothingWithImpossibleTimeFilter() { // Regression test for https://github.com/apache/druid/issues/7671 @@ -3997,7 +3998,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithImpossibleTimeFilter() throws Exception + public void testGroupByWithImpossibleTimeFilter() { // this gets optimized into 'false' testQuery( @@ -4020,7 +4021,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByOneColumnWithLiterallyFalseFilter() throws Exception + public void testGroupByOneColumnWithLiterallyFalseFilter() { testQuery( "SELECT COUNT(*), MAX(cnt) FROM druid.foo WHERE 1 = 0 GROUP BY dim1", @@ -4043,7 +4044,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithFilterMatchingNothing() throws Exception + public void testGroupByWithFilterMatchingNothing() { testQuery( "SELECT COUNT(*), MAX(cnt) FROM druid.foo WHERE dim1 = 'foobar'", @@ -4067,7 +4068,7 @@ public class 
CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithGroupByEmpty() throws Exception + public void testGroupByWithGroupByEmpty() { testQuery( "SELECT COUNT(*), SUM(cnt), MIN(cnt) FROM druid.foo GROUP BY ()", @@ -4089,7 +4090,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithFilterMatchingNothingWithGroupByLiteral() throws Exception + public void testGroupByWithFilterMatchingNothingWithGroupByLiteral() { testQuery( "SELECT COUNT(*), MAX(cnt) FROM druid.foo WHERE dim1 = 'foobar' GROUP BY 'dummy'", @@ -4111,7 +4112,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountNonNullColumn() throws Exception + public void testCountNonNullColumn() { testQuery( "SELECT COUNT(cnt) FROM druid.foo", @@ -4140,7 +4141,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountNullableColumn() throws Exception + public void testCountNullableColumn() { testQuery( "SELECT COUNT(dim2) FROM druid.foo", @@ -4169,7 +4170,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountNullableExpression() throws Exception + public void testCountNullableExpression() { testQuery( "SELECT COUNT(CASE WHEN dim2 = 'abc' THEN 'yes' WHEN dim2 = 'def' THEN 'yes' END) FROM druid.foo", @@ -4194,7 +4195,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStar() throws Exception + public void testCountStar() { testQuery( "SELECT COUNT(*) FROM druid.foo", @@ -4214,7 +4215,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarOnCommonTableExpression() throws Exception + public void testCountStarOnCommonTableExpression() { testQuery( "WITH beep (dim1_firstchar) AS (SELECT SUBSTRING(dim1, 1, 1) FROM foo WHERE dim2 = 'a')\n" @@ -4239,7 +4240,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void 
testCountStarOnView() throws Exception + public void testCountStarOnView() { testQuery( "SELECT COUNT(*) FROM view.aview WHERE dim1_firstchar <> 'z'", @@ -4263,7 +4264,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testConfusedView() throws Exception + public void testConfusedView() { testQuery( "SELECT COUNT(*) FROM view.dview as druid WHERE druid.numfoo <> 'z'", @@ -4287,7 +4288,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testViewAndJoin() throws Exception + public void testViewAndJoin() { cannotVectorize(); Map queryContext = withLeftDirectAccessEnabled(QUERY_CONTEXT_DEFAULT); @@ -4338,7 +4339,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithLikeFilter() throws Exception + public void testCountStarWithLikeFilter() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE dim1 like 'a%' OR dim2 like '%xb%' escape 'x'", @@ -4364,7 +4365,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithLongColumnFilters() throws Exception + public void testCountStarWithLongColumnFilters() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE cnt >= 3 OR cnt = 1", @@ -4390,7 +4391,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithLongColumnFiltersOnFloatLiterals() throws Exception + public void testCountStarWithLongColumnFiltersOnFloatLiterals() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE cnt > 1.1 and cnt < 100000001.0", @@ -4470,7 +4471,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithLongColumnFiltersOnTwoPoints() throws Exception + public void testCountStarWithLongColumnFiltersOnTwoPoints() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE cnt = 1 OR cnt = 2", @@ -4491,7 +4492,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnStringAsNumber() 
throws Exception + public void testFilterOnStringAsNumber() { testQuery( "SELECT distinct dim1 FROM druid.foo WHERE " @@ -4529,7 +4530,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSimpleLongAggregations() throws Exception + public void testSimpleLongAggregations() { testQuery( "SELECT MIN(l1), MIN(cnt), MAX(l1) FROM druid.numfoo", @@ -4553,7 +4554,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSimpleDoubleAggregations() throws Exception + public void testSimpleDoubleAggregations() { testQuery( "SELECT MIN(d1), MAX(d1) FROM druid.numfoo", @@ -4576,7 +4577,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSimpleFloatAggregations() throws Exception + public void testSimpleFloatAggregations() { testQuery( "SELECT MIN(m1), MAX(m1) FROM druid.numfoo", @@ -4599,7 +4600,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSimpleAggregations() throws Exception + public void testSimpleAggregations() { testQuery( "SELECT COUNT(*), COUNT(cnt), COUNT(dim1), AVG(cnt), SUM(cnt), SUM(cnt) + MIN(cnt) + MAX(cnt), COUNT(dim2), COUNT(d1), AVG(d1) FROM druid.numfoo", @@ -4698,7 +4699,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithSortOnPostAggregationDefault() throws Exception + public void testGroupByWithSortOnPostAggregationDefault() { // By default this query uses topN. @@ -4729,7 +4730,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithSortOnPostAggregationNoTopNConfig() throws Exception + public void testGroupByWithSortOnPostAggregationNoTopNConfig() { // Use PlannerConfig to disable topN, so this query becomes a groupBy. 
testQuery( @@ -4772,7 +4773,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithSortOnPostAggregationNoTopNContext() throws Exception + public void testGroupByWithSortOnPostAggregationNoTopNContext() { // Use context to disable topN, so this query becomes a groupBy. @@ -4821,7 +4822,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilteredAggregations() throws Exception + public void testFilteredAggregations() { testQuery( "SELECT " @@ -4919,7 +4920,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCaseFilteredAggregationWithGroupBy() throws Exception + public void testCaseFilteredAggregationWithGroupBy() { testQuery( "SELECT\n" @@ -4951,7 +4952,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilteredAggregationWithNotIn() throws Exception + public void testFilteredAggregationWithNotIn() { testQuery( "SELECT\n" @@ -4992,7 +4993,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExpressionAggregations() throws Exception + public void testExpressionAggregations() { // Cannot vectorize due to expressions. 
cannotVectorize(); @@ -5040,7 +5041,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExpressionFilteringAndGrouping() throws Exception + public void testExpressionFilteringAndGrouping() { testQuery( "SELECT\n" @@ -5086,7 +5087,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExpressionFilteringAndGroupingUsingCastToLong() throws Exception + public void testExpressionFilteringAndGroupingUsingCastToLong() { testQuery( "SELECT\n" @@ -5134,7 +5135,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExpressionFilteringAndGroupingOnStringCastToNumber() throws Exception + public void testExpressionFilteringAndGroupingOnStringCastToNumber() { testQuery( "SELECT\n" @@ -5191,7 +5192,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testInFilter() throws Exception + public void testInFilter() { testQuery( "SELECT dim1, COUNT(*) FROM druid.foo WHERE dim1 IN ('abc', 'def', 'ghi') GROUP BY dim1", @@ -5218,7 +5219,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSqlIsNullToInFilter() throws Exception + public void testSqlIsNullToInFilter() { testQuery( "SELECT dim1, COUNT(*) FROM druid.foo WHERE dim1 IS NULL OR dim1 = 'abc' OR dim1 = 'def' OR dim1 = 'ghi' " @@ -5250,7 +5251,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testInFilterWith23Elements() throws Exception + public void testInFilterWith23Elements() { // Regression test for https://github.com/apache/druid/issues/4203. 
@@ -5289,7 +5290,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithDegenerateFilter() throws Exception + public void testCountStarWithDegenerateFilter() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE dim2 = 'a' and (dim1 > 'a' OR dim1 < 'b')", @@ -5312,7 +5313,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithNotOfDegenerateFilter() throws Exception + public void testCountStarWithNotOfDegenerateFilter() { // HashJoinSegmentStorageAdapter is not vectorizable cannotVectorize(); @@ -5368,7 +5369,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithBoundFilterSimplifyOnMetric() throws Exception + public void testCountStarWithBoundFilterSimplifyOnMetric() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE 2.5 < m1 AND m1 < 3.5", @@ -5389,7 +5390,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithBoundFilterSimplifyOr() throws Exception + public void testCountStarWithBoundFilterSimplifyOr() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE (dim1 >= 'a' and dim1 < 'b') OR dim1 = 'ab'", @@ -5434,7 +5435,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testArrayAggQueryOnComplexDatatypes() throws Exception + public void testArrayAggQueryOnComplexDatatypes() { try { testQuery("SELECT ARRAY_AGG(unique_dim1) FROM druid.foo", ImmutableList.of(), ImmutableList.of()); @@ -5450,7 +5451,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testStringAggQueryOnComplexDatatypes() throws Exception + public void testStringAggQueryOnComplexDatatypes() { try { testQuery("SELECT STRING_AGG(unique_dim1, ',') FROM druid.foo", ImmutableList.of(), ImmutableList.of()); @@ -5466,7 +5467,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithBoundFilterSimplifyAnd() 
throws Exception + public void testCountStarWithBoundFilterSimplifyAnd() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE (dim1 >= 'a' and dim1 < 'b') and dim1 = 'abc'", @@ -5487,7 +5488,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithFilterOnCastedString() throws Exception + public void testCountStarWithFilterOnCastedString() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE CAST(dim1 AS bigint) = 2", @@ -5508,7 +5509,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeFilter() throws Exception + public void testCountStarWithTimeFilter() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -5529,7 +5530,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeInIntervalFilter() throws Exception + public void testCountStarWithTimeInIntervalFilter() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -5551,7 +5552,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeInIntervalFilterLosAngeles() throws Exception + public void testCountStarWithTimeInIntervalFilterLosAngeles() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -5573,7 +5574,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeInIntervalFilterInvalidInterval() throws Exception + public void testCountStarWithTimeInIntervalFilterInvalidInterval() { testQueryThrows( "SELECT COUNT(*) FROM druid.foo " @@ -5589,7 +5590,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeInIntervalFilterNonLiteral() throws Exception + public void testCountStarWithTimeInIntervalFilterNonLiteral() { testQueryThrows( "SELECT COUNT(*) FROM druid.foo " @@ -5605,7 +5606,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithBetweenTimeFilterUsingMilliseconds() throws 
Exception + public void testCountStarWithBetweenTimeFilterUsingMilliseconds() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -5626,7 +5627,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithBetweenTimeFilterUsingMillisecondsInStringLiterals() throws Exception + public void testCountStarWithBetweenTimeFilterUsingMillisecondsInStringLiterals() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -5647,7 +5648,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRemoveUselessCaseWhen() throws Exception + public void testRemoveUselessCaseWhen() { testQuery( "SELECT COUNT(*) FROM druid.foo\n" @@ -5675,7 +5676,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeMillisecondFilters() throws Exception + public void testCountStarWithTimeMillisecondFilters() { testQuery( "SELECT COUNT(*) FROM druid.foo\n" @@ -5702,7 +5703,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeFilterUsingStringLiterals() throws Exception + public void testCountStarWithTimeFilterUsingStringLiterals() { // Strings are implicitly cast to timestamps. Test a few different forms. 
@@ -5745,7 +5746,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithSinglePointInTime() throws Exception + public void testCountStarWithSinglePointInTime() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE __time = TIMESTAMP '2000-01-01 00:00:00'", @@ -5765,7 +5766,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTwoPointsInTime() throws Exception + public void testCountStarWithTwoPointsInTime() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE " @@ -5791,7 +5792,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithComplexDisjointTimeFilter() throws Exception + public void testCountStarWithComplexDisjointTimeFilter() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -5831,7 +5832,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithNotOfComplexDisjointTimeFilter() throws Exception + public void testCountStarWithNotOfComplexDisjointTimeFilter() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -5872,7 +5873,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithNotTimeFilter() throws Exception + public void testCountStarWithNotTimeFilter() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -5902,7 +5903,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeAndDimFilter() throws Exception + public void testCountStarWithTimeAndDimFilter() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -5925,7 +5926,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeOrDimFilter() throws Exception + public void testCountStarWithTimeOrDimFilter() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -5961,7 +5962,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void 
testCountStarWithTimeFilterOnLongColumnUsingExtractEpoch() throws Exception + public void testCountStarWithTimeFilterOnLongColumnUsingExtractEpoch() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE " @@ -5994,7 +5995,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeFilterOnLongColumnUsingExtractEpochFromDate() throws Exception + public void testCountStarWithTimeFilterOnLongColumnUsingExtractEpochFromDate() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE " @@ -6027,7 +6028,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountStarWithTimeFilterOnLongColumnUsingTimestampToMillis() throws Exception + public void testCountStarWithTimeFilterOnLongColumnUsingTimestampToMillis() { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE " @@ -6060,7 +6061,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSumOfString() throws Exception + public void testSumOfString() { testQuery( "SELECT SUM(CAST(dim1 AS INTEGER)) FROM druid.foo", @@ -6090,7 +6091,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSumOfExtractionFn() throws Exception + public void testSumOfExtractionFn() { // Cannot vectorize due to expressions in aggregators. 
cannotVectorize(); @@ -6123,7 +6124,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesWithTimeFilterOnLongColumnUsingMillisToTimestamp() throws Exception + public void testTimeseriesWithTimeFilterOnLongColumnUsingMillisToTimestamp() { testQuery( "SELECT\n" @@ -6167,7 +6168,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountDistinct() throws Exception + public void testCountDistinct() { testQuery( "SELECT SUM(cnt), COUNT(distinct dim2), COUNT(distinct unique_dim1) FROM druid.foo", @@ -6199,7 +6200,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountDistinctOfCaseWhen() throws Exception + public void testCountDistinctOfCaseWhen() { testQuery( "SELECT\n" @@ -6250,7 +6251,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExactCountDistinct() throws Exception + public void testExactCountDistinct() { // When HLL is disabled, do exact count distinct through a nested query. @@ -6289,7 +6290,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testApproxCountDistinctWhenHllDisabled() throws Exception + public void testApproxCountDistinctWhenHllDisabled() { // When HLL is disabled, APPROX_COUNT_DISTINCT is still approximate. @@ -6323,7 +6324,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testApproxCountDistinctBuiltin() throws Exception + public void testApproxCountDistinctBuiltin() { testQuery( "SELECT APPROX_COUNT_DISTINCT_BUILTIN(dim2) FROM druid.foo", @@ -6353,7 +6354,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExactCountDistinctWithGroupingAndOtherAggregators() throws Exception + public void testExactCountDistinctWithGroupingAndOtherAggregators() { // When HLL is disabled, do exact count distinct through a nested query. 
@@ -6407,7 +6408,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testMultipleExactCountDistinctWithGroupingAndOtherAggregators() throws Exception + public void testMultipleExactCountDistinctWithGroupingAndOtherAggregators() throws IOException { requireMergeBuffers(4); testQuery( @@ -6485,7 +6486,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testApproxCountDistinct() throws Exception + public void testApproxCountDistinct() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -6565,7 +6566,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testApproxCountDistinctOnVectorizableSingleStringExpression() throws Exception + public void testApproxCountDistinctOnVectorizableSingleStringExpression() { testQuery( "SELECT APPROX_COUNT_DISTINCT(dim1 || 'hello') FROM druid.foo", @@ -6596,7 +6597,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testNestedGroupBy() throws Exception + public void testNestedGroupBy() { testQuery( "SELECT\n" @@ -6663,7 +6664,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testDoubleNestedGroupBy() throws Exception + public void testDoubleNestedGroupBy() throws IOException { requireMergeBuffers(3); testQuery( @@ -6718,7 +6719,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testDoubleNestedGroupBy2() throws Exception + public void testDoubleNestedGroupBy2() { // This test fails when AggregateMergeRule is added to Rules.ABSTRACT_RELATIONAL_RULES. So, we don't add that // rule for now. Possible bug in the rule. 
@@ -6770,7 +6771,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExactCountDistinctUsingSubquery() throws Exception + public void testExactCountDistinctUsingSubquery() { testQuery( "SELECT\n" @@ -6811,7 +6812,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExactCountDistinctUsingSubqueryOnUnionAllTables() throws Exception + public void testExactCountDistinctUsingSubqueryOnUnionAllTables() { testQuery( "SELECT\n" @@ -6863,7 +6864,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testMinMaxAvgDailyCountWithLimit() throws Exception + public void testMinMaxAvgDailyCountWithLimit() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -6940,7 +6941,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testAvgDailyCountDistinct() throws Exception + public void testAvgDailyCountDistinct() { // Cannot vectorize outer query due to inlined inner query. cannotVectorize(); @@ -7015,7 +7016,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExactCountDistinctOfSemiJoinResult() throws Exception + public void testExactCountDistinctOfSemiJoinResult() { // Cannot vectorize due to extraction dimension spec. 
cannotVectorize(); @@ -7084,7 +7085,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testMaxSubqueryRows() throws Exception + public void testMaxSubqueryRows() { expectedException.expect(ResourceLimitExceededException.class); expectedException.expectMessage("Subquery generated results beyond maximum[2]"); @@ -7104,7 +7105,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testZeroMaxNumericInFilter() throws Exception + public void testZeroMaxNumericInFilter() { expectedException.expect(UOE.class); expectedException.expectMessage("[maxNumericInFilters] must be greater than 0"); @@ -7124,7 +7125,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHighestMaxNumericInFilter() throws Exception + public void testHighestMaxNumericInFilter() { expectedException.expect(UOE.class); expectedException.expectMessage("Expected parameter[maxNumericInFilters] cannot exceed system set value of [100]"); @@ -7144,7 +7145,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testQueryWithMoreThanMaxNumericInFilter() throws Exception + public void testQueryWithMoreThanMaxNumericInFilter() { expectedException.expect(UOE.class); expectedException.expectMessage("The number of values in the IN clause for [dim6] in query exceeds configured maxNumericFilter limit of [2] for INs. 
Cast [3] values of IN clause to String"); @@ -7164,7 +7165,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExactCountDistinctUsingSubqueryWithWherePushDown() throws Exception + public void testExactCountDistinctUsingSubqueryWithWherePushDown() { testQuery( "SELECT\n" @@ -7246,7 +7247,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExactCountDistinctUsingSubqueryWithWhereToOuterFilter() throws Exception + public void testExactCountDistinctUsingSubqueryWithWhereToOuterFilter() { // Cannot vectorize topN operator. cannotVectorize(); @@ -7294,7 +7295,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCompareExactAndApproximateCountDistinctUsingSubquery() throws Exception + public void testCompareExactAndApproximateCountDistinctUsingSubquery() { testQuery( "SELECT\n" @@ -7343,7 +7344,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHistogramUsingSubquery() throws Exception + public void testHistogramUsingSubquery() { testQuery( "SELECT\n" @@ -7388,7 +7389,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHistogramUsingSubqueryWithSort() throws Exception + public void testHistogramUsingSubqueryWithSort() { testQuery( "SELECT\n" @@ -7442,7 +7443,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountDistinctArithmetic() throws Exception + public void testCountDistinctArithmetic() { testQuery( "SELECT\n" @@ -7486,7 +7487,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountDistinctOfSubstring() throws Exception + public void testCountDistinctOfSubstring() { // Cannot vectorize due to extraction dimension spec. 
cannotVectorize(); @@ -7526,7 +7527,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountDistinctOfTrim() throws Exception + public void testCountDistinctOfTrim() { // Test a couple different syntax variants of TRIM. @@ -7563,7 +7564,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSillyQuarters() throws Exception + public void testSillyQuarters() { // Like FLOOR(__time TO QUARTER) but silly. @@ -7596,7 +7597,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRegexpExtract() throws Exception + public void testRegexpExtract() { // Cannot vectorize due to extractionFn in dimension spec. cannotVectorize(); @@ -7647,7 +7648,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRegexpExtractFilterViaNotNullCheck() throws Exception + public void testRegexpExtractFilterViaNotNullCheck() { // Cannot vectorize due to extractionFn in dimension spec. 
cannotVectorize(); @@ -7681,7 +7682,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRegexpLikeFilter() throws Exception + public void testRegexpLikeFilter() { testQuery( "SELECT COUNT(*)\n" @@ -7712,7 +7713,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupBySortPushDown() throws Exception + public void testGroupBySortPushDown() { testQuery( "SELECT dim2, dim1, SUM(cnt) FROM druid.foo GROUP BY dim2, dim1 ORDER BY dim1 LIMIT 4", @@ -7753,7 +7754,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByLimitPushDownWithHavingOnLong() throws Exception + public void testGroupByLimitPushDownWithHavingOnLong() { testQuery( "SELECT dim1, dim2, SUM(cnt) AS thecnt " @@ -7807,7 +7808,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByLimitPushdownExtraction() throws Exception + public void testGroupByLimitPushdownExtraction() { cannotVectorize(); @@ -7848,7 +7849,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnTimeFloor() throws Exception + public void testFilterOnTimeFloor() { testQuery( "SELECT COUNT(*) FROM druid.foo\n" @@ -7871,7 +7872,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupAndFilterOnTimeFloorWithTimeZone() throws Exception + public void testGroupAndFilterOnTimeFloorWithTimeZone() { testQuery( "SELECT TIME_FLOOR(__time, 'P1M', NULL, 'America/Los_Angeles'), COUNT(*)\n" @@ -7904,7 +7905,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnCurrentTimestampWithIntervalArithmetic() throws Exception + public void testFilterOnCurrentTimestampWithIntervalArithmetic() { testQuery( "SELECT COUNT(*) FROM druid.foo\n" @@ -7927,7 +7928,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnCurrentTimestampLosAngeles() 
throws Exception + public void testFilterOnCurrentTimestampLosAngeles() { testQuery( PLANNER_CONFIG_DEFAULT, @@ -7951,7 +7952,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnCurrentTimestampOnView() throws Exception + public void testFilterOnCurrentTimestampOnView() { testQuery( "SELECT * FROM view.bview", @@ -7971,7 +7972,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnCurrentTimestampLosAngelesOnView() throws Exception + public void testFilterOnCurrentTimestampLosAngelesOnView() { // Tests that query context still applies to view SQL; note the result is different from // "testFilterOnCurrentTimestampOnView" above. @@ -7997,7 +7998,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnNotTimeFloor() throws Exception + public void testFilterOnNotTimeFloor() { testQuery( "SELECT COUNT(*) FROM druid.foo\n" @@ -8022,7 +8023,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnTimeFloorComparison() throws Exception + public void testFilterOnTimeFloorComparison() { testQuery( "SELECT COUNT(*) FROM druid.foo\n" @@ -8044,7 +8045,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnTimeFloorComparisonMisaligned() throws Exception + public void testFilterOnTimeFloorComparisonMisaligned() { testQuery( "SELECT COUNT(*) FROM druid.foo\n" @@ -8066,7 +8067,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnTimeExtract() throws Exception + public void testFilterOnTimeExtract() { // Cannot vectorize due to expression filter. cannotVectorize(); @@ -8101,7 +8102,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnTimeExtractWithMultipleDays() throws Exception + public void testFilterOnTimeExtractWithMultipleDays() { // Cannot vectorize due to expression filters. 
cannotVectorize(); @@ -8144,7 +8145,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnTimeExtractWithVariousTimeUnits() throws Exception + public void testFilterOnTimeExtractWithVariousTimeUnits() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -8206,7 +8207,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterOnTimeFloorMisaligned() throws Exception + public void testFilterOnTimeFloorMisaligned() { testQuery( "SELECT COUNT(*) FROM druid.foo " @@ -8225,7 +8226,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByFloor() throws Exception + public void testGroupByFloor() { testQuery( "SELECT floor(CAST(dim1 AS float)), COUNT(*) FROM druid.foo GROUP BY floor(CAST(dim1 AS float))", @@ -8252,7 +8253,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testQueryWithSelectProjectAndIdentityProjectDoesNotRename() throws Exception + public void testQueryWithSelectProjectAndIdentityProjectDoesNotRename() throws IOException { cannotVectorize(); requireMergeBuffers(3); @@ -8353,7 +8354,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByFloorWithOrderBy() throws Exception + public void testGroupByFloorWithOrderBy() { testQuery( "SELECT floor(CAST(dim1 AS float)) AS fl, COUNT(*) FROM druid.foo GROUP BY floor(CAST(dim1 AS float)) ORDER BY fl DESC", @@ -8404,7 +8405,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByFloorTimeAndOneOtherDimensionWithOrderBy() throws Exception + public void testGroupByFloorTimeAndOneOtherDimensionWithOrderBy() { testQuery( "SELECT floor(__time TO year), dim2, COUNT(*)" @@ -8479,7 +8480,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByStringLength() throws Exception + public void testGroupByStringLength() { // Cannot vectorize 
due to virtual columns. cannotVectorize(); @@ -8507,7 +8508,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterAndGroupByLookup() throws Exception + public void testFilterAndGroupByLookup() { // Cannot vectorize due to extraction dimension specs. cannotVectorize(); @@ -8563,7 +8564,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountDistinctOfLookup() throws Exception + public void testCountDistinctOfLookup() { // Cannot vectorize due to extraction dimension spec. cannotVectorize(); @@ -8603,7 +8604,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByExpressionFromLookup() throws Exception + public void testGroupByExpressionFromLookup() { // Cannot vectorize direct queries on lookup tables. cannotVectorize(); @@ -8636,7 +8637,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseries() throws Exception + public void testTimeseries() { testQuery( "SELECT SUM(cnt), gran FROM (\n" @@ -8662,7 +8663,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilteredTimeAggregators() throws Exception + public void testFilteredTimeAggregators() { testQuery( "SELECT\n" @@ -8729,7 +8730,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesLosAngelesViaQueryContext() throws Exception + public void testTimeseriesLosAngelesViaQueryContext() { testQuery( PLANNER_CONFIG_DEFAULT, @@ -8760,7 +8761,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesLosAngelesViaPlannerConfig() throws Exception + public void testTimeseriesLosAngelesViaPlannerConfig() { testQuery( PLANNER_CONFIG_LOS_ANGELES, @@ -8794,7 +8795,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesUsingTimeFloor() throws Exception + public void testTimeseriesUsingTimeFloor() { 
testQuery( "SELECT SUM(cnt), gran FROM (\n" @@ -8820,7 +8821,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesUsingTimeFloorWithTimeShift() throws Exception + public void testTimeseriesUsingTimeFloorWithTimeShift() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -8871,7 +8872,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesUsingTimeFloorWithTimestampAdd() throws Exception + public void testTimeseriesUsingTimeFloorWithTimestampAdd() { testQuery( "SELECT SUM(cnt), gran FROM (\n" @@ -8919,7 +8920,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesUsingTimeFloorWithOrigin() throws Exception + public void testTimeseriesUsingTimeFloorWithOrigin() { testQuery( "SELECT SUM(cnt), gran FROM (\n" @@ -8953,7 +8954,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesLosAngelesUsingTimeFloorConnectionUtc() throws Exception + public void testTimeseriesLosAngelesUsingTimeFloorConnectionUtc() { testQuery( "SELECT SUM(cnt), gran FROM (\n" @@ -8981,7 +8982,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesLosAngelesUsingTimeFloorConnectionLosAngeles() throws Exception + public void testTimeseriesLosAngelesUsingTimeFloorConnectionLosAngeles() { testQuery( PLANNER_CONFIG_DEFAULT, @@ -9012,7 +9013,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesDontSkipEmptyBuckets() throws Exception + public void testTimeseriesDontSkipEmptyBuckets() { // Tests that query context parameters are passed through to the underlying query engine. Long defaultVal = NullHandling.replaceWithDefault() ? 
0L : null; @@ -9065,7 +9066,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesUsingCastAsDate() throws Exception + public void testTimeseriesUsingCastAsDate() { testQuery( "SELECT SUM(cnt), dt FROM (\n" @@ -9095,7 +9096,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesUsingFloorPlusCastAsDate() throws Exception + public void testTimeseriesUsingFloorPlusCastAsDate() { testQuery( "SELECT SUM(cnt), dt FROM (\n" @@ -9121,7 +9122,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesDescending() throws Exception + public void testTimeseriesDescending() { // Cannot vectorize due to descending order. cannotVectorize(); @@ -9151,7 +9152,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesEmptyResultsAggregatorDefaultValues() throws Exception + public void testTimeseriesEmptyResultsAggregatorDefaultValues() { // timeseries with all granularity have a single group, so should return default results for given aggregators testQuery( @@ -9256,7 +9257,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesEmptyResultsAggregatorDefaultValuesNonVectorized() throws Exception + public void testTimeseriesEmptyResultsAggregatorDefaultValuesNonVectorized() { cannotVectorize(); // timeseries with all granularity have a single group, so should return default results for given aggregators @@ -9394,7 +9395,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByAggregatorDefaultValues() throws Exception + public void testGroupByAggregatorDefaultValues() { testQuery( "SELECT\n" @@ -9542,7 +9543,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByAggregatorDefaultValuesNonVectorized() throws Exception + public void testGroupByAggregatorDefaultValuesNonVectorized() { 
cannotVectorize(); testQuery( @@ -9706,7 +9707,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByExtractYear() throws Exception + public void testGroupByExtractYear() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -9755,7 +9756,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByFormatYearAndMonth() throws Exception + public void testGroupByFormatYearAndMonth() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -9804,7 +9805,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByExtractFloorTime() throws Exception + public void testGroupByExtractFloorTime() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -9839,7 +9840,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByExtractFloorTimeLosAngeles() throws Exception + public void testGroupByExtractFloorTimeLosAngeles() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -9878,7 +9879,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesWithLimitNoTopN() throws Exception + public void testTimeseriesWithLimitNoTopN() { testQuery( PLANNER_CONFIG_NO_TOPN, @@ -9908,7 +9909,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesWithLimit() throws Exception + public void testTimeseriesWithLimit() { testQuery( "SELECT gran, SUM(cnt)\n" @@ -9935,7 +9936,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesWithLimitAndOffset() throws Exception + public void testTimeseriesWithLimitAndOffset() { // Timeseries cannot handle offsets, so the query morphs into a groupBy. 
testQuery( @@ -9972,7 +9973,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeseriesWithOrderByAndLimit() throws Exception + public void testTimeseriesWithOrderByAndLimit() { testQuery( "SELECT gran, SUM(cnt)\n" @@ -10000,7 +10001,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByTimeAndOtherDimension() throws Exception + public void testGroupByTimeAndOtherDimension() { testQuery( "SELECT dim2, gran, SUM(cnt)\n" @@ -10062,7 +10063,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByTimeFloorAndDimOnGroupByTimeFloorAndDim() throws Exception + public void testGroupByTimeFloorAndDimOnGroupByTimeFloorAndDim() { testQuery( "SELECT dim2, time_floor(gran, 'P1M') gran, sum(s)\n" @@ -10164,7 +10165,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingSets() throws Exception + public void testGroupingSets() { // Cannot vectorize due to virtual columns. 
cannotVectorize(); @@ -10228,7 +10229,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingAggregatorDifferentOrder() throws Exception + public void testGroupingAggregatorDifferentOrder() throws IOException { requireMergeBuffers(3); @@ -10294,7 +10295,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingAggregatorWithPostAggregator() throws Exception + public void testGroupingAggregatorWithPostAggregator() { List resultList; if (NullHandling.sqlCompatible()) { @@ -10352,7 +10353,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingSetsWithNumericDimension() throws Exception + public void testGroupingSetsWithNumericDimension() { testQuery( "SELECT cnt, COUNT(*)\n" @@ -10382,7 +10383,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByRollup() throws Exception + public void testGroupByRollup() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -10440,7 +10441,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByRollupDifferentOrder() throws Exception + public void testGroupByRollupDifferentOrder() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -10498,7 +10499,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByCube() throws Exception + public void testGroupByCube() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -10559,7 +10560,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingSetsWithDummyDimension() throws Exception + public void testGroupingSetsWithDummyDimension() { // Cannot vectorize due to virtual columns. 
cannotVectorize(); @@ -10620,7 +10621,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingSetsNoSuperset() throws Exception + public void testGroupingSetsNoSuperset() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -10676,7 +10677,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingSetsWithOrderByDimension() throws Exception + public void testGroupingSetsWithOrderByDimension() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -10749,7 +10750,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingSetsWithOrderByAggregator() throws Exception + public void testGroupingSetsWithOrderByAggregator() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -10817,7 +10818,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingSetsWithOrderByAggregatorWithLimit() throws Exception + public void testGroupingSetsWithOrderByAggregatorWithLimit() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -10881,7 +10882,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeExtractWithTooFewArguments() throws Exception + public void testTimeExtractWithTooFewArguments() { // Regression test for https://github.com/apache/druid/pull/7710. 
try { @@ -10898,7 +10899,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUsingSubqueryAsFilterOnTwoColumns() throws Exception + public void testUsingSubqueryAsFilterOnTwoColumns() { testQuery( "SELECT __time, cnt, dim1, dim2 FROM druid.foo " @@ -10957,7 +10958,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUsingSubqueryAsFilterWithInnerSort() throws Exception + public void testUsingSubqueryAsFilterWithInnerSort() { // Regression test for https://github.com/apache/druid/issues/4208 @@ -11009,7 +11010,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUsingSubqueryWithLimit() throws Exception + public void testUsingSubqueryWithLimit() { // Cannot vectorize scan query. cannotVectorize(); @@ -11040,7 +11041,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUsingSubqueryWithoutLimit() throws Exception + public void testUsingSubqueryWithoutLimit() { testQuery( "SELECT COUNT(*) AS cnt FROM ( SELECT * FROM druid.foo ) tmpA", @@ -11060,7 +11061,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testUnicodeFilterAndGroupBy() throws Exception + public void testUnicodeFilterAndGroupBy() { testQuery( "SELECT\n" @@ -11097,7 +11098,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByAlongWithAliasOrderByTimeGroupByMulti() throws Exception + public void testOrderByAlongWithAliasOrderByTimeGroupByMulti() { testQuery( "select __time as bug, dim2 from druid.foo group by 1, 2 order by 1 limit 1", @@ -11130,7 +11131,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderByAlongWithAliasOrderByTimeGroupByOneCol() throws Exception + public void testOrderByAlongWithAliasOrderByTimeGroupByOneCol() { testQuery( "select __time as bug from druid.foo group by 1 order by 1 limit 1", @@ -11154,7 +11155,7 @@ public 
class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testProjectAfterSort() throws Exception + public void testProjectAfterSort() { testQuery( "select dim1 from (select dim1, dim2, count(*) cnt from druid.foo group by dim1, dim2 order by cnt)", @@ -11184,7 +11185,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testProjectAfterSort2() throws Exception + public void testProjectAfterSort2() { testQuery( "select s / cnt, dim1, dim2, s from (select dim1, dim2, count(*) cnt, sum(m2) s from druid.foo group by dim1, dim2 order by cnt)", @@ -11223,7 +11224,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest @Test @Ignore("In Calcite 1.17, this test worked, but after upgrading to Calcite 1.21, this query fails with:" + " org.apache.calcite.sql.validate.SqlValidatorException: Column 'dim1' is ambiguous") - public void testProjectAfterSort3() throws Exception + public void testProjectAfterSort3() { testQuery( "select dim1 from (select dim1, dim1, count(*) cnt from druid.foo group by dim1, dim1 order by cnt)", @@ -11261,7 +11262,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testProjectAfterSort3WithoutAmbiguity() throws Exception + public void testProjectAfterSort3WithoutAmbiguity() { // This query is equivalent to the one in testProjectAfterSort3 but renames the second grouping column // to avoid the ambiguous name exception. The inner sort is also optimized out in Calcite 1.21. @@ -11292,7 +11293,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSortProjectAfterNestedGroupBy() throws Exception + public void testSortProjectAfterNestedGroupBy() { testQuery( "SELECT " @@ -11372,7 +11373,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testPostAggWithTimeseries() throws Exception + public void testPostAggWithTimeseries() { // Cannot vectorize due to descending order. 
cannotVectorize(); @@ -11415,7 +11416,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testPostAggWithTopN() throws Exception + public void testPostAggWithTopN() { testQuery( "SELECT " @@ -11477,7 +11478,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testConcat() throws Exception + public void testConcat() { testQuery( "SELECT CONCAT(dim1, '-', dim1, '_', dim1) as dimX FROM foo", @@ -11533,7 +11534,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testConcatGroup() throws Exception + public void testConcatGroup() { testQuery( "SELECT CONCAT(dim1, '-', dim1, '_', dim1) as dimX FROM foo GROUP BY 1", @@ -11602,7 +11603,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTextcat() throws Exception + public void testTextcat() { testQuery( "SELECT textcat(dim1, dim1) as dimX FROM foo", @@ -11654,7 +11655,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRequireTimeConditionPositive() throws Exception + public void testRequireTimeConditionPositive() { // simple timeseries testQuery( @@ -11796,7 +11797,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRequireTimeConditionLogicalValuePositive() throws Exception + public void testRequireTimeConditionLogicalValuePositive() { testQuery( PLANNER_CONFIG_REQUIRE_TIME_CONDITION, @@ -11819,7 +11820,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRequireTimeConditionSimpleQueryNegative() throws Exception + public void testRequireTimeConditionSimpleQueryNegative() { expectedException.expect(CannotBuildQueryException.class); expectedException.expectMessage("__time column"); @@ -11839,7 +11840,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRequireTimeConditionSubQueryNegative() throws Exception + public void 
testRequireTimeConditionSubQueryNegative() { expectedException.expect(CannotBuildQueryException.class); expectedException.expectMessage("__time column"); @@ -11857,7 +11858,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRequireTimeConditionSemiJoinNegative() throws Exception + public void testRequireTimeConditionSemiJoinNegative() { expectedException.expect(CannotBuildQueryException.class); expectedException.expectMessage("__time column"); @@ -11876,7 +11877,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterFloatDimension() throws Exception + public void testFilterFloatDimension() { testQuery( "SELECT dim1 FROM numfoo WHERE f1 = 0.1 LIMIT 1", @@ -11898,7 +11899,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterDoubleDimension() throws Exception + public void testFilterDoubleDimension() { testQuery( "SELECT dim1 FROM numfoo WHERE d1 = 1.7 LIMIT 1", @@ -11920,7 +11921,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testFilterLongDimension() throws Exception + public void testFilterLongDimension() { testQuery( "SELECT dim1 FROM numfoo WHERE l1 = 7 LIMIT 1", @@ -11942,7 +11943,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTrigonometricFunction() throws Exception + public void testTrigonometricFunction() { testQuery( PLANNER_CONFIG_DEFAULT, @@ -11991,7 +11992,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRadiansAndDegrees() throws Exception + public void testRadiansAndDegrees() { testQuery( "SELECT RADIANS(m1 * 15)/DEGREES(m2) FROM numfoo WHERE dim1 = '1'", @@ -12015,7 +12016,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimestampDiff() throws Exception + public void testTimestampDiff() { testQuery( "SELECT TIMESTAMPDIFF(DAY, TIMESTAMP '1999-01-01 00:00:00', __time), \n" 
@@ -12067,7 +12068,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimestampCeil() throws Exception + public void testTimestampCeil() { testQuery( "SELECT CEIL(TIMESTAMP '2000-01-01 00:00:00' TO DAY), \n" @@ -12105,7 +12106,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testNvlColumns() throws Exception + public void testNvlColumns() { // Cannot vectorize due to usage of expressions. cannotVectorize(); @@ -12146,7 +12147,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByWithLiteralInSubqueryGrouping() throws Exception + public void testGroupByWithLiteralInSubqueryGrouping() { testQuery( "SELECT \n" @@ -12209,7 +12210,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testLeftRightStringOperators() throws Exception + public void testLeftRightStringOperators() { testQuery( "SELECT\n" @@ -12243,7 +12244,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testQueryContextOuterLimit() throws Exception + public void testQueryContextOuterLimit() { Map outerLimitContext = new HashMap<>(QUERY_CONTEXT_DEFAULT); outerLimitContext.put(PlannerContext.CTX_SQL_OUTER_LIMIT, 4); @@ -12333,7 +12334,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRepeatedIdenticalVirtualExpressionGrouping() throws Exception + public void testRepeatedIdenticalVirtualExpressionGrouping() { final String query = "SELECT \n" + "\tCASE dim1 WHEN NULL THEN FALSE ELSE TRUE END AS col_a,\n" @@ -12365,7 +12366,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testValidationErrorNullLiteralIllegal() throws Exception + public void testValidationErrorNullLiteralIllegal() { expectedException.expectMessage("Illegal use of 'NULL'"); @@ -12377,7 +12378,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void 
testValidationErrorNonLiteralIllegal() throws Exception + public void testValidationErrorNonLiteralIllegal() { expectedException.expectMessage("Argument to function 'REGEXP_LIKE' must be a literal"); @@ -12389,7 +12390,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testValidationErrorWrongTypeLiteral() throws Exception + public void testValidationErrorWrongTypeLiteral() { expectedException.expectMessage("Cannot apply 'REGEXP_LIKE' to arguments"); @@ -12401,7 +12402,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeStampAddZeroDayPeriod() throws Exception + public void testTimeStampAddZeroDayPeriod() { testQuery( "SELECT TIMESTAMPADD(DAY, 0, \"__time\") FROM druid.foo", @@ -12426,7 +12427,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeStampAddZeroMonthPeriod() throws Exception + public void testTimeStampAddZeroMonthPeriod() { testQuery( "SELECT TIMESTAMPADD(MONTH, 0, \"__time\") FROM druid.foo", @@ -12455,7 +12456,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testTimeStampAddZeroYearPeriod() throws Exception + public void testTimeStampAddZeroYearPeriod() { skipVectorize(); @@ -12490,7 +12491,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest * see https://github.com/apache/druid/issues/10530 for more information */ @Test - public void testTimeStampAddConversion() throws Exception + public void testTimeStampAddConversion() { final PeriodGranularity periodGranularity = new PeriodGranularity(new Period("P1M"), null, null); @@ -12553,7 +12554,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingSetsWithLimit() throws Exception + public void testGroupingSetsWithLimit() { // Cannot vectorize due to virtual columns. 
cannotVectorize(); @@ -12620,7 +12621,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupingSetsWithLimitOrderByGran() throws Exception + public void testGroupingSetsWithLimitOrderByGran() { // Cannot vectorize due to virtual columns. cannotVectorize(); @@ -12695,7 +12696,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testLookupWithNull() throws Exception + public void testLookupWithNull() { List expected; if (useDefault) { @@ -12731,7 +12732,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testRoundFunc() throws Exception + public void testRoundFunc() { testQuery( @@ -12770,7 +12771,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCountAndAverageByConstantVirtualColumn() throws Exception + public void testCountAndAverageByConstantVirtualColumn() { List virtualColumns; List aggs; @@ -12843,7 +12844,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testEmptyGroupWithOffsetDoesntInfiniteLoop() throws Exception + public void testEmptyGroupWithOffsetDoesntInfiniteLoop() { testQuery( "SELECT r0.c, r1.c\n" @@ -12904,7 +12905,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testJoinWithTimeDimension() throws Exception + public void testJoinWithTimeDimension() { testQuery( PLANNER_CONFIG_DEFAULT, @@ -12939,7 +12940,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testExpressionCounts() throws Exception + public void testExpressionCounts() { cannotVectorize(); testQuery( @@ -12988,7 +12989,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testBitwiseAggregatorsTimeseries() throws Exception + public void testBitwiseAggregatorsTimeseries() { cannotVectorize(); testQuery( @@ -13075,7 +13076,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void 
testBitwiseAggregatorsGroupBy() throws Exception + public void testBitwiseAggregatorsGroupBy() { cannotVectorize(); testQuery( @@ -13182,7 +13183,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testStringAgg() throws Exception + public void testStringAgg() { cannotVectorize(); testQuery( @@ -13269,7 +13270,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testStringAggMultiValue() throws Exception + public void testStringAggMultiValue() { cannotVectorize(); testQuery( @@ -13333,7 +13334,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testStringAggNumeric() throws Exception + public void testStringAggNumeric() { cannotVectorize(); testQuery( @@ -13487,7 +13488,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testStringAggExpression() throws Exception + public void testStringAggExpression() { cannotVectorize(); testQuery( @@ -13554,7 +13555,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test(expected = RelOptPlanner.CannotPlanException.class) - public void testStringAggExpressionNonConstantSeparator() throws Exception + public void testStringAggExpressionNonConstantSeparator() { testQuery( "SELECT STRING_AGG(DISTINCT CONCAT(dim1, dim2), CONCAT('|', dim1)) FROM foo", @@ -13564,7 +13565,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testStringAggMaxBytes() throws Exception + public void testStringAggMaxBytes() { cannotVectorize(); testQuery( @@ -13631,7 +13632,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest * see {@link CalciteTests#RAW_ROWS1_WITH_NUMERIC_DIMS} for the input data source of this test */ @Test - public void testHumanReadableFormatFunction() throws Exception + public void testHumanReadableFormatFunction() { // For the row where dim1 = '1', m1 = 4.0 and l1 is null testQuery( @@ -13699,7 +13700,7 @@ public class CalciteQueryTest extends 
BaseCalciteQueryTest } @Test - public void testHumanReadableFormatFunctionExceptionWithWrongNumberType() throws Exception + public void testHumanReadableFormatFunctionExceptionWithWrongNumberType() { this.expectedException.expect(SqlPlanningException.class); this.expectedException.expectMessage("Supported form(s): HUMAN_READABLE_BINARY_BYTE_FORMAT(Number, [Precision])"); @@ -13711,7 +13712,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHumanReadableFormatFunctionWithWrongPrecisionType() throws Exception + public void testHumanReadableFormatFunctionWithWrongPrecisionType() { this.expectedException.expect(SqlPlanningException.class); this.expectedException.expectMessage("Supported form(s): HUMAN_READABLE_BINARY_BYTE_FORMAT(Number, [Precision])"); @@ -13723,7 +13724,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testHumanReadableFormatFunctionWithInvalidNumberOfArguments() throws Exception + public void testHumanReadableFormatFunctionWithInvalidNumberOfArguments() { this.expectedException.expect(SqlPlanningException.class); @@ -13745,7 +13746,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testCommonVirtualExpressionWithDifferentValueType() throws Exception + public void testCommonVirtualExpressionWithDifferentValueType() { testQuery( "select\n" @@ -13785,7 +13786,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest // When optimization in Grouping#applyProject is applied, and it reduces a Group By query to a timeseries, we // want it to return empty bucket if no row matches @Test - public void testReturnEmptyRowWhenGroupByIsConvertedToTimeseriesWithSingleConstantDimension() throws Exception + public void testReturnEmptyRowWhenGroupByIsConvertedToTimeseriesWithSingleConstantDimension() { skipVectorize(); testQuery( @@ -13837,7 +13838,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void 
testReturnEmptyRowWhenGroupByIsConvertedToTimeseriesWithMultipleConstantDimensions() throws Exception + public void testReturnEmptyRowWhenGroupByIsConvertedToTimeseriesWithMultipleConstantDimensions() { skipVectorize(); testQuery( @@ -13899,7 +13900,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testPlanWithInFilterLessThanInSubQueryThreshold() throws Exception + public void testPlanWithInFilterLessThanInSubQueryThreshold() { String query = "SELECT l1 FROM numfoo WHERE l1 IN (4842, 4844, 4845, 14905, 4853, 29064)"; @@ -13934,12 +13935,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGreatestFunctionForNumberWithIsNull() throws Exception + public void testGreatestFunctionForNumberWithIsNull() { String query = "SELECT dim1, MAX(GREATEST(l1, l2)) IS NULL FROM druid.numfoo GROUP BY dim1"; List expectedResult; - List expectedQueries; + List> expectedQueries; if (NullHandling.replaceWithDefault()) { expectedResult = ImmutableList.of( @@ -14000,7 +14001,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testGreatestFunctionForStringWithIsNull() throws Exception + public void testGreatestFunctionForStringWithIsNull() { cannotVectorize(); @@ -14043,7 +14044,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest } @Test - public void testSubqueryTypeMismatchWithLiterals() throws Exception + public void testSubqueryTypeMismatchWithLiterals() { testQuery( "SELECT \n" diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java index 521b8989ea7..b0c1f37e2f3 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java @@ -44,6 +44,7 @@ import org.apache.druid.sql.calcite.util.CalciteTests; import org.junit.Assert; import org.junit.Test; +import 
java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -572,7 +573,7 @@ public class CalciteReplaceDmlTest extends CalciteIngestionDmlTest } @Test - public void testReplaceWithPartitionedByContainingInvalidGranularity() throws Exception + public void testReplaceWithPartitionedByContainingInvalidGranularity() { // Throws a ValidationException, which gets converted to a SqlPlanningException before throwing to end user try { @@ -593,7 +594,7 @@ public class CalciteReplaceDmlTest extends CalciteIngestionDmlTest } @Test - public void testExplainReplaceFromExternal() throws Exception + public void testExplainReplaceFromExternal() throws IOException { // Skip vectorization since otherwise the "context" will change for each subtest. skipVectorize(); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java index 4f39dff99f0..7e9c96d6a00 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java @@ -59,7 +59,7 @@ import java.util.Map; public class CalciteSelectQueryTest extends BaseCalciteQueryTest { @Test - public void testSelectConstantExpression() throws Exception + public void testSelectConstantExpression() { // Test with a Druid-specific function, to make sure they are hooked up correctly even when not selecting // from a table. 
@@ -95,7 +95,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testExpressionContainingNull() throws Exception + public void testExpressionContainingNull() { testQuery( "SELECT ARRAY ['Hello', NULL]", @@ -127,7 +127,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testValuesContainingNull() throws Exception + public void testValuesContainingNull() { testQuery( "SELECT * FROM (VALUES (NULL, 'United States'))", @@ -155,7 +155,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testMultipleValuesContainingNull() throws Exception + public void testMultipleValuesContainingNull() { testQuery( "SELECT * FROM (VALUES (NULL, 'United States'), ('Delhi', 'India'))", @@ -183,7 +183,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testMultipleValuesContainingNullAndIntegerValues() throws Exception + public void testMultipleValuesContainingNullAndIntegerValues() { testQuery( "SELECT * FROM (VALUES (NULL, 'United States'), (50, 'India'))", @@ -211,7 +211,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectNonNumericNumberLiterals() throws Exception + public void testSelectNonNumericNumberLiterals() { // Tests to convert NaN, positive infinity and negative infinity as literals. 
testQuery( @@ -258,7 +258,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest // Test that the integers are getting correctly casted after being passed through a function when not selecting from // a table @Test - public void testDruidLogicalValuesRule() throws Exception + public void testDruidLogicalValuesRule() { testQuery( "SELECT FLOOR(123), CEIL(123), CAST(123.0 AS INTEGER)", @@ -287,7 +287,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectConstantExpressionFromTable() throws Exception + public void testSelectConstantExpressionFromTable() { testQuery( "SELECT 1 + 1, dim1 FROM foo LIMIT 1", @@ -309,7 +309,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectConstantExpressionEquivalentToNaN() throws Exception + public void testSelectConstantExpressionEquivalentToNaN() { expectedException.expectMessage( "'(log10(0) - log10(0))' evaluates to 'NaN' that is not supported in SQL. You can either cast the expression as bigint ('cast((log10(0) - log10(0)) as bigint)') or char ('cast((log10(0) - log10(0)) as char)') or change the expression itself"); @@ -321,7 +321,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectConstantExpressionEquivalentToInfinity() throws Exception + public void testSelectConstantExpressionEquivalentToInfinity() { expectedException.expectMessage( "'log10(0)' evaluates to '-Infinity' that is not supported in SQL. You can either cast the expression as bigint ('cast(log10(0) as bigint)') or char ('cast(log10(0) as char)') or change the expression itself"); @@ -333,7 +333,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectTrimFamily() throws Exception + public void testSelectTrimFamily() { // TRIM has some whacky parsing. Make sure the different forms work. 
@@ -381,7 +381,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectPadFamily() throws Exception + public void testSelectPadFamily() { testQuery( "SELECT\n" @@ -417,7 +417,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testBitwiseExpressions() throws Exception + public void testBitwiseExpressions() { List expected; if (useDefault) { @@ -475,7 +475,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSafeDivideExpressions() throws Exception + public void testSafeDivideExpressions() { List expected; if (useDefault) { @@ -525,7 +525,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testExplainSelectConstantExpression() throws Exception + public void testExplainSelectConstantExpression() { // Skip vectorization since otherwise the "context" will change for each subtest. skipVectorize(); @@ -569,7 +569,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStarWithDimFilter() throws Exception + public void testSelectStarWithDimFilter() { testQuery( PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE, @@ -600,7 +600,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectDistinctWithCascadeExtractionFilter() throws Exception + public void testSelectDistinctWithCascadeExtractionFilter() { testQuery( "SELECT distinct dim1 FROM druid.foo WHERE substring(substring(dim1, 2), 1, 1) = 'e' OR dim2 = 'a'", @@ -635,7 +635,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectDistinctWithStrlenFilter() throws Exception + public void testSelectDistinctWithStrlenFilter() { // Cannot vectorize due to usage of expressions. 
cannotVectorize(); @@ -675,7 +675,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectDistinctWithLimit() throws Exception + public void testSelectDistinctWithLimit() { // Should use topN even if approximate topNs are off, because this query is exact. @@ -708,7 +708,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectDistinctWithSortAsOuterQuery() throws Exception + public void testSelectDistinctWithSortAsOuterQuery() { testQuery( "SELECT * FROM (SELECT DISTINCT dim2 FROM druid.foo ORDER BY dim2) LIMIT 10", @@ -739,7 +739,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectDistinctWithSortAsOuterQuery2() throws Exception + public void testSelectDistinctWithSortAsOuterQuery2() { testQuery( "SELECT * FROM (SELECT DISTINCT dim2 FROM druid.foo ORDER BY dim2 LIMIT 5) LIMIT 10", @@ -770,7 +770,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectDistinctWithSortAsOuterQuery3() throws Exception + public void testSelectDistinctWithSortAsOuterQuery3() { testQuery( "SELECT * FROM (SELECT DISTINCT dim2 FROM druid.foo ORDER BY dim2 DESC LIMIT 5) LIMIT 10", @@ -801,7 +801,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectNonAggregatingWithLimitLiterallyZero() throws Exception + public void testSelectNonAggregatingWithLimitLiterallyZero() { // Query reduces to LIMIT 0. @@ -827,7 +827,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectNonAggregatingWithLimitReducedToZero() throws Exception + public void testSelectNonAggregatingWithLimitReducedToZero() { // Query reduces to LIMIT 0. 
@@ -852,7 +852,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectAggregatingWithLimitReducedToZero() throws Exception + public void testSelectAggregatingWithLimitReducedToZero() { // Query reduces to LIMIT 0. @@ -877,7 +877,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectCurrentTimeAndDateLosAngeles() throws Exception + public void testSelectCurrentTimeAndDateLosAngeles() { final Map context = new HashMap<>(QUERY_CONTEXT_DEFAULT); context.put(PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00.123Z"); @@ -953,7 +953,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectCurrentTimePrecisionTooHigh() throws Exception + public void testSelectCurrentTimePrecisionTooHigh() { testQueryThrows( "SELECT CURRENT_TIMESTAMP(4)", @@ -967,7 +967,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectCountStar() throws Exception + public void testSelectCountStar() { // timeseries with all granularity have a single group, so should return default results for given aggregators // which for count is 0 and sum is null in sql compatible mode or 0.0 in default mode. 
@@ -1058,7 +1058,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStarFromLookup() throws Exception + public void testSelectStarFromLookup() { testQuery( "SELECT * FROM lookup.lookyloo", @@ -1080,7 +1080,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStar() throws Exception + public void testSelectStar() { testQuery( PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE, @@ -1108,7 +1108,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStarOnForbiddenTable() throws Exception + public void testSelectStarOnForbiddenTable() { assertQueryIsForbidden( "SELECT * FROM druid.forbiddenDatasource", @@ -1152,7 +1152,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStarOnForbiddenView() throws Exception + public void testSelectStarOnForbiddenView() { assertQueryIsForbidden( "SELECT * FROM view.forbiddenView", @@ -1193,7 +1193,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStarOnRestrictedView() throws Exception + public void testSelectStarOnRestrictedView() { testQuery( PLANNER_CONFIG_DEFAULT, @@ -1245,7 +1245,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testUnqualifiedTableName() throws Exception + public void testUnqualifiedTableName() { testQuery( "SELECT COUNT(*) FROM foo", @@ -1265,7 +1265,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testExplainSelectStar() throws Exception + public void testExplainSelectStar() { // Skip vectorization since otherwise the "context" will change for each subtest. 
skipVectorize(); @@ -1310,7 +1310,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStarWithLimit() throws Exception + public void testSelectStarWithLimit() { testQuery( PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE, @@ -1335,7 +1335,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStarWithLimitAndOffset() throws Exception + public void testSelectStarWithLimitAndOffset() { testQuery( PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE, @@ -1361,7 +1361,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectWithProjection() throws Exception + public void testSelectWithProjection() { testQuery( "SELECT SUBSTRING(dim2, 1, 1) FROM druid.foo LIMIT 2", @@ -1386,7 +1386,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectWithExpressionFilter() throws Exception + public void testSelectWithExpressionFilter() { testQuery( "SELECT dim1 FROM druid.foo WHERE m1 + 1 = 7", @@ -1410,7 +1410,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStarWithLimitTimeDescending() throws Exception + public void testSelectStarWithLimitTimeDescending() { testQuery( PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE, @@ -1436,7 +1436,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStarWithoutLimitTimeAscending() throws Exception + public void testSelectStarWithoutLimitTimeAscending() { testQuery( PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE, @@ -1467,7 +1467,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest @Test - public void testSelectSingleColumnTwice() throws Exception + public void testSelectSingleColumnTwice() { testQuery( "SELECT dim2 x, dim2 y FROM druid.foo LIMIT 2", @@ -1489,7 +1489,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void 
testSelectSingleColumnWithLimitDescending() throws Exception + public void testSelectSingleColumnWithLimitDescending() { testQuery( "SELECT dim1 FROM druid.foo ORDER BY __time DESC LIMIT 2", @@ -1512,7 +1512,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectStarFromSelectSingleColumnWithLimitDescending() throws Exception + public void testSelectStarFromSelectSingleColumnWithLimitDescending() { // After upgrading to Calcite 1.21, Calcite no longer respects the ORDER BY __time DESC // in the inner query. This is valid, as the SQL standard considers the subquery results to be an unordered @@ -1538,7 +1538,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectLimitWrapping() throws Exception + public void testSelectLimitWrapping() { testQuery( "SELECT dim1 FROM druid.foo ORDER BY __time DESC", @@ -1562,7 +1562,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectLimitWrappingOnTopOfOffset() throws Exception + public void testSelectLimitWrappingOnTopOfOffset() { testQuery( "SELECT dim1 FROM druid.foo ORDER BY __time DESC OFFSET 1", @@ -1587,7 +1587,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectLimitWrappingOnTopOfOffsetAndLowLimit() throws Exception + public void testSelectLimitWrappingOnTopOfOffsetAndLowLimit() { testQuery( "SELECT dim1 FROM druid.foo ORDER BY __time DESC LIMIT 1 OFFSET 1", @@ -1611,7 +1611,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectLimitWrappingOnTopOfOffsetAndHighLimit() throws Exception + public void testSelectLimitWrappingOnTopOfOffsetAndHighLimit() { testQuery( "SELECT dim1 FROM druid.foo ORDER BY __time DESC LIMIT 10 OFFSET 1", @@ -1636,7 +1636,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void 
testSelectLimitWrappingAgainAkaIDontReallyQuiteUnderstandCalciteQueryPlanning() throws Exception + public void testSelectLimitWrappingAgainAkaIDontReallyQuiteUnderstandCalciteQueryPlanning() { // this test is for a specific bug encountered where the 2nd query would not plan with auto limit wrapping, but if // *any* column was removed from the select output, e.g. the first query in this test, then it does plan and @@ -1784,7 +1784,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectProjectionFromSelectSingleColumnWithInnerLimitDescending() throws Exception + public void testSelectProjectionFromSelectSingleColumnWithInnerLimitDescending() { testQuery( "SELECT 'beep ' || dim1 FROM (SELECT dim1 FROM druid.foo ORDER BY __time DESC LIMIT 2)", @@ -1808,7 +1808,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectProjectionFromSelectSingleColumnDescending() throws Exception + public void testSelectProjectionFromSelectSingleColumnDescending() { // Regression test for https://github.com/apache/druid/issues/7768. 
@@ -1840,7 +1840,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testSelectProjectionFromSelectSingleColumnWithInnerAndOuterLimitDescending() throws Exception + public void testSelectProjectionFromSelectSingleColumnWithInnerAndOuterLimitDescending() { testQuery( "SELECT 'beep ' || dim1 FROM (SELECT dim1 FROM druid.foo ORDER BY __time DESC LIMIT 4) LIMIT 2", @@ -1864,7 +1864,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest } @Test - public void testOrderThenLimitThenFilter() throws Exception + public void testOrderThenLimitThenFilter() { testQuery( "SELECT dim1 FROM " diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSimpleQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSimpleQueryTest.java index f749e1535c7..6d196191922 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSimpleQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSimpleQueryTest.java @@ -42,7 +42,7 @@ import org.junit.Test; public class CalciteSimpleQueryTest extends BaseCalciteQueryTest { @Test - public void testGroupByTimeAndDim() throws Exception + public void testGroupByTimeAndDim() { testQuery( "SELECT FLOOR(__time TO MONTH), dim2, SUM(cnt)\n" @@ -90,7 +90,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByDimAndTime() throws Exception + public void testGroupByDimAndTime() { testQuery( "SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n" @@ -138,7 +138,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByDimAndTimeWhereOnTime() throws Exception + public void testGroupByDimAndTimeWhereOnTime() { testQuery( "SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n" @@ -182,7 +182,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByDimAndTimeOnDim() throws Exception + public void testGroupByDimAndTimeOnDim() { 
testQuery( "SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n" @@ -227,7 +227,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByTimeAndDimOrderByDim() throws Exception + public void testGroupByTimeAndDimOrderByDim() { testQuery( "SELECT FLOOR(__time TO MONTH), dim2, SUM(cnt)\n" @@ -282,7 +282,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByTimeAndDimOrderByDimDesc() throws Exception + public void testGroupByTimeAndDimOrderByDimDesc() { testQuery( "SELECT FLOOR(__time TO MONTH), dim2, SUM(cnt)\n" @@ -337,7 +337,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByDimAndTimeOrderByTime() throws Exception + public void testGroupByDimAndTimeOrderByTime() { testQuery( "SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n" @@ -394,7 +394,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByDimAndTimeOrderByTimeDesc() throws Exception + public void testGroupByDimAndTimeOrderByTimeDesc() { testQuery( "SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n" @@ -451,7 +451,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByDimAndTimeOrderByTimeAndDim() throws Exception + public void testGroupByDimAndTimeOrderByTimeAndDim() { testQuery( "SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n" @@ -509,7 +509,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByDimAndTimeOrderByDimAndTime() throws Exception + public void testGroupByDimAndTimeOrderByDimAndTime() { testQuery( "SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n" @@ -567,7 +567,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest } @Test - public void testGroupByDimAndTimeAndDimOrderByDimAndTimeDim() throws Exception + public void testGroupByDimAndTimeAndDimOrderByDimAndTimeDim() { testQuery( "SELECT dim2, 
FLOOR(__time TO MONTH), dim1, SUM(cnt)\n" diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteTimeBoundaryQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteTimeBoundaryQueryTest.java index 6ed157d349d..bb33b714117 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteTimeBoundaryQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteTimeBoundaryQueryTest.java @@ -37,7 +37,7 @@ public class CalciteTimeBoundaryQueryTest extends BaseCalciteQueryTest { // __time for foo is [2000-01-01, 2000-01-02, 2000-01-03, 2001-01-01, 2001-01-02, 2001-01-03] @Test - public void testMaxTimeQuery() throws Exception + public void testMaxTimeQuery() { HashMap queryContext = new HashMap<>(QUERY_CONTEXT_DEFAULT); queryContext.put(QueryContexts.TIME_BOUNDARY_PLANNING_KEY, true); @@ -58,7 +58,7 @@ public class CalciteTimeBoundaryQueryTest extends BaseCalciteQueryTest } @Test - public void testMinTimeQuery() throws Exception + public void testMinTimeQuery() { HashMap queryContext = new HashMap<>(QUERY_CONTEXT_DEFAULT); queryContext.put(QueryContexts.TIME_BOUNDARY_PLANNING_KEY, true); @@ -79,7 +79,7 @@ public class CalciteTimeBoundaryQueryTest extends BaseCalciteQueryTest } @Test - public void testMinTimeQueryWithFilters() throws Exception + public void testMinTimeQueryWithFilters() { HashMap queryContext = new HashMap<>(QUERY_CONTEXT_DEFAULT); queryContext.put(QueryContexts.TIME_BOUNDARY_PLANNING_KEY, true); @@ -107,7 +107,7 @@ public class CalciteTimeBoundaryQueryTest extends BaseCalciteQueryTest // Currently, if both min(__time) and max(__time) are present, we don't convert it // to a timeBoundary query. 
(ref : https://github.com/apache/druid/issues/12479) @Test - public void testMinMaxTimeQuery() throws Exception + public void testMinMaxTimeQuery() { HashMap context = new HashMap<>(QUERY_CONTEXT_DEFAULT); context.put(QueryContexts.TIME_BOUNDARY_PLANNING_KEY, true); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java index 1fe3266d5e1..6e8c43960c8 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java @@ -285,7 +285,10 @@ public class DruidPlannerResourceAnalyzeTest extends BaseCalciteQueryTest authConfig, sql, context, - CalciteTests.REGULAR_USER_AUTH_RESULT + // Use superuser because, in tests, only the superuser has + // permission on system tables, and we must do authorization to + // obtain resources. + CalciteTests.SUPER_USER_AUTH_RESULT ); final Set expectedResources = new HashSet<>(); if (name != null) { diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/SqlVectorizedExpressionSanityTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/SqlVectorizedExpressionSanityTest.java index ec088ce9987..621cfd2ddf9 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/SqlVectorizedExpressionSanityTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/SqlVectorizedExpressionSanityTest.java @@ -40,7 +40,6 @@ import org.apache.druid.segment.generator.SegmentGenerator; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.sql.calcite.planner.CalciteRulesManager; -import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; @@ -111,7 +110,6 @@ 
public class SqlVectorizedExpressionSanityTest extends InitializedNullHandlingTe @BeforeClass public static void setupClass() { - Calcites.setSystemProperties(); ExpressionProcessing.initializeForStrictBooleansTests(true); CLOSER = Closer.create(); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCacheCommon.java b/sql/src/test/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCacheCommon.java index edfa22a1e8a..7ece7cacf83 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCacheCommon.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCacheCommon.java @@ -37,7 +37,6 @@ import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; import org.easymock.EasyMock; -import org.joda.time.Period; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -52,14 +51,7 @@ import java.util.concurrent.CountDownLatch; public abstract class SegmentMetadataCacheCommon extends CalciteTestBase { - static final PlannerConfig PLANNER_CONFIG_DEFAULT = new PlannerConfig() - { - @Override - public Period getMetadataRefreshPeriod() - { - return new Period("PT1S"); - } - }; + static final PlannerConfig PLANNER_CONFIG_DEFAULT = PlannerConfig.builder().metadataRefreshPeriod("PT1S").build(); static final List ROWS1 = ImmutableList.of( CalciteTests.createRow(ImmutableMap.of("t", "2000-01-01", "m1", "1.0", "dim1", "")), diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java index b41e75a9705..815b72b0aea 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java @@ -25,7 +25,6 @@ import org.apache.druid.math.expr.ExpressionProcessing; import 
org.apache.druid.segment.column.ColumnType; import org.apache.druid.sql.calcite.expression.DruidExpression; import org.apache.druid.sql.calcite.expression.SimpleExtraction; -import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.http.SqlParameter; import org.junit.BeforeClass; @@ -39,7 +38,6 @@ public abstract class CalciteTestBase @BeforeClass public static void setupCalciteProperties() { - Calcites.setSystemProperties(); NullHandling.initializeForTests(); ExpressionProcessing.initializeForTests(null); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java index b4bb127ffe8..1a6477688ef 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java @@ -112,7 +112,8 @@ import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.server.security.Escalator; import org.apache.druid.server.security.NoopEscalator; import org.apache.druid.server.security.ResourceType; -import org.apache.druid.sql.SqlLifecycleFactory; +import org.apache.druid.sql.SqlLifecycleManager; +import org.apache.druid.sql.SqlStatementFactory; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; @@ -789,23 +790,24 @@ public class CalciteTests ); } - public static SqlLifecycleFactory createSqlLifecycleFactory(final PlannerFactory plannerFactory) + public static SqlStatementFactory createSqlLifecycleFactory(final PlannerFactory plannerFactory) { return createSqlLifecycleFactory(plannerFactory, new AuthConfig()); } - public static SqlLifecycleFactory createSqlLifecycleFactory( + public static SqlStatementFactory createSqlLifecycleFactory( final PlannerFactory plannerFactory, final AuthConfig authConfig ) { - return new 
SqlLifecycleFactory( + return new SqlStatementFactory( plannerFactory, new ServiceEmitter("dummy", "dummy", new NoopEmitter()), new NoopRequestLogger(), QueryStackTests.DEFAULT_NOOP_SCHEDULER, authConfig, - Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of())) + Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of())), + new SqlLifecycleManager() ); } @@ -868,6 +870,7 @@ public class CalciteTests ); } + @SuppressWarnings("resource") public static SpecificSegmentsQuerySegmentWalker createMockWalker( final QueryRunnerFactoryConglomerate conglomerate, final File tmpDir, diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java index 4f384e5747a..7a58503717a 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java @@ -80,7 +80,7 @@ public class QueryLogHook implements TestRule final Consumer function = query -> { try { - recordedQueries.add((Query) query); + recordedQueries.add((Query) query); log.info( "Issued query: %s", objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(query) diff --git a/sql/src/test/java/org/apache/druid/sql/http/SqlHttpModuleTest.java b/sql/src/test/java/org/apache/druid/sql/http/SqlHttpModuleTest.java index c035f7c382c..69e05c12cd6 100644 --- a/sql/src/test/java/org/apache/druid/sql/http/SqlHttpModuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/http/SqlHttpModuleTest.java @@ -29,7 +29,7 @@ import org.apache.druid.guice.LifecycleModule; import org.apache.druid.guice.annotations.JSR311Resource; import org.apache.druid.guice.annotations.Json; import org.apache.druid.server.security.AuthorizerMapper; -import org.apache.druid.sql.SqlLifecycleFactory; +import org.apache.druid.sql.SqlStatementFactory; import org.easymock.EasyMockRunner; import org.easymock.Mock; import org.junit.Assert; @@ -46,7 +46,7 @@ public 
class SqlHttpModuleTest @Mock private ObjectMapper jsonMpper; @Mock - private SqlLifecycleFactory sqlLifecycleFactory; + private SqlStatementFactory sqlLifecycleFactory; private SqlHttpModule target; private Injector injector; @@ -60,7 +60,7 @@ public class SqlHttpModuleTest new DruidGuiceExtensions(), binder -> { binder.bind(ObjectMapper.class).annotatedWith(Json.class).toInstance(jsonMpper); - binder.bind(SqlLifecycleFactory.class).toInstance(sqlLifecycleFactory); + binder.bind(SqlStatementFactory.class).toInstance(sqlLifecycleFactory); binder.bind(AuthorizerMapper.class).toInstance(new AuthorizerMapper(Collections.emptyMap())); }, target diff --git a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java index 5c05f7ccc8f..5b5e0b4a465 100644 --- a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java +++ b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java @@ -31,7 +31,6 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import org.apache.calcite.avatica.SqlType; -import org.apache.calcite.tools.RelConversionException; import org.apache.druid.common.config.NullHandling; import org.apache.druid.common.exception.AllowedRegexErrorResponseTransformStrategy; import org.apache.druid.common.exception.ErrorResponseTransformStrategy; @@ -63,23 +62,28 @@ import org.apache.druid.query.groupby.GroupByQueryConfig; import org.apache.druid.server.QueryScheduler; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.initialization.ServerConfig; -import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.log.TestRequestLogger; import org.apache.druid.server.metrics.NoopServiceEmitter; import org.apache.druid.server.scheduling.HiLoQueryLaningStrategy; import 
org.apache.druid.server.scheduling.ManualQueryPrioritizationStrategy; +import org.apache.druid.server.security.Access; import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.ForbiddenException; -import org.apache.druid.sql.SqlLifecycle; -import org.apache.druid.sql.SqlLifecycleFactory; +import org.apache.druid.server.security.ResourceAction; +import org.apache.druid.sql.DirectStatement; +import org.apache.druid.sql.HttpStatement; import org.apache.druid.sql.SqlLifecycleManager; import org.apache.druid.sql.SqlPlanningException.PlanningError; +import org.apache.druid.sql.SqlStatementFactory; +import org.apache.druid.sql.SqlToolbox; import org.apache.druid.sql.calcite.planner.CalciteRulesManager; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; +import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.planner.PlannerFactory; +import org.apache.druid.sql.calcite.planner.PlannerResult; import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTestBase; @@ -101,6 +105,7 @@ import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.StreamingOutput; + import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -111,10 +116,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Consumer; import 
java.util.function.Function; import java.util.stream.Collectors; @@ -122,6 +129,8 @@ public class SqlResourceTest extends CalciteTestBase { private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); private static final String DUMMY_SQL_QUERY_ID = "dummy"; + private static final int WAIT_TIMEOUT_SECS = 3; + private static final Consumer NULL_ACTION = s -> {}; private static final List EXPECTED_COLUMNS_FOR_RESULT_FORMAT_TESTS = Arrays.asList("__time", "dim1", "dim2", "dim3", "cnt", "m1", "m2", "unique_dim1", "EXPR$8"); @@ -138,21 +147,22 @@ public class SqlResourceTest extends CalciteTestBase public TemporaryFolder temporaryFolder = new TemporaryFolder(); @Rule public QueryLogHook queryLogHook = QueryLogHook.create(); - private SpecificSegmentsQuerySegmentWalker walker = null; + private SpecificSegmentsQuerySegmentWalker walker; private TestRequestLogger testRequestLogger; private SqlResource resource; private HttpServletRequest req; private ListeningExecutorService executorService; private SqlLifecycleManager lifecycleManager; - private SqlLifecycleFactory sqlLifecycleFactory; + private SqlStatementFactory sqlLifecycleFactory; private CountDownLatch lifecycleAddLatch; private final SettableSupplier> validateAndAuthorizeLatchSupplier = new SettableSupplier<>(); private final SettableSupplier> planLatchSupplier = new SettableSupplier<>(); private final SettableSupplier> executeLatchSupplier = new SettableSupplier<>(); private final SettableSupplier, Sequence>> sequenceMapFnSupplier = new SettableSupplier<>(); + private Consumer onExecute = NULL_ACTION; - private boolean sleep = false; + private boolean sleep; @BeforeClass public static void setUpClass() @@ -200,14 +210,7 @@ public class SqlResourceTest extends CalciteTestBase executorService = MoreExecutors.listeningDecorator(Execs.multiThreaded(8, "test_sql_resource_%s")); walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder(), scheduler); - final PlannerConfig plannerConfig 
= new PlannerConfig() - { - @Override - public boolean shouldSerializeComplexValues() - { - return false; - } - }; + final PlannerConfig plannerConfig = PlannerConfig.builder().serializeComplexValues(false).build(); final DruidSchemaCatalog rootSchema = CalciteTests.createMockRootSchema( conglomerate, walker, @@ -216,24 +219,7 @@ public class SqlResourceTest extends CalciteTestBase ); final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable(); final ExprMacroTable macroTable = CalciteTests.createExprMacroTable(); - req = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(req.getRemoteAddr()).andReturn(null).once(); - EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT) - .anyTimes(); - EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); - EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) - .andReturn(null) - .anyTimes(); - EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT) - .anyTimes(); - req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); - EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT) - .anyTimes(); - EasyMock.replay(req); + req = request(true); testRequestLogger = new TestRequestLogger(); @@ -252,7 +238,7 @@ public class SqlResourceTest extends CalciteTestBase lifecycleManager = new SqlLifecycleManager() { @Override - public void add(String sqlQueryId, SqlLifecycle lifecycle) + public void add(String sqlQueryId, Cancelable lifecycle) { super.add(sqlQueryId, lifecycle); if (lifecycleAddLatch != null) { @@ -263,31 +249,34 @@ public class SqlResourceTest extends CalciteTestBase final ServiceEmitter emitter = new NoopServiceEmitter(); final AuthConfig authConfig = new AuthConfig(); 
final DefaultQueryConfig defaultQueryConfig = new DefaultQueryConfig(ImmutableMap.of()); - sqlLifecycleFactory = new SqlLifecycleFactory( + sqlLifecycleFactory = new SqlStatementFactory( plannerFactory, emitter, testRequestLogger, scheduler, authConfig, - Suppliers.ofInstance(defaultQueryConfig) + Suppliers.ofInstance(defaultQueryConfig), + lifecycleManager ) { @Override - public SqlLifecycle factorize() + public HttpStatement httpStatement( + final SqlQuery sqlQuery, + final HttpServletRequest req + ) { - return new TestSqlLifecycle( - plannerFactory, - emitter, - testRequestLogger, - scheduler, - authConfig, - System.currentTimeMillis(), - System.nanoTime(), + TestHttpStatement stmt = new TestHttpStatement( + lifecycleToolbox, + sqlQuery, + req, validateAndAuthorizeLatchSupplier, planLatchSupplier, executeLatchSupplier, - sequenceMapFnSupplier + sequenceMapFnSupplier, + onExecute ); + onExecute = NULL_ACTION; + return stmt; } }; resource = new SqlResource( @@ -299,6 +288,11 @@ public class SqlResourceTest extends CalciteTestBase ); } + HttpServletRequest request(boolean ok) + { + return makeExpectedReq(CalciteTests.REGULAR_USER_AUTH_RESULT, ok); + } + @After public void tearDown() throws Exception { @@ -311,21 +305,7 @@ public class SqlResourceTest extends CalciteTestBase @Test public void testUnauthorized() throws Exception { - HttpServletRequest testRequest = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(testRequest.getRemoteAddr()).andReturn(null).once(); - EasyMock.expect(testRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT) - .anyTimes(); - EasyMock.expect(testRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); - EasyMock.expect(testRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) - .andReturn(null) - .anyTimes(); - EasyMock.expect(testRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - 
.andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT) - .anyTimes(); - testRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, false); - EasyMock.expectLastCall().once(); - EasyMock.replay(testRequest); + HttpServletRequest testRequest = request(false); try { resource.doPost( @@ -358,7 +338,6 @@ public class SqlResourceTest extends CalciteTestBase Assert.assertTrue(lifecycleManager.getAll("id").isEmpty()); } - @Test public void testCountStarExtendedCharacters() throws Exception { @@ -1284,16 +1263,29 @@ public class SqlResourceTest extends CalciteTestBase Assert.assertTrue(lifecycleManager.getAll("id").isEmpty()); } + private void failOnExecute(String errorMessage) + { + onExecute = s -> { + throw new QueryUnsupportedException(errorMessage); + }; + } + @Test public void testUnsupportedQueryThrowsException() throws Exception { - String errorMessage = "This will be support in Druid 9999"; - SqlQuery badQuery = EasyMock.createMock(SqlQuery.class); - EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE"); - EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of(BaseQuery.SQL_QUERY_ID, "id")); - EasyMock.expect(badQuery.getParameterList()).andThrow(new QueryUnsupportedException(errorMessage)); - EasyMock.replay(badQuery); - final QueryException exception = doPost(badQuery).lhs; + String errorMessage = "This will be supported in Druid 9999"; + failOnExecute(errorMessage); + final QueryException exception = doPost( + new SqlQuery( + "SELECT ANSWER TO LIFE", + ResultFormat.OBJECT, + false, + false, + false, + ImmutableMap.of(BaseQuery.SQL_QUERY_ID, "id"), + null + ) + ).lhs; Assert.assertNotNull(exception); Assert.assertEquals(QueryUnsupportedException.ERROR_CODE, exception.getErrorCode()); @@ -1305,13 +1297,19 @@ public class SqlResourceTest extends CalciteTestBase public void testErrorResponseReturnSameQueryIdWhenSetInContext() throws Exception { String queryId = "id123"; - String errorMessage = "This will be support in Druid 9999"; - 
SqlQuery badQuery = EasyMock.createMock(SqlQuery.class); - EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE"); - EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of("sqlQueryId", queryId)); - EasyMock.expect(badQuery.getParameterList()).andThrow(new QueryUnsupportedException(errorMessage)); - EasyMock.replay(badQuery); - final Response response = resource.doPost(badQuery, req); + String errorMessage = "This will be supported in Druid 9999"; + failOnExecute(errorMessage); + final Response response = resource.doPost( + new SqlQuery( + "SELECT ANSWER TO LIFE", + ResultFormat.OBJECT, + false, + false, + false, + ImmutableMap.of("sqlQueryId", queryId), + null + ), + req); Assert.assertNotEquals(200, response.getStatus()); final MultivaluedMap headers = response.getMetadata(); Assert.assertTrue(headers.containsKey(SqlResource.SQL_QUERY_ID_RESPONSE_HEADER)); @@ -1322,13 +1320,19 @@ public class SqlResourceTest extends CalciteTestBase @Test public void testErrorResponseReturnNewQueryIdWhenNotSetInContext() throws Exception { - String errorMessage = "This will be support in Druid 9999"; - SqlQuery badQuery = EasyMock.createMock(SqlQuery.class); - EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE"); - EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of()); - EasyMock.expect(badQuery.getParameterList()).andThrow(new QueryUnsupportedException(errorMessage)); - EasyMock.replay(badQuery); - final Response response = resource.doPost(badQuery, req); + String errorMessage = "This will be supported in Druid 9999"; + failOnExecute(errorMessage); + final Response response = resource.doPost( + new SqlQuery( + "SELECT ANSWER TO LIFE", + ResultFormat.OBJECT, + false, + false, + false, + ImmutableMap.of(), + null + ), + req); Assert.assertNotEquals(200, response.getStatus()); final MultivaluedMap headers = response.getMetadata(); Assert.assertTrue(headers.containsKey(SqlResource.SQL_QUERY_ID_RESPONSE_HEADER)); @@ -1344,8 
+1348,7 @@ public class SqlResourceTest extends CalciteTestBase CalciteTests.TEST_AUTHORIZER_MAPPER, sqlLifecycleFactory, lifecycleManager, - new ServerConfig() - { + new ServerConfig() { @Override public boolean isShowDetailedJettyErrors() { @@ -1360,13 +1363,19 @@ public class SqlResourceTest extends CalciteTestBase } ); - String errorMessage = "This will be support in Druid 9999"; - SqlQuery badQuery = EasyMock.createMock(SqlQuery.class); - EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE"); - EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of("sqlQueryId", "id")); - EasyMock.expect(badQuery.getParameterList()).andThrow(new QueryUnsupportedException(errorMessage)); - EasyMock.replay(badQuery); - final QueryException exception = doPost(badQuery).lhs; + String errorMessage = "This will be supported in Druid 9999"; + failOnExecute(errorMessage); + final QueryException exception = doPost( + new SqlQuery( + "SELECT ANSWER TO LIFE", + ResultFormat.OBJECT, + false, + false, + false, + ImmutableMap.of("sqlQueryId", "id"), + null + ) + ).lhs; Assert.assertNotNull(exception); Assert.assertNull(exception.getMessage()); @@ -1401,17 +1410,26 @@ public class SqlResourceTest extends CalciteTestBase ); String errorMessage = "could not assert"; - SqlQuery badQuery = EasyMock.createMock(SqlQuery.class); - EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE"); - EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of("sqlQueryId", "id")); - EasyMock.expect(badQuery.getParameterList()).andThrow(new Error(errorMessage)); - EasyMock.replay(badQuery); - final QueryException exception = doPost(badQuery).lhs; + failOnExecute(errorMessage); + onExecute = s -> { + throw new Error(errorMessage); + }; + final QueryException exception = doPost( + new SqlQuery( + "SELECT ANSWER TO LIFE", + ResultFormat.OBJECT, + false, + false, + false, + ImmutableMap.of("sqlQueryId", "id"), + null + ) + ).lhs; Assert.assertNotNull(exception); 
Assert.assertNull(exception.getMessage()); Assert.assertNull(exception.getHost()); - Assert.assertEquals(exception.getErrorCode(), QueryInterruptedException.UNKNOWN_EXCEPTION); + Assert.assertEquals(QueryInterruptedException.UNKNOWN_EXCEPTION, exception.getErrorCode()); Assert.assertNull(exception.getErrorClass()); Assert.assertTrue(lifecycleManager.getAll("id").isEmpty()); } @@ -1446,7 +1464,6 @@ public class SqlResourceTest extends CalciteTestBase })); } - int success = 0; int limited = 0; for (int i = 0; i < numQueries; i++) { @@ -1491,7 +1508,6 @@ public class SqlResourceTest extends CalciteTestBase Assert.assertEquals(timeoutException.getErrorCode(), QueryTimeoutException.ERROR_CODE); Assert.assertEquals(timeoutException.getErrorClass(), QueryTimeoutException.class.getName()); Assert.assertTrue(lifecycleManager.getAll(sqlQueryId).isEmpty()); - } @Test @@ -1509,8 +1525,8 @@ public class SqlResourceTest extends CalciteTestBase makeRegularUserReq() ) ); - Assert.assertTrue(validateAndAuthorizeLatch.await(1, TimeUnit.SECONDS)); - Assert.assertTrue(lifecycleAddLatch.await(1, TimeUnit.SECONDS)); + Assert.assertTrue(validateAndAuthorizeLatch.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)); + Assert.assertTrue(lifecycleAddLatch.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)); Response response = resource.cancelQuery(sqlQueryId, mockRequestForCancel()); planLatch.countDown(); Assert.assertEquals(Status.ACCEPTED.getStatusCode(), response.getStatus()); @@ -1521,7 +1537,7 @@ public class SqlResourceTest extends CalciteTestBase Assert.assertEquals(Status.INTERNAL_SERVER_ERROR.getStatusCode(), response.getStatus()); QueryException exception = JSON_MAPPER.readValue((byte[]) response.getEntity(), QueryException.class); Assert.assertEquals( - QueryInterruptedException.QUERY_CANCELLED, + QueryInterruptedException.QUERY_CANCELED, exception.getErrorCode() ); } @@ -1540,7 +1556,7 @@ public class SqlResourceTest extends CalciteTestBase makeRegularUserReq() ) ); - 
Assert.assertTrue(planLatch.await(1, TimeUnit.SECONDS)); + Assert.assertTrue(planLatch.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)); Response response = resource.cancelQuery(sqlQueryId, mockRequestForCancel()); execLatch.countDown(); Assert.assertEquals(Status.ACCEPTED.getStatusCode(), response.getStatus()); @@ -1551,7 +1567,7 @@ public class SqlResourceTest extends CalciteTestBase Assert.assertEquals(Status.INTERNAL_SERVER_ERROR.getStatusCode(), response.getStatus()); QueryException exception = JSON_MAPPER.readValue((byte[]) response.getEntity(), QueryException.class); Assert.assertEquals( - QueryInterruptedException.QUERY_CANCELLED, + QueryInterruptedException.QUERY_CANCELED, exception.getErrorCode() ); } @@ -1570,7 +1586,7 @@ public class SqlResourceTest extends CalciteTestBase makeRegularUserReq() ) ); - Assert.assertTrue(planLatch.await(1, TimeUnit.SECONDS)); + Assert.assertTrue(planLatch.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)); Response response = resource.cancelQuery("invalidQuery", mockRequestForCancel()); Assert.assertEquals(Status.NOT_FOUND.getStatusCode(), response.getStatus()); @@ -1595,7 +1611,7 @@ public class SqlResourceTest extends CalciteTestBase makeSuperUserReq() ) ); - Assert.assertTrue(planLatch.await(1, TimeUnit.SECONDS)); + Assert.assertTrue(planLatch.await(3, TimeUnit.SECONDS)); Response response = resource.cancelQuery(sqlQueryId, mockRequestForCancel()); Assert.assertEquals(Status.FORBIDDEN.getStatusCode(), response.getStatus()); @@ -1683,6 +1699,7 @@ public class SqlResourceTest extends CalciteTestBase } // Returns either an error or a result, assuming the result is a JSON object. 
+ @SuppressWarnings("unchecked") private Pair doPost( final SqlQuery query, final HttpServletRequest req, @@ -1736,12 +1753,17 @@ public class SqlResourceTest extends CalciteTestBase } private HttpServletRequest makeExpectedReq(AuthenticationResult authenticationResult) + { + return makeExpectedReq(authenticationResult, true); + } + + private HttpServletRequest makeExpectedReq(AuthenticationResult authenticationResult, boolean ok) { HttpServletRequest req = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(req.getRemoteAddr()).andReturn(null).once(); EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) .andReturn(authenticationResult) .anyTimes(); + EasyMock.expect(req.getRemoteAddr()).andReturn(null).once(); EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) .andReturn(null) @@ -1749,7 +1771,7 @@ public class SqlResourceTest extends CalciteTestBase EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) .andReturn(authenticationResult) .anyTimes(); - req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, ok); EasyMock.expectLastCall().anyTimes(); EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) .andReturn(authenticationResult) @@ -1788,104 +1810,113 @@ public class SqlResourceTest extends CalciteTestBase }; } - private static class TestSqlLifecycle extends SqlLifecycle + private static class TestHttpStatement extends HttpStatement { private final SettableSupplier> validateAndAuthorizeLatchSupplier; private final SettableSupplier> planLatchSupplier; private final SettableSupplier> executeLatchSupplier; private final SettableSupplier, Sequence>> sequenceMapFnSupplier; + private final Consumer onExecute; - private TestSqlLifecycle( - PlannerFactory plannerFactory, - ServiceEmitter emitter, - 
RequestLogger requestLogger, - QueryScheduler queryScheduler, - AuthConfig authConfig, - long startMs, - long startNs, + private TestHttpStatement( + final SqlToolbox lifecycleContext, + final SqlQuery sqlQuery, + final HttpServletRequest req, SettableSupplier> validateAndAuthorizeLatchSupplier, SettableSupplier> planLatchSupplier, SettableSupplier> executeLatchSupplier, - SettableSupplier, Sequence>> sequenceMapFnSupplier + SettableSupplier, Sequence>> sequenceMapFnSupplier, + final Consumer onAuthorize ) { - super(plannerFactory, emitter, requestLogger, queryScheduler, authConfig, new DefaultQueryConfig(ImmutableMap.of()), startMs, startNs); + super(lifecycleContext, sqlQuery, req); this.validateAndAuthorizeLatchSupplier = validateAndAuthorizeLatchSupplier; this.planLatchSupplier = planLatchSupplier; this.executeLatchSupplier = executeLatchSupplier; this.sequenceMapFnSupplier = sequenceMapFnSupplier; + this.onExecute = onAuthorize; } @Override - public void validateAndAuthorize(HttpServletRequest req) + protected void authorize( + DruidPlanner planner, + Function, Access> authorizer) { if (validateAndAuthorizeLatchSupplier.get() != null) { if (validateAndAuthorizeLatchSupplier.get().rhs) { - super.validateAndAuthorize(req); + super.authorize(planner, authorizer); validateAndAuthorizeLatchSupplier.get().lhs.countDown(); } else { try { - if (!validateAndAuthorizeLatchSupplier.get().lhs.await(1, TimeUnit.SECONDS)) { + if (!validateAndAuthorizeLatchSupplier.get().lhs.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)) { throw new RuntimeException("Latch timed out"); } } catch (InterruptedException e) { throw new RuntimeException(e); } - super.validateAndAuthorize(req); + super.authorize(planner, authorizer); } } else { - super.validateAndAuthorize(req); + super.authorize(planner, authorizer); } } @Override - public void plan() throws RelConversionException + public PlannerResult plan(DruidPlanner planner) { if (planLatchSupplier.get() != null) { if 
(planLatchSupplier.get().rhs) { - super.plan(); + PlannerResult result = super.plan(planner); planLatchSupplier.get().lhs.countDown(); + return result; } else { try { - if (!planLatchSupplier.get().lhs.await(1, TimeUnit.SECONDS)) { + if (!planLatchSupplier.get().lhs.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)) { throw new RuntimeException("Latch timed out"); } } catch (InterruptedException e) { throw new RuntimeException(e); } - super.plan(); + return super.plan(planner); } } else { - super.plan(); + return super.plan(planner); } } @Override public Sequence execute() + { + onExecute.accept(this); + return super.execute(); + } + + @Override + public Sequence doExecute() { final Function, Sequence> sequenceMapFn = Optional.ofNullable(sequenceMapFnSupplier.get()).orElse(Function.identity()); if (executeLatchSupplier.get() != null) { if (executeLatchSupplier.get().rhs) { - Sequence sequence = sequenceMapFn.apply(super.execute()); + Sequence sequence = sequenceMapFn.apply(super.doExecute()); executeLatchSupplier.get().lhs.countDown(); return sequence; } else { try { - if (!executeLatchSupplier.get().lhs.await(1, TimeUnit.SECONDS)) { + if (!executeLatchSupplier.get().lhs.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)) { throw new RuntimeException("Latch timed out"); } } catch (InterruptedException e) { throw new RuntimeException(e); } - return sequenceMapFn.apply(super.execute()); + return sequenceMapFn.apply(super.doExecute()); } } else { - return sequenceMapFn.apply(super.execute()); + return sequenceMapFn.apply(super.doExecute()); } } }