Refactor SqlLifecycle into statement classes (#12845)

* Refactor SqlLifecycle into statement classes

Create direct & prepared statements
Remove redundant exceptions from tests
Tidy up Calcite query tests
Make PlannerConfig more testable

* Build fixes

* Added builder to SqlQueryPlus

* Moved Calcites system properties to saffron.properties

* Build fix

* Resolve merge conflict

* Fix IntelliJ inspection issue

* Revisions from reviews

Backed out a revision to Calcite tests that didn't work out as planned

* Build fix

* Fixed spelling errors

* Fixed failed test

Prepare now enforces security; before it did not.

* Rebase and fix IntelliJ inspections issue

* Clean up exception handling

* Fix handling of JDBC auth errors

* Build fix

* More tweaks to security messages
Paul Rogers 2022-08-14 00:44:08 -07:00 committed by GitHub
parent 4d65c08576
commit 41712b7a3a
76 changed files with 3152 additions and 2571 deletions
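
At a high level, this refactor replaces the single SqlLifecycle state machine with one statement class per use case: DirectStatement for one-shot execution (tests, JDBC, and the HTTP subclass HttpStatement) and PreparedStatement for the JDBC prepare/execute model. A minimal sketch of the intended call pattern, assuming a SqlToolbox and SqlQueryPlus (here toolbox and queryPlus) were built elsewhere:

// Hedged sketch of the new API; see the statement classes later in this diff.
try (DirectStatement stmt = new DirectStatement(toolbox, queryPlus)) {
  Sequence<Object[]> results = stmt.execute(); // validate, authorize, plan, run
  // ... consume, then close, the returned sequence ...
} // close() emits request logs and metrics via SqlExecutionReporter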

View File

@ -48,7 +48,6 @@ import org.apache.druid.sql.calcite.aggregation.builtin.CountSqlAggregator;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.expression.builtin.QueryLookupOperatorConversion;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
@ -75,6 +74,7 @@ import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import javax.annotation.Nullable;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@ -92,7 +92,6 @@ public class SqlBenchmark
{
static {
NullHandling.initializeForTests();
Calcites.setSystemProperties();
}
private static final Logger log = new Logger(SqlBenchmark.class);

View File

@ -39,7 +39,6 @@ import org.apache.druid.server.QueryStackTests;
import org.apache.druid.server.security.AuthTestUtils;
import org.apache.druid.sql.calcite.SqlVectorizedExpressionSanityTest;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
@ -65,6 +64,7 @@ import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@ -82,7 +82,6 @@ public class SqlExpressionBenchmark
static {
NullHandling.initializeForTests();
Calcites.setSystemProperties();
ExpressionProcessing.initializeForStrictBooleansTests(true);
}

View File

@ -45,7 +45,6 @@ import org.apache.druid.server.QueryStackTests;
import org.apache.druid.server.security.AuthTestUtils;
import org.apache.druid.sql.calcite.SqlVectorizedExpressionSanityTest;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
@ -71,6 +70,7 @@ import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@ -85,7 +85,6 @@ public class SqlNestedDataBenchmark
static {
NullHandling.initializeForTests();
Calcites.setSystemProperties();
ExpressionProcessing.initializeForStrictBooleansTests(true);
}

View File

@ -117,7 +117,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testComputingSketchOnNumericValues() throws Exception
public void testComputingSketchOnNumericValues()
{
cannotVectorize();
@ -145,7 +145,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testComputingSketchOnCastedString() throws Exception
public void testComputingSketchOnCastedString()
{
cannotVectorize();
@ -185,7 +185,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testDefaultCompressionForTDigestGenerateSketchAgg() throws Exception
public void testDefaultCompressionForTDigestGenerateSketchAgg()
{
cannotVectorize();
@ -211,7 +211,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testComputingQuantileOnPreAggregatedSketch() throws Exception
public void testComputingQuantileOnPreAggregatedSketch()
{
cannotVectorize();
@ -253,7 +253,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testGeneratingSketchAndComputingQuantileOnFly() throws Exception
public void testGeneratingSketchAndComputingQuantileOnFly()
{
cannotVectorize();
@ -308,7 +308,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnNumericValues() throws Exception
public void testQuantileOnNumericValues()
{
cannotVectorize();
@ -345,7 +345,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testCompressionParamForTDigestQuantileAgg() throws Exception
public void testCompressionParamForTDigestQuantileAgg()
{
cannotVectorize();
testQuery(
@ -383,7 +383,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnCastedString() throws Exception
public void testQuantileOnCastedString()
{
cannotVectorize();
@ -436,7 +436,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testEmptyTimeseriesResults() throws Exception
public void testEmptyTimeseriesResults()
{
cannotVectorize();
@ -468,7 +468,7 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByAggregatorDefaultValues() throws Exception
public void testGroupByAggregatorDefaultValues()
{
cannotVectorize();
testQuery(

View File

@ -85,6 +85,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
return Iterables.concat(super.getJacksonModules(), new HllSketchModule().getJacksonModules());
}
@SuppressWarnings("resource")
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker() throws IOException
{
@ -149,7 +150,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testApproxCountDistinctHllSketch() throws Exception
public void testApproxCountDistinctHllSketch()
{
// Can't vectorize due to SUBSTRING expression.
cannotVectorize();
@ -244,7 +245,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
@Test
public void testAvgDailyCountDistinctHllSketch() throws Exception
public void testAvgDailyCountDistinctHllSketch()
{
// Can't vectorize due to outer query, which runs on an inline datasource.
cannotVectorize();
@ -340,7 +341,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testApproxCountDistinctHllSketchIsRounded() throws Exception
public void testApproxCountDistinctHllSketchIsRounded()
{
testQuery(
"SELECT"
@ -376,7 +377,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testHllSketchPostAggs() throws Exception
public void testHllSketchPostAggs()
{
final String sketchSummary = "### HLL SKETCH SUMMARY: \n"
+ " Log Config K : 12\n"
@ -528,7 +529,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testtHllSketchPostAggsPostSort() throws Exception
public void testtHllSketchPostAggsPostSort()
{
final String sketchSummary = "### HLL SKETCH SUMMARY: \n"
+ " Log Config K : 12\n"
@ -582,7 +583,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testEmptyTimeseriesResults() throws Exception
public void testEmptyTimeseriesResults()
{
// timeseries with all granularity have a single group, so should return default results for given aggregators
testQuery(
@ -620,7 +621,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByAggregatorDefaultValues() throws Exception
public void testGroupByAggregatorDefaultValues()
{
testQuery(
"SELECT\n"

View File

@ -140,7 +140,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnFloatAndLongs() throws Exception
public void testQuantileOnFloatAndLongs()
{
testQuery(
"SELECT\n"
@ -213,7 +213,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnComplexColumn() throws Exception
public void testQuantileOnComplexColumn()
{
testQuery(
"SELECT\n"
@ -270,7 +270,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnCastedString() throws Exception
public void testQuantileOnCastedString()
{
final List<Object[]> expectedResults;
if (NullHandling.replaceWithDefault()) {
@ -363,7 +363,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnInnerQuery() throws Exception
public void testQuantileOnInnerQuery()
{
final List<Object[]> expectedResults;
if (NullHandling.replaceWithDefault()) {
@ -429,7 +429,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnInnerQuantileQuery() throws Exception
public void testQuantileOnInnerQuantileQuery()
{
ImmutableList.Builder<Object[]> builder = ImmutableList.builder();
builder.add(new Object[]{"", 1.0});
@ -496,7 +496,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testDoublesSketchPostAggs() throws Exception
public void testDoublesSketchPostAggs()
{
testQuery(
"SELECT\n"
@ -679,7 +679,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testDoublesSketchPostAggsPostSort() throws Exception
public void testDoublesSketchPostAggsPostSort()
{
testQuery(
"SELECT DS_GET_QUANTILE(y, 0.5), DS_GET_QUANTILE(y, 0.98) from ("
@ -728,7 +728,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testEmptyTimeseriesResults() throws Exception
public void testEmptyTimeseriesResults()
{
testQuery(
"SELECT\n"
@ -768,7 +768,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByAggregatorDefaultValues() throws Exception
public void testGroupByAggregatorDefaultValues()
{
testQuery(
"SELECT\n"
@ -828,7 +828,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testSuccessWithSmallMaxStreamLength() throws Exception
public void testSuccessWithSmallMaxStreamLength()
{
final Map<String, Object> context = new HashMap<>(QUERY_CONTEXT_DEFAULT);
context.put(

View File

@ -145,7 +145,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testApproxCountDistinctThetaSketch() throws Exception
public void testApproxCountDistinctThetaSketch()
{
// Cannot vectorize due to SUBSTRING.
cannotVectorize();
@ -265,7 +265,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testAvgDailyCountDistinctThetaSketch() throws Exception
public void testAvgDailyCountDistinctThetaSketch()
{
// Can't vectorize due to outer query (it operates on an inlined data source, which cannot be vectorized).
cannotVectorize();
@ -359,7 +359,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testThetaSketchPostAggs() throws Exception
public void testThetaSketchPostAggs()
{
final List<Object[]> expectedResults;
@ -529,7 +529,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testThetaSketchPostAggsPostSort() throws Exception
public void testThetaSketchPostAggsPostSort()
{
final String sql = "SELECT DS_THETA(dim2) as y FROM druid.foo ORDER BY THETA_SKETCH_ESTIMATE(DS_THETA(dim2)) DESC LIMIT 10";
@ -579,7 +579,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testEmptyTimeseriesResults() throws Exception
public void testEmptyTimeseriesResults()
{
testQuery(
"SELECT\n"
@ -638,7 +638,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByAggregatorDefaultValues() throws Exception
public void testGroupByAggregatorDefaultValues()
{
testQuery(
"SELECT\n"

View File

@ -52,6 +52,7 @@ import org.apache.druid.sql.http.SqlParameter;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@ -88,7 +89,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
}
@Test
public void testBloomFilter() throws Exception
public void testBloomFilter() throws IOException
{
BloomKFilter filter = new BloomKFilter(1500);
filter.addString("def");
@ -116,7 +117,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
}
@Test
public void testBloomFilterExprFilter() throws Exception
public void testBloomFilterExprFilter() throws IOException
{
BloomKFilter filter = new BloomKFilter(1500);
filter.addString("a-foo");
@ -157,7 +158,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
}
@Test
public void testBloomFilterVirtualColumn() throws Exception
public void testBloomFilterVirtualColumn() throws IOException
{
BloomKFilter filter = new BloomKFilter(1500);
filter.addString("def-foo");
@ -187,7 +188,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
@Test
public void testBloomFilterVirtualColumnNumber() throws Exception
public void testBloomFilterVirtualColumnNumber() throws IOException
{
BloomKFilter filter = new BloomKFilter(1500);
filter.addFloat(20.2f);
@ -218,7 +219,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
}
@Test
public void testBloomFilters() throws Exception
public void testBloomFilters() throws IOException
{
BloomKFilter filter = new BloomKFilter(1500);
filter.addString("def");
@ -254,7 +255,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
@Ignore("this test is really slow and is intended to use for comparisons with testBloomFilterBigParameter")
@Test
public void testBloomFilterBigNoParam() throws Exception
public void testBloomFilterBigNoParam() throws IOException
{
BloomKFilter filter = new BloomKFilter(5_000_000);
filter.addString("def");
@ -282,7 +283,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
@Ignore("this test is for comparison with testBloomFilterBigNoParam")
@Test
public void testBloomFilterBigParameter() throws Exception
public void testBloomFilterBigParameter() throws IOException
{
BloomKFilter filter = new BloomKFilter(5_000_000);
filter.addString("def");
@ -310,7 +311,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
}
@Test
public void testBloomFilterNullParameter() throws Exception
public void testBloomFilterNullParameter() throws IOException
{
BloomKFilter filter = new BloomKFilter(1500);
filter.addBytes(null, 0, 0);

View File

@ -125,7 +125,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ
@Test
public void testQuantileOnFloatAndLongs() throws Exception
public void testQuantileOnFloatAndLongs()
{
final List<Object[]> expectedResults = ImmutableList.of(
new Object[]{
@ -238,7 +238,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ
}
@Test
public void testQuantileOnCastedString() throws Exception
public void testQuantileOnCastedString()
{
testQuery(
"SELECT\n"
@ -354,7 +354,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ
}
@Test
public void testQuantileOnComplexColumn() throws Exception
public void testQuantileOnComplexColumn()
{
cannotVectorize();
@ -446,7 +446,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ
}
@Test
public void testQuantileOnInnerQuery() throws Exception
public void testQuantileOnInnerQuery()
{
final List<Object[]> expectedResults;
if (NullHandling.replaceWithDefault()) {
@ -513,7 +513,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ
}
@Test
public void testEmptyTimeseriesResults() throws Exception
public void testEmptyTimeseriesResults()
{
cannotVectorize();
@ -564,7 +564,7 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ
@Test
public void testGroupByAggregatorDefaultValues() throws Exception
public void testGroupByAggregatorDefaultValues()
{
cannotVectorize();
testQuery(

View File

@ -123,7 +123,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnFloatAndLongs() throws Exception
public void testQuantileOnFloatAndLongs()
{
testQuery(
"SELECT\n"
@ -195,7 +195,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnComplexColumn() throws Exception
public void testQuantileOnComplexColumn()
{
testQuery(
"SELECT\n"
@ -243,7 +243,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnInnerQuery() throws Exception
public void testQuantileOnInnerQuery()
{
final List<Object[]> expectedResults;
if (NullHandling.replaceWithDefault()) {
@ -309,7 +309,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testQuantileOnCastedString() throws Exception
public void testQuantileOnCastedString()
{
cannotVectorize();
@ -372,7 +372,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testEmptyTimeseriesResults() throws Exception
public void testEmptyTimeseriesResults()
{
testQuery(
"SELECT\n"
@ -404,7 +404,7 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByAggregatorDefaultValues() throws Exception
public void testGroupByAggregatorDefaultValues()
{
testQuery(
"SELECT\n"

View File

@ -161,7 +161,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testVarPop() throws Exception
public void testVarPop()
{
VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector();
VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector();
@ -208,7 +208,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testVarSamp() throws Exception
public void testVarSamp()
{
VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector();
VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector();
@ -255,7 +255,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testStdDevPop() throws Exception
public void testStdDevPop()
{
VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector();
VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector();
@ -310,7 +310,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testStdDevSamp() throws Exception
public void testStdDevSamp()
{
VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector();
VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector();
@ -363,7 +363,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testStdDevWithVirtualColumns() throws Exception
public void testStdDevWithVirtualColumns()
{
VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector();
VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector();
@ -422,7 +422,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
@Test
public void testVarianceOrderBy() throws Exception
public void testVarianceOrderBy()
{
List<Object[]> expectedResults = NullHandling.sqlCompatible()
? ImmutableList.of(
@ -467,7 +467,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testVariancesOnCastedString() throws Exception
public void testVariancesOnCastedString()
{
testQuery(
"SELECT\n"
@ -507,7 +507,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testEmptyTimeseriesResults() throws Exception
public void testEmptyTimeseriesResults()
{
testQuery(
"SELECT\n"
@ -557,7 +557,7 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByAggregatorDefaultValues() throws Exception
public void testGroupByAggregatorDefaultValues()
{
testQuery(
"SELECT\n"

View File

@ -67,7 +67,7 @@ public class SleepSqlTest extends BaseCalciteQueryTest
}
@Test
public void testSleepFunction() throws Exception
public void testSleepFunction()
{
testQuery(
"SELECT sleep(m1) from foo where m1 < 2.0",

View File

@ -107,10 +107,10 @@ public class ITSqlCancelTest
throw new ISE("Query is not canceled after cancel request");
}
QueryException queryException = jsonMapper.readValue(queryResponse.getContent(), QueryException.class);
if (!QueryInterruptedException.QUERY_CANCELLED.equals(queryException.getErrorCode())) {
if (!QueryInterruptedException.QUERY_CANCELED.equals(queryException.getErrorCode())) {
throw new ISE(
"Expected error code [%s], actual [%s]",
QueryInterruptedException.QUERY_CANCELLED,
QueryInterruptedException.QUERY_CANCELED,
queryException.getErrorCode()
);
}

View File

@ -311,7 +311,7 @@ public abstract class AbstractAuthConfigurationTest
datasourceOnlyUserClient,
SYS_SCHEMA_SEGMENTS_QUERY,
HttpResponseStatus.FORBIDDEN,
"{\"Access-Check-Result\":\"Allowed:false, Message:\"}"
"{\"Access-Check-Result\":\"Unauthorized\"}"
);
LOG.info("Checking sys.servers query as datasourceOnlyUser...");
@ -319,7 +319,7 @@ public abstract class AbstractAuthConfigurationTest
datasourceOnlyUserClient,
SYS_SCHEMA_SERVERS_QUERY,
HttpResponseStatus.FORBIDDEN,
"{\"Access-Check-Result\":\"Allowed:false, Message:\"}"
"{\"Access-Check-Result\":\"Unauthorized\"}"
);
LOG.info("Checking sys.server_segments query as datasourceOnlyUser...");
@ -327,7 +327,7 @@ public abstract class AbstractAuthConfigurationTest
datasourceOnlyUserClient,
SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
HttpResponseStatus.FORBIDDEN,
"{\"Access-Check-Result\":\"Allowed:false, Message:\"}"
"{\"Access-Check-Result\":\"Unauthorized\"}"
);
LOG.info("Checking sys.tasks query as datasourceOnlyUser...");
@ -335,7 +335,7 @@ public abstract class AbstractAuthConfigurationTest
datasourceOnlyUserClient,
SYS_SCHEMA_TASKS_QUERY,
HttpResponseStatus.FORBIDDEN,
"{\"Access-Check-Result\":\"Allowed:false, Message:\"}"
"{\"Access-Check-Result\":\"Unauthorized\"}"
);
}
@ -366,7 +366,7 @@ public abstract class AbstractAuthConfigurationTest
datasourceAndSysUserClient,
SYS_SCHEMA_SERVERS_QUERY,
HttpResponseStatus.FORBIDDEN,
"{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}"
"{\"Access-Check-Result\":\"Insufficient permission to view servers: Unauthorized\"}"
);
LOG.info("Checking sys.server_segments query as datasourceAndSysUser...");
@ -374,7 +374,7 @@ public abstract class AbstractAuthConfigurationTest
datasourceAndSysUserClient,
SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
HttpResponseStatus.FORBIDDEN,
"{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}"
"{\"Access-Check-Result\":\"Insufficient permission to view servers: Unauthorized\"}"
);
LOG.info("Checking sys.tasks query as datasourceAndSysUser...");
@ -652,15 +652,13 @@ public abstract class AbstractAuthConfigurationTest
protected void testAvaticaQuery(Properties connectionProperties, String url)
{
LOG.info("URL: " + url);
try {
Connection connection = DriverManager.getConnection(url, connectionProperties);
Statement statement = connection.createStatement();
try (
Connection connection = DriverManager.getConnection(url, connectionProperties);
Statement statement = connection.createStatement()) {
statement.setMaxRows(450);
String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS";
ResultSet resultSet = statement.executeQuery(query);
Assert.assertTrue(resultSet.next());
statement.close();
connection.close();
}
catch (Exception e) {
throw new RuntimeException(e);
@ -681,9 +679,9 @@ public abstract class AbstractAuthConfigurationTest
throws Exception
{
LOG.info("URL: " + url);
try {
Connection connection = DriverManager.getConnection(url, connectionProperties);
Statement statement = connection.createStatement();
try (
Connection connection = DriverManager.getConnection(url, connectionProperties);
Statement statement = connection.createStatement()) {
statement.setMaxRows(450);
String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS";
statement.executeQuery(query);

View File

@ -47,8 +47,8 @@ public class ITBasicAuthConfigurationTest extends AbstractAuthConfigurationTest
private static final String BASIC_AUTHENTICATOR = "basic";
private static final String BASIC_AUTHORIZER = "basic";
private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: QueryInterruptedException: User metadata store authentication failed. -> BasicSecurityAuthenticationException: User metadata store authentication failed.";
private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: ForbiddenException: Allowed:false, Message:";
private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: User metadata store authentication failed.";
private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: Unauthorized";
private HttpClient druid99;

View File

@ -53,8 +53,8 @@ public class ITBasicAuthLdapConfigurationTest extends AbstractAuthConfigurationT
private static final String LDAP_AUTHENTICATOR = "ldap";
private static final String LDAP_AUTHORIZER = "ldapauth";
private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: QueryInterruptedException: User LDAP authentication failed. -> BasicSecurityAuthenticationException: User LDAP authentication failed.";
private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: ForbiddenException: Allowed:false, Message:";
private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: User LDAP authentication failed.";
private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: Unauthorized";
@Inject
IntegrationTestingConfig config;

View File

@ -44,7 +44,9 @@ import java.util.concurrent.CancellationException;
public class QueryInterruptedException extends QueryException
{
public static final String QUERY_INTERRUPTED = "Query interrupted";
public static final String QUERY_CANCELLED = "Query cancelled";
// Note: the proper spelling is with a single "l", but the version with
// two "l"s is documented, so we can't change the text of the message.
public static final String QUERY_CANCELED = "Query cancelled";
public static final String UNAUTHORIZED = "Unauthorized request";
public static final String UNSUPPORTED_OPERATION = "Unsupported operation";
public static final String TRUNCATED_RESPONSE_CONTEXT = "Truncated response context";
@ -96,7 +98,7 @@ public class QueryInterruptedException extends QueryException
} else if (e instanceof InterruptedException) {
return QUERY_INTERRUPTED;
} else if (e instanceof CancellationException) {
return QUERY_CANCELLED;
return QUERY_CANCELED;
} else if (e instanceof UnsupportedOperationException) {
return UNSUPPORTED_OPERATION;
} else if (e instanceof TruncatedResponseContextException) {

View File

@ -352,7 +352,7 @@ public class RunAllFullyWidgetTest extends FrameProcessorExecutorTest.BaseFrameP
Assert.assertTrue(future.cancel(true));
Assert.assertTrue(future.isCancelled());
// We don't have a good way to wait for future cancelation to truly finish. Resort to a waiting-loop.
// We don't have a good way to wait for future cancellation to truly finish. Resort to a waiting-loop.
while (exec.cancelableProcessorCount() > 0) {
Thread.sleep(10);
}

View File

@ -265,14 +265,14 @@ public class QueryContextTest
@Test
public void testLegacyReturnsLegacy()
{
Query legacy = new LegacyContextQuery(ImmutableMap.of("foo", "bar"));
Query<?> legacy = new LegacyContextQuery(ImmutableMap.of("foo", "bar"));
Assert.assertNull(legacy.getQueryContext());
}
@Test
public void testNonLegacyIsNotLegacyContext()
{
Query timeseries = Druids.newTimeseriesQueryBuilder()
Query<?> timeseries = Druids.newTimeseriesQueryBuilder()
.dataSource("test")
.intervals("2015-01-02/2015-01-03")
.granularity(Granularities.DAY)

View File

@ -19,10 +19,13 @@
package org.apache.druid.server.security;
import com.google.common.base.Strings;
import org.apache.druid.java.util.common.StringUtils;
public class Access
{
static final String DEFAULT_ERROR_MESSAGE = "Unauthorized";
public static final Access OK = new Access(true);
private final boolean allowed;
@ -49,6 +52,17 @@ public class Access
return message;
}
public String toMessage()
{
if (!Strings.isNullOrEmpty(message)) {
return toString();
} else if (allowed) {
return "Authorized";
} else {
return DEFAULT_ERROR_MESSAGE;
}
}
@Override
public String toString()
{

View File

@ -32,11 +32,9 @@ import java.util.function.Function;
*/
public class ForbiddenException extends RuntimeException implements SanitizableException
{
static final String DEFAULT_ERROR_MESSAGE = "Unauthorized.";
public ForbiddenException()
{
super(DEFAULT_ERROR_MESSAGE);
super(Access.DEFAULT_ERROR_MESSAGE);
}
@JsonCreator

View File

@ -90,7 +90,7 @@ public class JsonParserIteratorTest
}
@Test
public void testConvertFutureCancelationToQueryInterruptedException()
public void testConvertFutureCancellationToQueryInterruptedException()
{
JsonParserIterator<Object> iterator = new JsonParserIterator<>(
JAVA_TYPE,

View File

@ -45,7 +45,7 @@ public class ForbiddenExceptionTest
ForbiddenException forbiddenException = new ForbiddenException(ERROR_MESSAGE_ORIGINAL);
ForbiddenException actual = forbiddenException.sanitize(trasformFunction);
Assert.assertNotNull(actual);
Assert.assertEquals(actual.getMessage(), ForbiddenException.DEFAULT_ERROR_MESSAGE);
Assert.assertEquals(actual.getMessage(), Access.DEFAULT_ERROR_MESSAGE);
Mockito.verify(trasformFunction).apply(ArgumentMatchers.eq(ERROR_MESSAGE_ORIGINAL));
Mockito.verifyNoMoreInteractions(trasformFunction);
}
@ -61,4 +61,27 @@ public class ForbiddenExceptionTest
Mockito.verify(trasformFunction).apply(ArgumentMatchers.eq(ERROR_MESSAGE_ORIGINAL));
Mockito.verifyNoMoreInteractions(trasformFunction);
}
// Silly, but required to get the code coverage tests to pass.
@Test
public void testAccess()
{
Access access = new Access(false);
Assert.assertFalse(access.isAllowed());
Assert.assertEquals("", access.getMessage());
Assert.assertEquals("Allowed:false, Message:", access.toString());
Assert.assertEquals(Access.DEFAULT_ERROR_MESSAGE, access.toMessage());
access = new Access(true);
Assert.assertTrue(access.isAllowed());
Assert.assertEquals("", access.getMessage());
Assert.assertEquals("Allowed:true, Message:", access.toString());
Assert.assertEquals("Authorized", access.toMessage());
access = new Access(false, "oops");
Assert.assertFalse(access.isAllowed());
Assert.assertEquals("oops", access.getMessage());
Assert.assertEquals("Allowed:false, Message:oops", access.toString());
Assert.assertEquals("Allowed:false, Message:oops", access.toMessage());
}
}

View File

@ -0,0 +1,223 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.tools.ValidationException;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.QueryContext;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerResult;
import java.io.Closeable;
import java.util.Set;
import java.util.UUID;
import java.util.function.Function;
/**
* Represents a SQL statement either for preparation or execution.
* A statement is defined by a toolbox of lifecycle services and the query
* to execute. See derived classes for actions. Closing the statement
* emits logs and metrics for the statement.
*/
public abstract class AbstractStatement implements Closeable
{
private static final Logger log = new Logger(AbstractStatement.class);
protected final SqlToolbox sqlToolbox;
protected final SqlQueryPlus queryPlus;
protected final SqlExecutionReporter reporter;
protected PlannerContext plannerContext;
/**
* Resource actions used when authorizing a cancellation request. These actions
* include only the data-level actions (i.e., the datasource).
*/
protected Set<ResourceAction> cancellationResourceActions;
/**
* Full resource actions authorized as part of this request. Used when logging
* resource actions. Includes the query context, if query context authorization
* is enabled.
*/
protected Set<ResourceAction> fullResourceActions;
public AbstractStatement(
final SqlToolbox sqlToolbox,
final SqlQueryPlus queryPlus,
final String remoteAddress
)
{
this.sqlToolbox = sqlToolbox;
this.queryPlus = queryPlus;
this.reporter = new SqlExecutionReporter(this, remoteAddress);
// Context is modified, not copied.
contextWithSqlId(queryPlus.context())
.addDefaultParams(sqlToolbox.defaultQueryConfig.getContext());
}
private static QueryContext contextWithSqlId(QueryContext queryContext)
{
// "bySegment" results are never valid to use with SQL because the result format is incompatible
// so, overwrite any user specified context to avoid exceptions down the line
if (queryContext.removeUserParam(QueryContexts.BY_SEGMENT_KEY) != null) {
log.warn("'bySegment' results are not supported for SQL queries, ignoring query context parameter");
}
queryContext.addDefaultParam(PlannerContext.CTX_SQL_QUERY_ID, UUID.randomUUID().toString());
return queryContext;
}
public String sqlQueryId()
{
return queryPlus.context().getAsString(PlannerContext.CTX_SQL_QUERY_ID);
}
/**
* Validate SQL query and authorize against any datasources or views which
* will take part in the query. Must be called by the API methods, not
* directly.
*/
protected void validate(DruidPlanner planner)
{
plannerContext = planner.getPlannerContext();
plannerContext.setAuthenticationResult(queryPlus.authResult());
plannerContext.setParameters(queryPlus.parameters());
try {
planner.validate();
}
// We can't collapse catch clauses since SqlPlanningException has
// type-sensitive constructors.
catch (SqlParseException e) {
throw new SqlPlanningException(e);
}
catch (ValidationException e) {
throw new SqlPlanningException(e);
}
}
/**
* Authorize the query using the authorizer provided, and an option to authorize
* context variables as well as query resources.
*/
protected void authorize(
DruidPlanner planner,
Function<Set<ResourceAction>, Access> authorizer
)
{
boolean authorizeContextParams = sqlToolbox.authConfig.authorizeQueryContextParams();
// Authentication is done by the planner using the function provided
// here. The planner ensures that this step is done before planning.
Access authorizationResult = planner.authorize(authorizer, authorizeContextParams);
if (!authorizationResult.isAllowed()) {
throw new ForbiddenException(authorizationResult.toMessage());
}
// Capture the query resources twice. The first is used to validate the request
// to cancel the query, and includes only the query-level resources. The second
// is used to report the resources actually authorized and includes the
// query context variables, if we are authorizing them.
cancellationResourceActions = planner.resourceActions(false);
fullResourceActions = planner.resourceActions(authorizeContextParams);
}
/**
* Resource authorizer based on the authentication result
* provided earlier.
*/
protected Function<Set<ResourceAction>, Access> authorizer()
{
return resourceActions ->
AuthorizationUtils.authorizeAllResourceActions(
queryPlus.authResult(),
resourceActions,
sqlToolbox.plannerFactory.getAuthorizerMapper()
);
}
/**
* Plan the query, which also produces the sequence that runs
* the query.
*/
protected PlannerResult plan(DruidPlanner planner)
{
try {
return planner.plan();
}
catch (ValidationException e) {
throw new SqlPlanningException(e);
}
}
/**
* Return the datasource and table resources for this
* statement.
*/
public Set<ResourceAction> resources()
{
return cancellationResourceActions;
}
public Set<ResourceAction> allResources()
{
return fullResourceActions;
}
public SqlQueryPlus sqlRequest()
{
return queryPlus;
}
public SqlExecutionReporter reporter()
{
return reporter;
}
/**
* Releases resources and emits logs and metrics as defined by the
* associated reporter.
*/
@Override
public void close()
{
try {
closeQuietly();
}
catch (Exception e) {
reporter.failed(e);
}
reporter.emit();
}
/**
* Closes the statement without reporting metrics.
*/
public void closeQuietly()
{
}
}

View File

@ -0,0 +1,179 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.QueryInterruptedException;
import org.apache.druid.sql.SqlLifecycleManager.Cancelable;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerResult;
import org.apache.druid.sql.calcite.planner.PrepareResult;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* Lifecycle for direct SQL statement execution, which means that the query
* is planned and executed in a single step, with no "prepare" step.
* Callers need only call:
* <ul>
* <li>{@link #execute()} to execute the query. The caller must close
* the returned {@code Sequence}.</li>
* <li>{@link #close()} to report metrics, or {@link #closeQuietly()}
* otherwise.</li>
* </ul>
* <p>
* The {@link #cancel()} method may be called from any thread and cancels
* the query.
* <p>
* All other methods are optional and are generally for introspection.
* <p>
* The class supports two threading models. In the simple case, the same
* thread creates this object and executes the query. In the split model,
* a request thread creates this object and plans the query. A separate
* response thread consumes results and performs any desired logging, etc.
* The object is transferred between threads, with no overlapping access.
* <p>
* The statement itself holds no resources, so {@link #close()} need not be
* called for resource cleanup. Only the {@code Sequence} returned from
* {@link #execute()} need be closed.
* <p>
* Use this class for tests and JDBC execution. Use the HTTP variant,
* {@link HttpStatement} for HTTP requests.
*/
public class DirectStatement extends AbstractStatement implements Cancelable
{
private static final Logger log = new Logger(DirectStatement.class);
protected PrepareResult prepareResult;
protected PlannerResult plannerResult;
private volatile boolean canceled;
public DirectStatement(
final SqlToolbox lifecycleToolbox,
final SqlQueryPlus queryPlus,
final String remoteAddress
)
{
super(lifecycleToolbox, queryPlus, remoteAddress);
}
public DirectStatement(
final SqlToolbox lifecycleToolbox,
final SqlQueryPlus sqlRequest
)
{
super(lifecycleToolbox, sqlRequest, null);
}
/**
* Direct execution of a query, including:
* <ul>
* <li>Create the planner.</li>
* <li>Parse the statement.</li>
* <li>Provide parameters using a <a href="https://github.com/apache/druid/pull/6974">
* "query optimized"</a> structure.</li>
* <li>Validate the query against the Druid catalog.</li>
* <li>Authorize access to the resources which the query needs.</li>
* <li>Plan the query.</li>
* <li>Return a {@link Sequence} which executes the query and returns results.</li>
* </ul>
*
* This method is called from the request thread; results are read in the
* response thread.
*
* @return sequence which delivers query results
*/
public Sequence<Object[]> execute()
{
try (DruidPlanner planner = sqlToolbox.plannerFactory.createPlanner(
queryPlus.sql(),
queryPlus.context())) {
validate(planner);
authorize(planner, authorizer());
// Adding the statement to the lifecycle manager allows cancellation.
// Tests cancel during this call; real clients might do so if the plan
// or execution prep stages take too long for some unexpected reason.
sqlToolbox.sqlLifecycleManager.add(sqlQueryId(), this);
checkCanceled();
plannerResult = plan(planner);
prepareResult = planner.prepareResult();
return doExecute();
}
catch (RuntimeException e) {
reporter.failed(e);
throw e;
}
}
public PrepareResult prepareResult()
{
return prepareResult;
}
/**
* Do the actual execute step which allows subclasses to wrap the sequence,
* as is sometimes needed for testing.
*/
protected Sequence<Object[]> doExecute()
{
// Check cancellation here and not in execute() above:
// required for SqlResourceTest to work.
checkCanceled();
try {
return plannerResult.run();
}
catch (RuntimeException e) {
reporter.failed(e);
throw e;
}
}
/**
* Checks for cancellation. As it turns out, this is really just a test-time
* check: an actual client can't cancel the query until the query reports
* a query ID, which won't happen until after the {@link #execute()}
* call.
*/
private void checkCanceled()
{
if (canceled) {
throw new QueryInterruptedException(
QueryInterruptedException.QUERY_CANCELED,
StringUtils.format("Query is canceled [%s]", sqlQueryId()),
null,
null
);
}
}
@Override
public void cancel()
{
canceled = true;
final CopyOnWriteArrayList<String> nativeQueryIds = plannerContext.getNativeQueryIds();
for (String nativeQueryId : nativeQueryIds) {
log.debug("Canceling native query [%s]", nativeQueryId);
sqlToolbox.queryScheduler.cancelQuery(nativeQueryId);
}
}
}
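
To make the split threading model described above concrete, here is a hedged sketch (writeResults is a hypothetical consumer, not part of this PR):

// Request thread: validates, authorizes and plans the query.
DirectStatement stmt = new DirectStatement(toolbox, queryPlus);
Sequence<Object[]> results = stmt.execute();

// Response thread: consume results, record the outcome, then report.
try {
  long bytesWritten = writeResults(results); // hypothetical consumer
  stmt.reporter().succeeded(bytesWritten);
}
catch (Exception e) {
  stmt.reporter().failed(e);
}
finally {
  stmt.close(); // emits logs and metrics via SqlExecutionReporter
}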

View File

@ -0,0 +1,77 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.sql.http.SqlQuery;
import javax.servlet.http.HttpServletRequest;
import java.util.Set;
import java.util.function.Function;
/**
* SQL statement lifecycle for the HTTP endpoint. The request thread
* creates the object and calls {@link #execute()}. The response thread
* reads results and inspects the statement contents to emit logs and
* metrics. The object is transferred between threads, with no overlapping
* access.
* <p>
* The key extension of an HTTP statement is the use of the HTTP request
* for authorization.
*/
public class HttpStatement extends DirectStatement
{
private final HttpServletRequest req;
public HttpStatement(
final SqlToolbox lifecycleToolbox,
final SqlQuery sqlQuery,
final HttpServletRequest req
)
{
super(
lifecycleToolbox,
SqlQueryPlus.builder(sqlQuery)
.auth(AuthorizationUtils.authenticationResultFromRequest(req))
.build(),
req.getRemoteAddr()
);
this.req = req;
}
@Override
protected Function<Set<ResourceAction>, Access> authorizer()
{
return resourceActions ->
AuthorizationUtils.authorizeAllResourceActions(
req,
resourceActions,
sqlToolbox.plannerFactory.getAuthorizerMapper()
);
}
public SqlRowTransformer createRowTransformer()
{
return new SqlRowTransformer(plannerContext.getTimeZone(), plannerResult.rowType());
}
}
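
A hedged sketch of the HTTP flow; the surrounding resource method and the row serialization are assumptions, not shown in this diff:

HttpStatement stmt = new HttpStatement(toolbox, sqlQuery, httpServletRequest);
Sequence<Object[]> results = stmt.execute(); // authorizes against the request
SqlRowTransformer transformer = stmt.createRowTransformer();
// ... serialize each row via the transformer, report bytes written,
// then stmt.close() on the response thread ...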

View File

@ -0,0 +1,106 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import org.apache.calcite.avatica.remote.TypedValue;
import org.apache.calcite.tools.ValidationException;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PrepareResult;
import java.util.List;
/**
* Statement for the JDBC prepare-once, execute many model.
*/
public class PreparedStatement extends AbstractStatement
{
private final SqlQueryPlus originalRequest;
private PrepareResult prepareResult;
public PreparedStatement(
final SqlToolbox lifecycleToolbox,
final SqlQueryPlus queryPlus
)
{
super(lifecycleToolbox, queryPlus, null);
this.originalRequest = queryPlus;
}
/**
* Prepare the query for execution, without completely planning it into
* something executable, but including some initial parsing and
* validation, to support prepared statements via JDBC.
* <p>
* Note that, per JDBC convention, the prepare step does not provide
* parameter values: those are provided later during execution and will generally
* vary from one execution to the next.
*
* <ul>
* <li>Create the planner.</li>
* <li>Parse the statement.</li>
* <li>JDBC does not provide parameter values at prepare time.
* They are provided during execution later, where we'll replan the
* query to use the <a href="https://github.com/apache/druid/pull/6974">
* "query optimized"</a> structure.</li>
* <li>Validate the query against the Druid catalog.</li>
* <li>Authorize access to the resources which the query needs.</li>
* <li>Return a {@link PrepareResult} which describes the query.</li>
* </ul>
*/
public PrepareResult prepare()
{
try (DruidPlanner planner = sqlToolbox.plannerFactory.createPlanner(
queryPlus.sql(),
queryPlus.context())) {
validate(planner);
authorize(planner, authorizer());
// Do the prepare step.
try {
this.prepareResult = planner.prepare();
return prepareResult;
}
catch (ValidationException e) {
throw new SqlPlanningException(e);
}
}
catch (RuntimeException e) {
reporter.failed(e);
throw e;
}
}
/**
* Execute a prepared JDBC query. Druid uses
* <a href="https://github.com/apache/druid/pull/6974">
* "query optimized"</a> parameters, which means we do not reuse the statement
* prepared above, but rather plan anew with the actual parameter values. The
* same statement can be executed many times, including concurrently. Each
* execution repeats the parse, validate, authorize and plan steps since
* data, permissions, views and other dependencies may have changed.
*/
public DirectStatement execute(List<TypedValue> parameters)
{
return new DirectStatement(
sqlToolbox,
originalRequest.withParameters(parameters)
);
}
}
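
Putting the two halves together, a hedged sketch of the JDBC prepare-once, execute-many flow (toolbox, queryPlus, and typedValues are assumed to come from the JDBC layer). Note that, per the commit notes, prepare() now enforces security: it authorizes before returning.

PreparedStatement prepared = new PreparedStatement(toolbox, queryPlus);
PrepareResult prepareResult = prepared.prepare(); // parse, validate, authorize

// Later, possibly many times and concurrently, with actual parameter values:
try (DirectStatement run = prepared.execute(typedValues)) {
  Sequence<Object[]> results = run.execute(); // replans with the parameters
  // ... consume the results ...
}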

View File

@ -0,0 +1,146 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.druid.query.QueryContext;
import org.apache.druid.query.QueryInterruptedException;
import org.apache.druid.query.QueryTimeoutException;
import org.apache.druid.server.QueryStats;
import org.apache.druid.server.RequestLogLine;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
* Side-car class which reports logs and metrics for an
* {@link HttpStatement}. This separate class cleanly separates the logic
* for running a query from the logic for reporting on that run. A query
* can end either in success or in error. This object is created in
* the request thread, with the remaining methods called from either the
* request thread or the response thread, but not both.
*/
public class SqlExecutionReporter
{
private static final Logger log = new Logger(SqlExecutionReporter.class);
private final AbstractStatement stmt;
private final String remoteAddress;
private final long startMs;
private final long startNs;
private Throwable e;
private long bytesWritten;
public SqlExecutionReporter(
final AbstractStatement stmt,
final String remoteAddress
)
{
this.stmt = stmt;
this.remoteAddress = remoteAddress;
this.startMs = System.currentTimeMillis();
this.startNs = System.nanoTime();
}
public void failed(Throwable e)
{
this.e = e;
}
public void succeeded(final long bytesWritten)
{
this.bytesWritten = bytesWritten;
}
public void emit()
{
final boolean success = e == null;
final long queryTimeNs = System.nanoTime() - startNs;
ServiceEmitter emitter = stmt.sqlToolbox.emitter;
PlannerContext plannerContext = stmt.plannerContext;
try {
ServiceMetricEvent.Builder metricBuilder = ServiceMetricEvent.builder();
if (plannerContext != null) {
metricBuilder.setDimension("id", plannerContext.getSqlQueryId());
metricBuilder.setDimension("nativeQueryIds", plannerContext.getNativeQueryIds().toString());
}
if (stmt.fullResourceActions != null) {
metricBuilder.setDimension(
"dataSource",
stmt.fullResourceActions
.stream()
.map(action -> action.getResource().getName())
.collect(Collectors.toList())
.toString()
);
}
metricBuilder.setDimension("remoteAddress", StringUtils.nullToEmptyNonDruidDataString(remoteAddress));
metricBuilder.setDimension("success", String.valueOf(success));
emitter.emit(metricBuilder.build("sqlQuery/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs)));
if (bytesWritten >= 0) {
emitter.emit(metricBuilder.build("sqlQuery/bytes", bytesWritten));
}
final Map<String, Object> statsMap = new LinkedHashMap<>();
statsMap.put("sqlQuery/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs));
statsMap.put("sqlQuery/bytes", bytesWritten);
statsMap.put("success", success);
QueryContext queryContext;
if (plannerContext == null) {
queryContext = stmt.queryPlus.context();
} else {
statsMap.put("identity", plannerContext.getAuthenticationResult().getIdentity());
queryContext = stmt.queryPlus.context();
queryContext.addSystemParam("nativeQueryIds", plannerContext.getNativeQueryIds().toString());
}
final Map<String, Object> context = queryContext.getMergedParams();
statsMap.put("context", context);
if (e != null) {
statsMap.put("exception", e.toString());
if (e instanceof QueryInterruptedException || e instanceof QueryTimeoutException) {
statsMap.put("interrupted", true);
statsMap.put("reason", e.toString());
}
}
stmt.sqlToolbox.requestLogger.logSqlQuery(
RequestLogLine.forSql(
stmt.queryPlus.sql(),
context,
DateTimes.utc(startMs),
remoteAddress,
new QueryStats(statsMap)
)
);
}
catch (Exception ex) {
log.error(ex, "Unable to log SQL [%s]!", stmt.queryPlus.sql());
}
}
}
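
As a cross-check of the flow above, a hedged summary of what one statement run produces, paraphrasing emit() (the names come straight from the code above):

// Driven by AbstractStatement.close() on the response thread:
//   sqlQuery/time  - elapsed millis from statement creation to emit()
//   sqlQuery/bytes - bytes written, when reported via succeeded()
//   dimensions: id, nativeQueryIds, dataSource, remoteAddress, success
// plus one request-log line (RequestLogLine.forSql) carrying the same
// stats, with "exception"/"interrupted" entries on the failure path.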

View File

@ -1,592 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import org.apache.calcite.avatica.remote.TypedValue;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.tools.RelConversionException;
import org.apache.calcite.tools.ValidationException;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.SequenceWrapper;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.druid.query.DefaultQueryConfig;
import org.apache.druid.query.QueryContext;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryInterruptedException;
import org.apache.druid.query.QueryTimeoutException;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.server.QueryScheduler;
import org.apache.druid.server.QueryStats;
import org.apache.druid.server.RequestLogLine;
import org.apache.druid.server.log.RequestLogger;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.planner.PlannerResult;
import org.apache.druid.sql.calcite.planner.PrepareResult;
import org.apache.druid.sql.calcite.table.RowSignatures;
import org.apache.druid.sql.http.SqlParameter;
import org.apache.druid.sql.http.SqlQuery;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* Similar to {@link org.apache.druid.server.QueryLifecycle}, this class manages the lifecycle of a SQL query.
* It ensures that a SQL query goes through the following stages, in the proper order:
*
* <ol>
* <li>Initialization ({@link #initialize(String, QueryContext)})</li>
* <li>Validation and Authorization ({@link #validateAndAuthorize(HttpServletRequest)} or {@link #validateAndAuthorize(AuthenticationResult)})</li>
* <li>Planning ({@link #plan()})</li>
* <li>Execution ({@link #execute()})</li>
* <li>Logging ({@link #finalizeStateAndEmitLogsAndMetrics(Throwable, String, long)})</li>
* </ol>
*
* Every method in this class must be called by the same thread except for {@link #cancel()}.
*/
public class SqlLifecycle
{
private static final Logger log = new Logger(SqlLifecycle.class);
private final PlannerFactory plannerFactory;
private final ServiceEmitter emitter;
private final RequestLogger requestLogger;
private final QueryScheduler queryScheduler;
private final AuthConfig authConfig;
private final DefaultQueryConfig defaultQueryConfig;
private final long startMs;
private final long startNs;
/**
* This lock coordinates access to {@link #state}, establishing a happens-before relationship
* between {@link #cancel} and {@link #transition}.
*/
private final Object stateLock = new Object();
@GuardedBy("stateLock")
private State state = State.NEW;
// init during initialize
private String sql;
private QueryContext queryContext;
private List<TypedValue> parameters;
// init during plan
/**
* The Druid planner follows the SQL statement through the lifecycle.
* The planner's state is start --> validate --> (prepare | plan).
*/
private DruidPlanner planner;
private PlannerContext plannerContext;
private PrepareResult prepareResult;
private Set<ResourceAction> resourceActions;
private PlannerResult plannerResult;
public SqlLifecycle(
PlannerFactory plannerFactory,
ServiceEmitter emitter,
RequestLogger requestLogger,
QueryScheduler queryScheduler,
AuthConfig authConfig,
DefaultQueryConfig defaultQueryConfig,
long startMs,
long startNs
)
{
this.plannerFactory = plannerFactory;
this.emitter = emitter;
this.requestLogger = requestLogger;
this.queryScheduler = queryScheduler;
this.authConfig = authConfig;
this.defaultQueryConfig = defaultQueryConfig;
this.startMs = startMs;
this.startNs = startNs;
this.parameters = Collections.emptyList();
}
/**
* Initialize the query lifecycle: set the raw SQL string and the initial query context, and assign a SQL query ID.
*
* If successful (it will be), it will transition the lifecycle to {@link State#INITIALIZED}.
*/
public String initialize(String sql, QueryContext queryContext)
{
transition(State.NEW, State.INITIALIZED);
this.sql = sql;
this.queryContext = contextWithSqlId(queryContext);
this.queryContext.addDefaultParams(defaultQueryConfig.getContext());
return sqlQueryId();
}
private QueryContext contextWithSqlId(QueryContext queryContext)
{
// "bySegment" results are never valid to use with SQL because the result format is incompatible
// so, overwrite any user specified context to avoid exceptions down the line
if (queryContext.removeUserParam(QueryContexts.BY_SEGMENT_KEY) != null) {
log.warn("'bySegment' results are not supported for SQL queries, ignoring query context parameter");
}
queryContext.addDefaultParam(PlannerContext.CTX_SQL_QUERY_ID, UUID.randomUUID().toString());
return queryContext;
}
private String sqlQueryId()
{
return queryContext.getAsString(PlannerContext.CTX_SQL_QUERY_ID);
}
/**
* Assign dynamic parameters used to substitute values during query execution. This can be done at any
* point in the lifecycle.
*/
public void setParameters(List<TypedValue> parameters)
{
this.parameters = parameters;
if (this.plannerContext != null) {
this.plannerContext.setParameters(parameters);
}
}
/**
* Validate SQL query and authorize against any datasources or views which will take part in the query.
*
* If successful, the lifecycle will transition from {@link State#INITIALIZED} to
* {@link State#AUTHORIZING} and then to either {@link State#AUTHORIZED} or {@link State#UNAUTHORIZED}.
*/
public void validateAndAuthorize(AuthenticationResult authenticationResult)
{
synchronized (stateLock) {
if (state == State.AUTHORIZED) {
return;
}
}
transition(State.INITIALIZED, State.AUTHORIZING);
validate(authenticationResult);
doAuthorize(resourceActions ->
AuthorizationUtils.authorizeAllResourceActions(
authenticationResult,
resourceActions,
plannerFactory.getAuthorizerMapper()
)
);
}
/**
* Validate the SQL query and authorize against any datasources or views which will take part in the
* query. Like {@link #validateAndAuthorize(AuthenticationResult)} but for a {@link HttpServletRequest}.
*
* If successful, the lifecycle will transition from {@link State#INITIALIZED} to
* {@link State#AUTHORIZING} and then to either {@link State#AUTHORIZED} or {@link State#UNAUTHORIZED}.
*/
public void validateAndAuthorize(HttpServletRequest req)
{
transition(State.INITIALIZED, State.AUTHORIZING);
AuthenticationResult authResult = AuthorizationUtils.authenticationResultFromRequest(req);
validate(authResult);
doAuthorize(resourceActions ->
AuthorizationUtils.authorizeAllResourceActions(
req,
resourceActions,
plannerFactory.getAuthorizerMapper()
)
);
}
/**
* Perform the validation step on the Druid planner, leaving the planner
* ready to perform either prepare or plan.
*/
private void validate(AuthenticationResult authenticationResult)
{
try {
planner = plannerFactory.createPlanner(sql, queryContext);
// set planner context for logs/metrics in case something explodes early
plannerContext = planner.getPlannerContext();
plannerContext.setAuthenticationResult(authenticationResult);
// set parameters on planner context, if parameters have already been set
plannerContext.setParameters(parameters);
planner.validate();
// Capture the resource actions as these are referenced past the
// life of the planner itself.
resourceActions = planner.resourceActions(authConfig.authorizeQueryContextParams());
}
// we can't collapse catch clauses since SqlPlanningException has type-sensitive constructors.
catch (SqlParseException e) {
throw new SqlPlanningException(e);
}
catch (ValidationException e) {
throw new SqlPlanningException(e);
}
}
private void doAuthorize(Function<Set<ResourceAction>, Access> authorizer)
{
Access authorizationResult = planner.authorize(
authorizer,
authConfig.authorizeQueryContextParams()
);
if (!authorizationResult.isAllowed()) {
// Not authorized; go straight to Jail, do not pass Go.
transition(State.AUTHORIZING, State.UNAUTHORIZED);
throw new ForbiddenException(authorizationResult.toString());
}
transition(State.AUTHORIZING, State.AUTHORIZED);
}
/**
* Prepare the query lifecycle for execution without fully planning it into
* an executable form, but including initial parsing and
* validation and any dynamic parameter type resolution, to support prepared
* statements via JDBC.
*
* The planner must have already performed the validation step: the planner
* state is reused here.
*/
public PrepareResult prepare()
{
synchronized (stateLock) {
if (state != State.AUTHORIZED) {
throw new ISE("Cannot prepare because current state [%s] is not [%s].", state, State.AUTHORIZED);
}
}
Preconditions.checkNotNull(plannerContext, "Cannot prepare, plannerContext is null");
try {
this.prepareResult = planner.prepare();
return prepareResult;
}
catch (ValidationException e) {
throw new SqlPlanningException(e);
}
}
/**
* Plan the query to enable execution.
*
* The planner must have already performed the validation step: the planner
* state is reused here.
*
* If successful, the lifecycle will transition from
* {@link State#AUTHORIZED} to {@link State#PLANNED}.
*/
public void plan() throws RelConversionException
{
transition(State.AUTHORIZED, State.PLANNED);
Preconditions.checkNotNull(plannerContext, "Cannot plan, plannerContext is null");
try {
this.plannerResult = planner.plan();
}
catch (ValidationException e) {
throw new SqlPlanningException(e);
}
finally {
// Done with the planner, close it.
planner.close();
planner = null;
}
}
/**
* This method must be called after {@link #plan()}.
*/
public SqlRowTransformer createRowTransformer()
{
assert plannerContext != null;
assert plannerResult != null;
return new SqlRowTransformer(plannerContext.getTimeZone(), plannerResult.rowType());
}
@VisibleForTesting
PlannerContext getPlannerContext()
{
return plannerContext;
}
/**
* Execute the fully planned query.
*
* If successful, the lifecycle will transition from {@link State#PLANNED} to {@link State#EXECUTING}.
*/
public Sequence<Object[]> execute()
{
transition(State.PLANNED, State.EXECUTING);
return plannerResult.run();
}
/**
* Only for testing... returns result row signature and sequence of results
*/
@VisibleForTesting
public Pair<RowSignature, Sequence<Object[]>> runSimple(
String sql,
Map<String, Object> queryContext,
List<SqlParameter> parameters,
AuthenticationResult authenticationResult
) throws RelConversionException
{
Sequence<Object[]> result;
initialize(sql, new QueryContext(queryContext));
try {
setParameters(SqlQuery.getParameterList(parameters));
validateAndAuthorize(authenticationResult);
plan();
result = execute();
}
catch (Throwable e) {
if (!(e instanceof ForbiddenException)) {
finalizeStateAndEmitLogsAndMetrics(e, null, -1);
}
throw e;
}
return new Pair<>(
RowSignatures.fromRelDataType(plannerResult.rowType().getFieldNames(), plannerResult.rowType()),
Sequences.wrap(
result,
new SequenceWrapper()
{
@Override
public void after(boolean isDone, Throwable thrown)
{
finalizeStateAndEmitLogsAndMetrics(thrown, null, -1);
}
}
)
);
}
@VisibleForTesting
public Set<ResourceAction> runAnalyzeResources(AuthenticationResult authenticationResult)
{
validate(authenticationResult);
return getRequiredResourceActions();
}
public Set<ResourceAction> getRequiredResourceActions()
{
return resourceActions;
}
/**
* Cancel all native queries associated with this lifecycle.
*
* This method is thread-safe.
*/
public void cancel()
{
synchronized (stateLock) {
if (state == State.CANCELLED) {
return;
}
state = State.CANCELLED;
}
final CopyOnWriteArrayList<String> nativeQueryIds = plannerContext.getNativeQueryIds();
for (String nativeQueryId : nativeQueryIds) {
log.debug("Canceling native query [%s]", nativeQueryId);
queryScheduler.cancelQuery(nativeQueryId);
}
}
/**
* Emit logs and metrics for this query.
*
* @param e exception that occurred while processing this query
* @param remoteAddress remote address, for logging; or null if unknown
* @param bytesWritten number of bytes written; will become a sqlQuery/bytes metric if >= 0
*/
public void finalizeStateAndEmitLogsAndMetrics(
@Nullable final Throwable e,
@Nullable final String remoteAddress,
final long bytesWritten
)
{
if (queryContext == null) {
// Never initialized, don't log or emit anything.
return;
}
synchronized (stateLock) {
assert state != State.UNAUTHORIZED; // should not emit below metrics when the query fails to authorize
if (state != State.CANCELLED) {
if (state == State.DONE) {
log.warn("Tried to emit logs and metrics twice for query [%s]!", sqlQueryId());
}
state = State.DONE;
}
}
final Set<ResourceAction> actions;
if (planner != null) {
actions = getRequiredResourceActions();
planner.close();
planner = null;
} else {
actions = null;
}
final boolean success = e == null;
final long queryTimeNs = System.nanoTime() - startNs;
try {
ServiceMetricEvent.Builder metricBuilder = ServiceMetricEvent.builder();
if (plannerContext != null) {
metricBuilder.setDimension("id", plannerContext.getSqlQueryId());
metricBuilder.setDimension("nativeQueryIds", plannerContext.getNativeQueryIds().toString());
}
if (actions != null) {
metricBuilder.setDimension(
"dataSource",
actions
.stream()
.map(action -> action.getResource().getName())
.collect(Collectors.toList())
.toString()
);
}
metricBuilder.setDimension("remoteAddress", StringUtils.nullToEmptyNonDruidDataString(remoteAddress));
metricBuilder.setDimension("success", String.valueOf(success));
emitter.emit(metricBuilder.build("sqlQuery/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs)));
if (bytesWritten >= 0) {
emitter.emit(metricBuilder.build("sqlQuery/bytes", bytesWritten));
}
final Map<String, Object> statsMap = new LinkedHashMap<>();
statsMap.put("sqlQuery/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs));
statsMap.put("sqlQuery/bytes", bytesWritten);
statsMap.put("success", success);
if (plannerContext != null) {
statsMap.put("identity", plannerContext.getAuthenticationResult().getIdentity());
queryContext.addSystemParam("nativeQueryIds", plannerContext.getNativeQueryIds().toString());
}
final Map<String, Object> context = queryContext.getMergedParams();
statsMap.put("context", context);
if (e != null) {
statsMap.put("exception", e.toString());
if (e instanceof QueryInterruptedException || e instanceof QueryTimeoutException) {
statsMap.put("interrupted", true);
statsMap.put("reason", e.toString());
}
}
requestLogger.logSqlQuery(
RequestLogLine.forSql(
sql,
context,
DateTimes.utc(startMs),
remoteAddress,
new QueryStats(statsMap)
)
);
}
catch (Exception ex) {
log.error(ex, "Unable to log SQL [%s]!", sql);
}
}
@VisibleForTesting
public State getState()
{
synchronized (stateLock) {
return state;
}
}
@VisibleForTesting
QueryContext getQueryContext()
{
return queryContext;
}
private void transition(final State from, final State to)
{
synchronized (stateLock) {
if (state == State.CANCELLED) {
throw new QueryInterruptedException(
QueryInterruptedException.QUERY_CANCELLED,
StringUtils.format("Query is canceled [%s]", sqlQueryId()),
null,
null
);
}
if (state != from) {
throw new ISE(
"Cannot transition from [%s] to [%s] because current state [%s] is not [%s].",
from,
to,
state,
from
);
}
state = to;
}
}
enum State
{
NEW,
INITIALIZED,
AUTHORIZING,
AUTHORIZED,
PLANNED,
EXECUTING,
// final states
UNAUTHORIZED,
CANCELLED, // query is cancelled. can be transitioned to this state only after AUTHORIZED.
DONE // query could either succeed or fail
}
}
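
For orientation, here is the stage ordering the deleted class enforced, reconstructed as a minimal sketch from the javadoc and runSimple() above. The wrapper method and variable names are illustrative, not taken from the source; types come from the imports of the file itself.

  // Minimal sketch (illustrative): the ordering that SqlLifecycle enforced.
  Sequence<Object[]> runLifecycle(
      SqlLifecycleFactory factory,
      String sql,
      QueryContext context,
      List<TypedValue> parameters,
      AuthenticationResult authResult
  ) throws RelConversionException
  {
    SqlLifecycle lifecycle = factory.factorize();
    lifecycle.initialize(sql, context);          // NEW -> INITIALIZED
    lifecycle.setParameters(parameters);         // may be called at any point
    lifecycle.validateAndAuthorize(authResult);  // INITIALIZED -> AUTHORIZING -> AUTHORIZED
    lifecycle.plan();                            // AUTHORIZED -> PLANNED
    // The caller must call finalizeStateAndEmitLogsAndMetrics(...) once the
    // returned sequence is fully drained (see runSimple() for the wrapping).
    return lifecycle.execute();                  // PLANNED -> EXECUTING
  }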

View File

@ -22,24 +22,27 @@ package org.apache.druid.sql;
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import org.apache.druid.guice.LazySingleton;
import org.apache.druid.sql.SqlLifecycle.State;
import org.apache.druid.server.security.ResourceAction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* This class manages only _authorized_ {@link SqlLifecycle}s submitted via HTTP,
* such as {@link org.apache.druid.sql.http.SqlResource}. The main use case of this class is
* tracking running queries so that the cancel API can identify the lifecycles to cancel.
* This class manages only <i>authorized</i> {@link DirectStatement}s submitted via
* HTTP, such as {@link org.apache.druid.sql.http.SqlResource}. The main use case of
* this class is tracking running queries so that the cancel API can identify
* the statements to cancel.
*
* This class is thread-safe as there are 2 or more threads that can access lifecycles at the same time
* for query running or query canceling.
* This class is thread-safe as there are 2 or more threads that can access
* statements at the same time for query running or query canceling.
*
* For managing and canceling native queries, see {@link org.apache.druid.server.QueryScheduler}.
* As its name indicates, it also performs resource scheduling for native queries based on query lanes
* For managing and canceling native queries, see
* {@link org.apache.druid.server.QueryScheduler}. As its name indicates, it
* also performs resource scheduling for native queries based on query lanes
* {@link org.apache.druid.server.QueryLaningStrategy}.
*
* @see org.apache.druid.server.QueryScheduler#cancelQuery(String)
@ -47,15 +50,20 @@ import java.util.Map;
@LazySingleton
public class SqlLifecycleManager
{
public interface Cancelable
{
Set<ResourceAction> resources();
void cancel();
}
private final Object lock = new Object();
@GuardedBy("lock")
private final Map<String, List<SqlLifecycle>> sqlLifecycles = new HashMap<>();
private final Map<String, List<Cancelable>> sqlLifecycles = new HashMap<>();
public void add(String sqlQueryId, SqlLifecycle lifecycle)
public void add(String sqlQueryId, Cancelable lifecycle)
{
synchronized (lock) {
assert lifecycle.getState() == State.AUTHORIZED;
sqlLifecycles.computeIfAbsent(sqlQueryId, k -> new ArrayList<>())
.add(lifecycle);
}
@ -65,10 +73,10 @@ public class SqlLifecycleManager
* Removes the given lifecycle for the given query ID.
* This method uses {@link Object#equals} to find the lifecycle matching the given parameter.
*/
public void remove(String sqlQueryId, SqlLifecycle lifecycle)
public void remove(String sqlQueryId, Cancelable lifecycle)
{
synchronized (lock) {
List<SqlLifecycle> lifecycles = sqlLifecycles.get(sqlQueryId);
List<Cancelable> lifecycles = sqlLifecycles.get(sqlQueryId);
if (lifecycles != null) {
lifecycles.remove(lifecycle);
if (lifecycles.isEmpty()) {
@ -82,10 +90,10 @@ public class SqlLifecycleManager
* For the given sqlQueryId, this method removes all lifecycles that match the given list of lifecycles.
* This method uses {@link Object#equals} for matching lifecycles.
*/
public void removeAll(String sqlQueryId, List<SqlLifecycle> lifecyclesToRemove)
public void removeAll(String sqlQueryId, List<Cancelable> lifecyclesToRemove)
{
synchronized (lock) {
List<SqlLifecycle> lifecycles = sqlLifecycles.get(sqlQueryId);
List<Cancelable> lifecycles = sqlLifecycles.get(sqlQueryId);
if (lifecycles != null) {
lifecycles.removeAll(lifecyclesToRemove);
if (lifecycles.isEmpty()) {
@ -98,10 +106,10 @@ public class SqlLifecycleManager
/**
* Returns a snapshot of the lifecycles for the given sqlQueryId.
*/
public List<SqlLifecycle> getAll(String sqlQueryId)
public List<Cancelable> getAll(String sqlQueryId)
{
synchronized (lock) {
List<SqlLifecycle> lifecycles = sqlLifecycles.get(sqlQueryId);
List<Cancelable> lifecycles = sqlLifecycles.get(sqlQueryId);
return lifecycles == null ? Collections.emptyList() : ImmutableList.copyOf(lifecycles);
}
}
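
Since the manager now stores the Cancelable abstraction rather than SqlLifecycle, a cancel path might look like the sketch below. The permission check against resources() is an assumption based on the interface shape; it is not shown in this file.

  // Sketch: a cancel endpoint draining the manager for one sqlQueryId.
  void cancelAll(SqlLifecycleManager manager, String sqlQueryId)
  {
    List<SqlLifecycleManager.Cancelable> cancelables = manager.getAll(sqlQueryId);
    for (SqlLifecycleManager.Cancelable cancelable : cancelables) {
      // A real endpoint would authorize the caller against
      // cancelable.resources() before doing this.
      cancelable.cancel();
    }
    manager.removeAll(sqlQueryId, cancelables);
  }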

View File

@ -31,11 +31,14 @@ import java.util.List;
import java.util.Map;
/**
* Captures the inputs to a SQL execution request: the statement,
* the context, parameters, and the authorization result. Pass this
* around rather than the quad of items. The request can evolve:
* items can be filled in later as needed (except for the SQL
* and auth result, which are required.)
* Captures the inputs to a SQL execution request: the statement, the context,
* parameters, and the authorization result. Pass this around rather than the
* quad of items. The request can evolve: the context and parameters can be
* filled in later as needed.
* <p>
* SQL requests come from a variety of sources in a variety of formats. Use
* the {@link Builder} class to create an instance from the information
* available at each point in the code.
*/
public class SqlQueryPlus
{
@ -61,39 +64,19 @@ public class SqlQueryPlus
this.authResult = Preconditions.checkNotNull(authResult);
}
public SqlQueryPlus(final String sql, final AuthenticationResult authResult)
public static Builder builder()
{
this(sql, (QueryContext) null, null, authResult);
return new Builder();
}
public static SqlQueryPlus fromSqlParameters(
String sql,
Map<String, Object> queryContext,
List<SqlParameter> parameters,
AuthenticationResult authResult
)
public static Builder builder(String sql)
{
return new SqlQueryPlus(
sql,
queryContext == null ? null : new QueryContext(queryContext),
parameters == null ? null : SqlQuery.getParameterList(parameters),
authResult
);
return new Builder().sql(sql);
}
public static SqlQueryPlus from(
String sql,
Map<String, Object> queryContext,
List<TypedValue> parameters,
AuthenticationResult authResult
)
public static Builder builder(SqlQuery sqlQuery)
{
return new SqlQueryPlus(
sql,
queryContext == null ? null : new QueryContext(queryContext),
parameters,
authResult
);
return new Builder().query(sqlQuery);
}
public String sql()
@ -121,8 +104,75 @@ public class SqlQueryPlus
return new SqlQueryPlus(sql, context, parameters, authResult);
}
public SqlQueryPlus withContext(Map<String, Object> context)
{
return new SqlQueryPlus(sql, new QueryContext(context), parameters, authResult);
}
public SqlQueryPlus withParameters(List<TypedValue> parameters)
{
return new SqlQueryPlus(sql, queryContext, parameters, authResult);
}
public static class Builder
{
private String sql;
private QueryContext queryContext;
private List<TypedValue> parameters;
private AuthenticationResult authResult;
public Builder sql(String sql)
{
this.sql = sql;
return this;
}
public Builder query(SqlQuery sqlQuery)
{
this.sql = sqlQuery.getQuery();
this.queryContext = new QueryContext(sqlQuery.getContext());
this.parameters = sqlQuery.getParameterList();
return this;
}
public Builder context(QueryContext queryContext)
{
this.queryContext = queryContext;
return this;
}
public Builder context(Map<String, Object> queryContext)
{
this.queryContext = queryContext == null ? null : new QueryContext(queryContext);
return this;
}
public Builder parameters(List<TypedValue> parameters)
{
this.parameters = parameters;
return this;
}
public Builder sqlParameters(List<SqlParameter> parameters)
{
this.parameters = parameters == null ? null : SqlQuery.getParameterList(parameters);
return this;
}
public Builder auth(final AuthenticationResult authResult)
{
this.authResult = authResult;
return this;
}
public SqlQueryPlus build()
{
return new SqlQueryPlus(
sql,
queryContext,
parameters,
authResult
);
}
}
}
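
A usage sketch of the new builder. The SQL text, context key, and parameter value are illustrative, and the SqlParameter construction is assumed from its (type, value) shape; authResult is assumed to be in scope.

  // Illustrative only: build a SqlQueryPlus from its parts.
  SqlQueryPlus queryPlus = SqlQueryPlus
      .builder("SELECT COUNT(*) FROM wikipedia WHERE channel = ?")
      .context(ImmutableMap.of("sqlTimeZone", "Etc/UTC"))
      .sqlParameters(ImmutableList.of(new SqlParameter(SqlType.VARCHAR, "#en.wikipedia")))
      .auth(authResult)
      .build();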

View File

@ -28,46 +28,52 @@ import org.apache.druid.server.QueryScheduler;
import org.apache.druid.server.log.RequestLogger;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.http.SqlQuery;
import javax.servlet.http.HttpServletRequest;
@LazySingleton
public class SqlLifecycleFactory
public class SqlStatementFactory
{
private final PlannerFactory plannerFactory;
private final ServiceEmitter emitter;
private final RequestLogger requestLogger;
private final QueryScheduler queryScheduler;
private final AuthConfig authConfig;
private final DefaultQueryConfig defaultQueryConfig;
protected final SqlToolbox lifecycleToolbox;
@Inject
public SqlLifecycleFactory(
PlannerFactory plannerFactory,
ServiceEmitter emitter,
RequestLogger requestLogger,
QueryScheduler queryScheduler,
AuthConfig authConfig,
Supplier<DefaultQueryConfig> defaultQueryConfig
public SqlStatementFactory(
final PlannerFactory plannerFactory,
final ServiceEmitter emitter,
final RequestLogger requestLogger,
final QueryScheduler queryScheduler,
final AuthConfig authConfig,
final Supplier<DefaultQueryConfig> defaultQueryConfig,
final SqlLifecycleManager sqlLifecycleManager
)
{
this.plannerFactory = plannerFactory;
this.emitter = emitter;
this.requestLogger = requestLogger;
this.queryScheduler = queryScheduler;
this.authConfig = authConfig;
this.defaultQueryConfig = defaultQueryConfig.get();
}
public SqlLifecycle factorize()
{
return new SqlLifecycle(
this.lifecycleToolbox = new SqlToolbox(
plannerFactory,
emitter,
requestLogger,
queryScheduler,
authConfig,
defaultQueryConfig,
System.currentTimeMillis(),
System.nanoTime()
defaultQueryConfig.get(),
sqlLifecycleManager
);
}
public HttpStatement httpStatement(
final SqlQuery sqlQuery,
final HttpServletRequest req
)
{
return new HttpStatement(lifecycleToolbox, sqlQuery, req);
}
public DirectStatement directStatement(final SqlQueryPlus sqlRequest)
{
return new DirectStatement(lifecycleToolbox, sqlRequest);
}
public PreparedStatement preparedStatement(final SqlQueryPlus sqlRequest)
{
return new PreparedStatement(lifecycleToolbox, sqlRequest);
}
}
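
The three entry points in use, sketched from the call sites visible elsewhere in this patch (prepare() and execute(parameters) appear in DruidJdbcPreparedStatement below); queryPlus, sqlQuery, req, and parameters are assumed to be in scope.

  // Sketch of the factory's three statement flavors.
  HttpStatement http = sqlStatementFactory.httpStatement(sqlQuery, req);

  DirectStatement direct = sqlStatementFactory.directStatement(queryPlus);
  Sequence<Object[]> rows = direct.execute();   // plan + run in one step
  direct.close();                               // after draining; emits logs and metrics

  PreparedStatement prepared = sqlStatementFactory.preparedStatement(queryPlus);
  prepared.prepare();                           // resolves parameter/result types
  DirectStatement bound = prepared.execute(parameters);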

View File

@ -0,0 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import com.google.common.base.Preconditions;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.DefaultQueryConfig;
import org.apache.druid.server.QueryScheduler;
import org.apache.druid.server.log.RequestLogger;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
/**
* Provides the plan and execution resources to process SQL queries.
*/
public class SqlToolbox
{
final PlannerFactory plannerFactory;
final ServiceEmitter emitter;
final RequestLogger requestLogger;
final QueryScheduler queryScheduler;
final AuthConfig authConfig;
final DefaultQueryConfig defaultQueryConfig;
final SqlLifecycleManager sqlLifecycleManager;
public SqlToolbox(
final PlannerFactory plannerFactory,
final ServiceEmitter emitter,
final RequestLogger requestLogger,
final QueryScheduler queryScheduler,
final AuthConfig authConfig,
final DefaultQueryConfig defaultQueryConfig,
final SqlLifecycleManager sqlLifecycleManager
)
{
this.plannerFactory = plannerFactory;
this.emitter = emitter;
this.requestLogger = requestLogger;
this.queryScheduler = queryScheduler;
this.authConfig = authConfig;
this.defaultQueryConfig = defaultQueryConfig;
this.sqlLifecycleManager = Preconditions.checkNotNull(sqlLifecycleManager, "sqlLifecycleManager");
}
}

View File

@ -35,7 +35,6 @@ import java.sql.Array;
import java.sql.DatabaseMetaData;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
/**
* Common implementation for the JDBC {@code Statement} and
@ -55,16 +54,17 @@ public abstract class AbstractDruidJdbcStatement implements Closeable
{
public static final long START_OFFSET = 0;
protected final DruidConnection connection;
protected final String connectionId;
protected final int statementId;
protected Throwable throwable;
protected DruidJdbcResultSet resultSet;
public AbstractDruidJdbcStatement(
final DruidConnection connection,
final String connectionId,
final int statementId
)
{
this.connection = Preconditions.checkNotNull(connection, "connection");
this.connectionId = Preconditions.checkNotNull(connectionId, "connectionId");
this.statementId = statementId;
}
@ -242,16 +242,11 @@ public abstract class AbstractDruidJdbcStatement implements Closeable
public String getConnectionId()
{
return connection.getConnectionId();
return connectionId;
}
public int getStatementId()
{
return statementId;
}
public ExecutorService executor()
{
return connection.executor();
}
}

View File

@ -20,22 +20,19 @@
package org.apache.druid.sql.avatica;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import org.apache.calcite.tools.RelConversionException;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.QueryContext;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.PreparedStatement;
import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.SqlStatementFactory;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
@ -49,12 +46,13 @@ public class DruidConnection
private final String connectionId;
private final int maxStatements;
private final Map<String, Object> userSecret;
private final Map<String, Object> context;
private final ImmutableMap<String, Object> userSecret;
private final QueryContext context;
private final AtomicInteger statementCounter = new AtomicInteger();
private final AtomicReference<Future<?>> timeoutFuture = new AtomicReference<>();
private final ExecutorService yielderOpenCloseExecutor;
// Typically synchronized by connectionLock, except in one case: the onClose function passed
// into DruidStatements contained by the map.
@GuardedBy("connectionLock")
private final ConcurrentMap<Integer, AbstractDruidJdbcStatement> statements = new ConcurrentHashMap<>();
private final Object connectionLock = new Object();
@ -66,19 +64,13 @@ public class DruidConnection
final String connectionId,
final int maxStatements,
final Map<String, Object> userSecret,
final Map<String, Object> context
final QueryContext context
)
{
this.connectionId = Preconditions.checkNotNull(connectionId);
this.maxStatements = maxStatements;
this.userSecret = ImmutableMap.copyOf(userSecret);
this.context = Preconditions.checkNotNull(context);
this.yielderOpenCloseExecutor = Execs.singleThreaded(
StringUtils.format(
"JDBCYielderOpenCloseExecutor-connection-%s",
StringUtils.encodeForFormat(connectionId)
)
);
this.context = context;
}
public String getConnectionId()
@ -86,16 +78,7 @@ public class DruidConnection
return connectionId;
}
public QueryContext makeContext()
{
// QueryContext constructor copies the context parameters.
// we don't want to stringify arrays for JDBC ever because Avatica needs to handle this
final QueryContext queryContext = new QueryContext(context);
queryContext.addSystemParam(PlannerContext.CTX_SQL_STRINGIFY_ARRAYS, false);
return queryContext;
}
public DruidJdbcStatement createStatement(SqlLifecycleFactory sqlLifecycleFactory)
public DruidJdbcStatement createStatement(SqlStatementFactory sqlLifecycleFactory)
{
final int statementId = statementCounter.incrementAndGet();
@ -112,8 +95,9 @@ public class DruidConnection
@SuppressWarnings("GuardedBy")
final DruidJdbcStatement statement = new DruidJdbcStatement(
this,
connectionId,
statementId,
context,
sqlLifecycleFactory
);
@ -124,8 +108,8 @@ public class DruidConnection
}
public DruidJdbcPreparedStatement createPreparedStatement(
SqlLifecycleFactory sqlLifecycleFactory,
SqlQueryPlus queryPlus,
SqlStatementFactory sqlLifecycleFactory,
SqlQueryPlus sqlRequest,
final long maxRowCount)
{
final int statementId = statementCounter.incrementAndGet();
@ -141,14 +125,16 @@ public class DruidConnection
throw DruidMeta.logFailure(new ISE("Too many open statements, limit is [%,d]", maxStatements));
}
@SuppressWarnings("GuardedBy")
final PreparedStatement statement = sqlLifecycleFactory.preparedStatement(
sqlRequest.withContext(context)
);
final DruidJdbcPreparedStatement jdbcStmt = new DruidJdbcPreparedStatement(
this,
connectionId,
statementId,
queryPlus,
sqlLifecycleFactory,
statement,
maxRowCount
);
jdbcStmt.prepare();
statements.put(statementId, jdbcStmt);
LOG.debug("Connection [%s] opened prepared statement [%s].", connectionId, statementId);
@ -156,16 +142,6 @@ public class DruidConnection
}
}
public void prepareAndExecute(
final DruidJdbcStatement druidStatement,
final SqlQueryPlus queryPlus,
final long maxRowCount
) throws RelConversionException
{
Preconditions.checkNotNull(context, "JDBC connection context is null!");
druidStatement.execute(queryPlus.withContext(makeContext()), maxRowCount);
}
public AbstractDruidJdbcStatement getStatement(final int statementId)
{
synchronized (connectionLock) {
@ -205,8 +181,8 @@ public class DruidConnection
public void close()
{
synchronized (connectionLock) {
open = false;
for (AbstractDruidJdbcStatement statement : statements.values()) {
// Copy statements before iterating because statement.close() modifies the statements map.
for (AbstractDruidJdbcStatement statement : ImmutableList.copyOf(statements.values())) {
try {
statement.close();
}
@ -214,9 +190,9 @@ public class DruidConnection
LOG.warn("Connection [%s] failed to close statement [%s]!", connectionId, statement.getStatementId());
}
}
statements.clear();
yielderOpenCloseExecutor.shutdownNow();
LOG.debug("Connection [%s] closed.", connectionId);
open = false;
}
}
@ -233,9 +209,4 @@ public class DruidConnection
{
return userSecret;
}
public ExecutorService executor()
{
return yielderOpenCloseExecutor;
}
}

View File

@ -24,9 +24,8 @@ import org.apache.calcite.avatica.Meta;
import org.apache.calcite.avatica.remote.TypedValue;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.sql.SqlLifecycle;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.DirectStatement;
import org.apache.druid.sql.PreparedStatement;
import org.apache.druid.sql.calcite.planner.PrepareResult;
import java.util.List;
@ -42,54 +41,46 @@ import java.util.List;
*/
public class DruidJdbcPreparedStatement extends AbstractDruidJdbcStatement
{
private final SqlLifecycle sqlStatement;
private final SqlQueryPlus queryPlus;
private final SqlLifecycleFactory lifecycleFactory;
private final PreparedStatement sqlStatement;
private final long maxRowCount;
private Meta.Signature signature;
private State state = State.NEW;
public DruidJdbcPreparedStatement(
final DruidConnection connection,
final String connectionId,
final int statementId,
final SqlQueryPlus queryPlus,
final SqlLifecycleFactory lifecycleFactory,
final PreparedStatement stmt,
final long maxRowCount
)
{
super(connection, statementId);
this.lifecycleFactory = lifecycleFactory;
this.queryPlus = queryPlus;
super(connectionId, statementId);
this.sqlStatement = stmt;
this.maxRowCount = maxRowCount;
this.sqlStatement = lifecycleFactory.factorize();
sqlStatement.initialize(queryPlus.sql(), connection.makeContext());
}
public synchronized void prepare()
{
try {
ensure(State.NEW);
sqlStatement.validateAndAuthorize(queryPlus.authResult());
PrepareResult prepareResult = sqlStatement.prepare();
signature = createSignature(
prepareResult,
queryPlus.sql()
sqlStatement.sqlRequest().sql()
);
state = State.PREPARED;
}
// Preserve the type of forbidden and runtime exceptions.
catch (ForbiddenException e) {
// Can't finalize the statement in this case. The call will fail with an
// assertion error.
DruidMeta.logFailure(e);
state = State.CLOSED;
close();
throw e;
}
catch (RuntimeException e) {
failed(e);
close();
throw e;
}
// Wrap everything else
catch (Throwable t) {
failed(t);
close();
throw new RuntimeException(t);
}
}
@ -106,19 +97,17 @@ public class DruidJdbcPreparedStatement extends AbstractDruidJdbcStatement
ensure(State.PREPARED);
closeResultSet();
try {
SqlLifecycle directStmt = lifecycleFactory.factorize();
directStmt.initialize(queryPlus.sql(), connection.makeContext());
directStmt.setParameters(parameters);
resultSet = new DruidJdbcResultSet(this, queryPlus, directStmt, maxRowCount);
DirectStatement directStmt = sqlStatement.execute(parameters);
resultSet = new DruidJdbcResultSet(this, directStmt, maxRowCount);
resultSet.execute();
}
// Failure to execute does not close the prepared statement.
catch (RuntimeException e) {
failed(e);
resultSet = null;
throw e;
}
catch (Throwable t) {
failed(t);
resultSet = null;
throw new RuntimeException(t);
}
}
@ -134,19 +123,12 @@ public class DruidJdbcPreparedStatement extends AbstractDruidJdbcStatement
throw new ISE("Invalid action for state [%s]", state);
}
private void failed(Throwable t)
{
super.close();
sqlStatement.finalizeStateAndEmitLogsAndMetrics(t, null, -1);
state = State.CLOSED;
}
@Override
public synchronized void close()
{
if (state != State.CLOSED) {
super.close();
sqlStatement.finalizeStateAndEmitLogsAndMetrics(null, null, -1);
sqlStatement.close();
}
state = State.CLOSED;
}
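
From the client side, the prepare/execute split above corresponds to ordinary JDBC over Avatica. A sketch, assuming Druid's documented Avatica endpoint; host, port, and the query are placeholders.

  // Client-side sketch (host/port and query are placeholders).
  String url = "jdbc:avatica:remote:url=http://localhost:8082/druid/v2/sql/avatica/";
  try (Connection connection = DriverManager.getConnection(url);
       java.sql.PreparedStatement client =
           connection.prepareStatement("SELECT COUNT(*) FROM wikipedia WHERE channel = ?")) {
    client.setString(1, "#en.wikipedia");
    // executeQuery() reaches DruidJdbcPreparedStatement.execute(parameters) on the server.
    try (ResultSet rs = client.executeQuery()) {
      while (rs.next()) {
        System.out.println(rs.getLong(1));
      }
    }
  }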

View File

@ -22,26 +22,27 @@ package org.apache.druid.sql.avatica;
import com.google.common.base.Preconditions;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import org.apache.calcite.avatica.Meta;
import org.apache.calcite.tools.RelConversionException;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.Yielders;
import org.apache.druid.sql.SqlLifecycle;
import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.calcite.planner.PrepareResult;
import org.apache.druid.sql.DirectStatement;
import java.io.Closeable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
/**
* Druid's server-side representation of a JDBC result set. At most one
* can be open per statement (standard or prepared). The implementation
* is based on Druid's {@link SqlLifecycle} class. Even if result
* is based on Druid's own {@link DirectStatement} class. Even if the result
* set is for a {@code PreparedStatement}, the result set itself uses
* a Druid {@code SqlLifecycle} which includes the parameter values
* a Druid {@code DirectStatement} which includes the parameter values
* given for the execution. This allows Druid's planner to use the "query
* optimized" form of parameter substitution: we replan the query for
* each execution with the parameter values.
@ -72,9 +73,8 @@ public class DruidJdbcResultSet implements Closeable
* https://github.com/apache/druid/pull/4288
* https://github.com/apache/druid/pull/4415
*/
private final AbstractDruidJdbcStatement jdbcStatement;
private final SqlQueryPlus sqlRequest;
private final SqlLifecycle stmt;
private final ExecutorService yielderOpenCloseExecutor;
private final DirectStatement stmt;
private final long maxRowCount;
private State state = State.NEW;
private Meta.Signature signature;
@ -83,30 +83,27 @@ public class DruidJdbcResultSet implements Closeable
public DruidJdbcResultSet(
final AbstractDruidJdbcStatement jdbcStatement,
final SqlQueryPlus sqlRequest,
final SqlLifecycle stmt,
DirectStatement stmt,
final long maxRowCount
)
{
this.jdbcStatement = jdbcStatement;
this.stmt = stmt;
this.sqlRequest = sqlRequest;
this.maxRowCount = maxRowCount;
this.yielderOpenCloseExecutor = Execs.singleThreaded(
StringUtils.format(
"JDBCYielderOpenCloseExecutor-connection-%s-statement-%d",
StringUtils.encodeForFormat(jdbcStatement.getConnectionId()),
jdbcStatement.getStatementId()
)
);
}
public synchronized void execute() throws RelConversionException
public synchronized void execute()
{
ensure(State.NEW);
stmt.validateAndAuthorize(sqlRequest.authResult());
PrepareResult prepareResult = stmt.prepare();
stmt.plan();
signature = AbstractDruidJdbcStatement.createSignature(
prepareResult,
sqlRequest.sql()
);
try {
state = State.RUNNING;
final Sequence<Object[]> baseSequence = jdbcStatement.executor().submit(stmt::execute).get();
final Sequence<Object[]> baseSequence = yielderOpenCloseExecutor.submit(stmt::execute).get();
// We can't apply limits greater than Integer.MAX_VALUE, ignore them.
final Sequence<Object[]> retSequence =
@ -115,6 +112,13 @@ public class DruidJdbcResultSet implements Closeable
: baseSequence;
yielder = Yielders.each(retSequence);
signature = AbstractDruidJdbcStatement.createSignature(
stmt.prepareResult(),
stmt.sqlRequest().sql()
);
}
catch (ExecutionException e) {
throw closeAndPropagateThrowable(e.getCause());
}
catch (Throwable t) {
throw closeAndPropagateThrowable(t);
@ -180,8 +184,9 @@ public class DruidJdbcResultSet implements Closeable
{
DruidMeta.logFailure(t);
// Report a failure so that the failure is logged.
stmt.reporter().failed(t);
try {
close(t);
close();
}
catch (Throwable t1) {
t.addSuppressed(t1);
@ -199,11 +204,6 @@ public class DruidJdbcResultSet implements Closeable
@Override
public synchronized void close()
{
close(null);
}
private void close(Throwable error)
{
if (state == State.NEW) {
state = State.CLOSED;
@ -218,7 +218,7 @@ public class DruidJdbcResultSet implements Closeable
this.yielder = null;
// Put the close last, so any exceptions it throws happen after the other cleanup above.
jdbcStatement.executor().submit(
yielderOpenCloseExecutor.submit(
() -> {
theYielder.close();
// makes this a Callable instead of Runnable so we don't need to catch exceptions inside the lambda
@ -226,6 +226,7 @@ public class DruidJdbcResultSet implements Closeable
}
).get();
yielderOpenCloseExecutor.shutdownNow();
}
}
catch (RuntimeException e) {
@ -235,7 +236,8 @@ public class DruidJdbcResultSet implements Closeable
throw new RuntimeException(t);
}
finally {
stmt.finalizeStateAndEmitLogsAndMetrics(error, null, -1);
// Closing the statement causes logs and metrics to be emitted.
stmt.close();
}
}
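
The fetch path that consumes the yielder is not part of this diff. As a rough sketch of its shape, based on Druid's Yielder API (get(), next(), isDone()) and assuming a fetchMaxRowCount limit:

  // Assumed shape of a fetch step; not taken from this patch.
  List<Object[]> frame = new ArrayList<>();
  Yielder<Object[]> current = yielder;
  while (!current.isDone() && frame.size() < fetchMaxRowCount) {
    frame.add(current.get());
    current = current.next(null);
  }
  yielder = current;   // remember the position for the next fetch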

View File

@ -21,11 +21,10 @@ package org.apache.druid.sql.avatica;
import com.google.common.base.Preconditions;
import org.apache.calcite.avatica.Meta;
import org.apache.calcite.tools.RelConversionException;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.sql.SqlLifecycle;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.query.QueryContext;
import org.apache.druid.sql.DirectStatement;
import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.SqlStatementFactory;
/**
* Represents Druid's version of the JDBC {@code Statement} class:
@ -34,39 +33,32 @@ import org.apache.druid.sql.SqlQueryPlus;
*/
public class DruidJdbcStatement extends AbstractDruidJdbcStatement
{
private final SqlLifecycleFactory lifecycleFactory;
protected boolean closed;
private final SqlStatementFactory lifecycleFactory;
protected final QueryContext queryContext;
public DruidJdbcStatement(
final DruidConnection connection,
final String connectionId,
final int statementId,
final SqlLifecycleFactory lifecycleFactory
final QueryContext queryContext,
final SqlStatementFactory lifecycleFactory
)
{
super(connection, statementId);
super(connectionId, statementId);
this.queryContext = queryContext;
this.lifecycleFactory = Preconditions.checkNotNull(lifecycleFactory, "lifecycleFactory");
}
public synchronized void execute(SqlQueryPlus sqlRequest, long maxRowCount) throws RelConversionException
public synchronized void execute(SqlQueryPlus queryPlus, long maxRowCount)
{
closeResultSet();
SqlLifecycle stmt = lifecycleFactory.factorize();
stmt.initialize(sqlRequest.sql(), connection.makeContext());
queryPlus = queryPlus.withContext(queryContext);
DirectStatement stmt = lifecycleFactory.directStatement(queryPlus);
resultSet = new DruidJdbcResultSet(this, stmt, Long.MAX_VALUE);
try {
stmt.validateAndAuthorize(sqlRequest.authResult());
resultSet = new DruidJdbcResultSet(this, sqlRequest, stmt, Long.MAX_VALUE);
resultSet.execute();
}
catch (ForbiddenException e) {
// Can't finalize the statement in this case. The call will fail with an
// assertion error.
resultSet = null;
DruidMeta.logFailure(e);
throw e;
}
catch (Throwable t) {
stmt.finalizeStateAndEmitLogsAndMetrics(t, null, -1);
resultSet = null;
closeResultSet();
throw t;
}
}

View File

@ -29,24 +29,29 @@ import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.inject.Inject;
import com.google.inject.Injector;
import org.apache.calcite.avatica.AvaticaSeverity;
import org.apache.calcite.avatica.MetaImpl;
import org.apache.calcite.avatica.MissingResultsException;
import org.apache.calcite.avatica.NoSuchConnectionException;
import org.apache.calcite.avatica.NoSuchStatementException;
import org.apache.calcite.avatica.QueryState;
import org.apache.calcite.avatica.remote.AvaticaRuntimeException;
import org.apache.calcite.avatica.remote.Service.ErrorResponse;
import org.apache.calcite.avatica.remote.TypedValue;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.UOE;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.QueryContext;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.Authenticator;
import org.apache.druid.server.security.AuthenticatorMapper;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.joda.time.Interval;
import javax.annotation.Nonnull;
@ -100,7 +105,7 @@ public class DruidMeta extends MetaImpl
"user", "password"
);
private final SqlLifecycleFactory sqlLifecycleFactory;
private final SqlStatementFactory sqlLifecycleFactory;
private final ScheduledExecutorService exec;
private final AvaticaServerConfig config;
private final List<Authenticator> authenticators;
@ -119,7 +124,7 @@ public class DruidMeta extends MetaImpl
@Inject
public DruidMeta(
final SqlLifecycleFactory sqlLifecycleFactory,
final SqlStatementFactory sqlLifecycleFactory,
final AvaticaServerConfig config,
final ErrorHandler errorHandler,
final Injector injector
@ -156,7 +161,10 @@ public class DruidMeta extends MetaImpl
}
}
}
openDruidConnection(ch.id, secret, contextMap);
// We never want to stringify arrays for JDBC because Avatica needs to handle them.
final QueryContext context = new QueryContext(contextMap);
context.addSystemParam(PlannerContext.CTX_SQL_STRINGIFY_ARRAYS, false);
openDruidConnection(ch.id, secret, context);
}
catch (NoSuchConnectionException e) {
throw e;
@ -164,7 +172,7 @@ public class DruidMeta extends MetaImpl
catch (Throwable t) {
// we want to avoid sanitizing Avatica specific exceptions as the Avatica code can rely on them to handle issues
// differently
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -182,7 +190,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -198,7 +206,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -218,7 +226,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -242,10 +250,11 @@ public class DruidMeta extends MetaImpl
null, // No parameters in this path
doAuthenticate(druidConnection)
);
DruidJdbcPreparedStatement stmt = druidConnection.createPreparedStatement(
DruidJdbcPreparedStatement stmt = getDruidConnection(ch.id).createPreparedStatement(
sqlLifecycleFactory,
sqlReq,
maxRowCount);
stmt.prepare();
LOG.debug("Successfully prepared statement [%s] for execution", stmt.getStatementId());
return new StatementHandle(ch.id, stmt.getStatementId(), stmt.getSignature());
}
@ -253,7 +262,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -299,9 +308,11 @@ public class DruidMeta extends MetaImpl
// Ignore "callback", this class is designed for use with LocalService which doesn't use it.
final DruidJdbcStatement druidStatement = getDruidStatement(statement, DruidJdbcStatement.class);
final DruidConnection druidConnection = getDruidConnection(statement.connectionId);
// No parameters for a "regular" JDBC statement.
SqlQueryPlus sqlRequest = new SqlQueryPlus(sql, null, null, doAuthenticate(druidConnection));
druidConnection.prepareAndExecute(druidStatement, sqlRequest, maxRowCount);
AuthenticationResult authenticationResult = doAuthenticate(druidConnection);
SqlQueryPlus sqlRequest = SqlQueryPlus.builder(sql)
.auth(authenticationResult)
.build();
druidStatement.execute(sqlRequest, maxRowCount);
ExecuteResult result = doFetch(druidStatement, maxRowsInFirstFrame);
LOG.debug("Successfully prepared statement [%s] and started execution", druidStatement.getStatementId());
return result;
@ -311,10 +322,32 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
/**
* Convert a Druid exception to an Avatica exception. Avatica can pass
* along things like an error code and SQL state. There are defined
* values for security failures, so map to those.
*/
private RuntimeException mapException(Throwable t)
{
// BasicSecurityAuthenticationException is not visible here.
String className = t.getClass().getSimpleName();
if (t instanceof ForbiddenException ||
"BasicSecurityAuthenticationException".equals(className)) {
throw new AvaticaRuntimeException(
t.getMessage(),
ErrorResponse.UNAUTHORIZED_ERROR_CODE,
ErrorResponse.UNAUTHORIZED_SQL_STATE,
AvaticaSeverity.ERROR);
}
// Let Avatica do its default mapping.
throw errorHandler.sanitize(t);
}
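
On the wire, the mapped code and SQL state travel back in Avatica's ErrorResponse. Whether a JDBC client sees them via SQLException.getErrorCode() and getSQLState() end-to-end is an assumption here, sketched below; url and connectionProperties are placeholders.

  // Client-side sketch; propagation of code/state is an assumption.
  try (Connection connection = DriverManager.getConnection(url, connectionProperties)) {
    // ... run a query with bad credentials ...
  }
  catch (SQLException e) {
    // Expect the UNAUTHORIZED_ERROR_CODE / UNAUTHORIZED_SQL_STATE set by mapException().
    System.err.printf("auth failure: code=%d state=%s%n", e.getErrorCode(), e.getSQLState());
  }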
private ExecuteResult doFetch(AbstractDruidJdbcStatement druidStatement, int maxRows)
{
final Signature signature = druidStatement.getSignature();
@ -326,7 +359,7 @@ public class DruidMeta extends MetaImpl
return new ExecuteResult(
ImmutableList.of(
MetaResultSet.create(
druidStatement.getConnectionId(),
druidStatement.connectionId,
druidStatement.statementId,
false,
signature,
@ -372,7 +405,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -409,7 +442,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -440,7 +473,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -468,7 +501,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -507,7 +540,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -544,7 +577,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -603,7 +636,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -673,7 +706,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -694,7 +727,7 @@ public class DruidMeta extends MetaImpl
throw e;
}
catch (Throwable t) {
throw errorHandler.sanitize(t);
throw mapException(t);
}
}
@ -729,7 +762,7 @@ public class DruidMeta extends MetaImpl
private DruidConnection openDruidConnection(
final String connectionId,
final Map<String, Object> userSecret,
final Map<String, Object> context
final QueryContext context
)
{
if (connectionCount.incrementAndGet() > config.getMaxConnections()) {

View File

@ -32,7 +32,7 @@ import org.apache.druid.server.security.ForbiddenException;
/**
* ErrorHandler is a utilty class that is used to sanitize exceptions.
* ErrorHandler is a utility class that is used to sanitize exceptions.
*/
class ErrorHandler
{
@ -61,6 +61,8 @@ class ErrorHandler
if (error instanceof ForbiddenException) {
return (ForbiddenException) errorResponseTransformStrategy.transformIfNeeded((ForbiddenException) error);
}
// Should map BasicSecurityAuthenticationException also, but the class is not
// visible here.
if (error instanceof ISE) {
return (ISE) errorResponseTransformStrategy.transformIfNeeded((ISE) error);
}
@ -74,7 +76,7 @@ class ErrorHandler
// Cannot check the cause of the throwable because it cannot be cast back to the original type,
// so this only checks runtime exceptions for causes.
if (error instanceof RuntimeException && error.getCause() instanceof SanitizableException) {
// could do `throw sanitize(error);` but just sanitizing immediatley avoids unnecessary going down multiple levels
// could do `throw sanitize(error);` but just sanitizing immediately avoids unnecessary going down multiple levels
return new RuntimeException(errorResponseTransformStrategy.transformIfNeeded((SanitizableException) error.getCause()));
}
QueryInterruptedException wrappedError = QueryInterruptedException.wrapIfNeeded(error);
@ -84,7 +86,7 @@ class ErrorHandler
/**
* Check to see if something needs to be sanitized.
* <p>
* This does this by checking to see if the ErrorResponse is different than a NoOp Error response transform strategy.
* Done by checking whether the error response transform strategy differs from the no-op strategy.
*
* @return true if the error handler has an error response strategy other than the NoOp error
* response strategy

View File

@ -42,6 +42,7 @@ import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.math.expr.ExpressionProcessing;
import org.apache.druid.query.ordering.StringComparator;
import org.apache.druid.query.ordering.StringComparators;
@ -57,6 +58,7 @@ import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.ISODateTimeFormat;
import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.nio.charset.Charset;
import java.sql.Date;
@ -69,9 +71,13 @@ import java.util.regex.Pattern;
/**
* Utility functions for Calcite.
* <p>
* See also the file {@code saffron.properties} which holds the
* character set system properties formerly set in this file.
*/
public class Calcites
{
private static final EmittingLogger log = new EmittingLogger(Calcites.class);
private static final DateTimes.UtcFormatter CALCITE_DATE_PARSER = DateTimes.wrapFormatter(ISODateTimeFormat.dateParser());
private static final DateTimes.UtcFormatter CALCITE_TIMESTAMP_PARSER = DateTimes.wrapFormatter(
new DateTimeFormatterBuilder()
@ -100,26 +106,6 @@ public class Calcites
// No instantiation.
}
public static void setSystemProperties()
{
// These properties control the charsets used for SQL literals. I don't see a way to change this except through
// system properties, so we'll have to set those...
final String charset = ConversionUtil.NATIVE_UTF16_CHARSET_NAME;
// Deprecated in Calcite 1.19. See:
// https://calcite.apache.org/javadocAggregate/org/apache/calcite/util/SaffronProperties.html
System.setProperty("saffron.default.charset", Calcites.defaultCharset().name());
System.setProperty("saffron.default.nationalcharset", Calcites.defaultCharset().name());
System.setProperty("saffron.default.collation.name", StringUtils.format("%s$en_US", charset));
// The following are the current names. See org.apache.calcite.config.CalciteSystemProperty
// https://github.com/apache/calcite/blob/master/core/src/main/java/org/apache/calcite/config/CalciteSystemProperty.java
System.setProperty("calcite.default.charset", Calcites.defaultCharset().name());
System.setProperty("calcite.default.nationalcharset", Calcites.defaultCharset().name());
System.setProperty("calcite.default.collation.name", StringUtils.format("%s$en_US", charset));
}
public static Charset defaultCharset()
{
return DEFAULT_CHARSET;
@ -144,7 +130,6 @@ public class Calcites
}
builder.append("'");
return isPlainAscii ? builder.toString() : "U&" + builder;
}
/**
@ -273,8 +258,6 @@ public class Calcites
final boolean nullable
)
{
final RelDataType dataType = typeFactory.createArrayType(
createSqlTypeWithNullability(typeFactory, elementTypeName, nullable),
-1
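For context, a sketch of the class-initialization-order hazard that motivated moving these settings into saffron.properties (property names and values are from this change; the wrapper class is hypothetical, and the ordering claim is this commit's stated rationale, not verified here):
public class CharsetInit
{
  static {
    // Effective only if this runs before any org.apache.calcite class is loaded;
    // otherwise Calcite has already captured its defaults. Reading the values from
    // saffron.properties avoids depending on JVM class-load order.
    System.setProperty("calcite.default.charset", "UTF-16LE");
    System.setProperty("calcite.default.nationalcharset", "UTF-16LE");
    System.setProperty("calcite.default.collation.name", "UTF-16LE$en_US");
  }
}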

View File

@ -132,6 +132,7 @@ public class DruidPlanner implements Closeable
private ParsedNodes parsed;
private SqlNode validatedQueryNode;
private boolean authorized;
private PrepareResult prepareResult;
private Set<ResourceAction> resourceActions;
private RelRoot rootQueryRel;
private RexBuilder rexBuilder;
@ -148,13 +149,6 @@ public class DruidPlanner implements Closeable
this.queryMakerFactory = queryMakerFactory;
}
private ParsedNodes parse() throws SqlParseException, ValidationException
{
resetPlanner();
SqlNode root = planner.parse(plannerContext.getSql());
return ParsedNodes.create(root, plannerContext.getTimeZone());
}
/**
* Validates a SQL query and populates {@link PlannerContext#getResourceActions()}.
*
@ -164,7 +158,6 @@ public class DruidPlanner implements Closeable
public void validate() throws SqlParseException, ValidationException
{
Preconditions.checkState(state == State.START);
resetPlanner();
SqlNode root = planner.parse(plannerContext.getSql());
parsed = ParsedNodes.create(root, plannerContext.getTimeZone());
@ -244,7 +237,13 @@ public class DruidPlanner implements Closeable
Preconditions.checkState(state == State.VALIDATED);
rootQueryRel = planner.rel(validatedQueryNode);
doPrepare(null);
state = State.PREPARED;
return prepareResult;
}
private void doPrepare(@Nullable QueryMaker queryMaker) throws ValidationException
{
final RelDataTypeFactory typeFactory = rootQueryRel.rel.getCluster().getTypeFactory();
final SqlValidator validator = planner.getValidator();
final RelDataType parameterTypes = validator.getParameterRowType(validatedQueryNode);
@ -253,11 +252,13 @@ public class DruidPlanner implements Closeable
if (parsed.getExplainNode() != null) {
returnedRowType = getExplainStructType(typeFactory);
} else {
returnedRowType = buildQueryMaker(rootQueryRel, parsed.getInsertOrReplace()).getResultType();
if (queryMaker == null) {
queryMaker = buildQueryMaker(rootQueryRel, parsed.getInsertOrReplace());
}
returnedRowType = queryMaker.getResultType();
}
state = State.PREPARED;
return new PrepareResult(returnedRowType, parameterTypes);
prepareResult = new PrepareResult(returnedRowType, parameterTypes);
}
/**
@ -285,32 +286,30 @@ public class DruidPlanner implements Closeable
* an authenticated request must be authorized for to process the
* query. The actions will be {@code null} if the
* planner has not yet advanced to the validation step. This may occur if
* validation fails and the caller ({@code SqlLifecycle}) accesses the resource
* validation fails and the caller accesses the resource
* actions as part of clean-up.
*/
public Set<ResourceAction> resourceActions(boolean includeContext)
{
Set<ResourceAction> actions;
if (includeContext) {
actions = new HashSet<>(resourceActions);
Set<ResourceAction> actions = new HashSet<>(resourceActions);
plannerContext.getQueryContext().getUserParams().keySet().forEach(contextParam -> actions.add(
new ResourceAction(new Resource(contextParam, ResourceType.QUERY_CONTEXT), Action.WRITE)
));
return actions;
} else {
actions = resourceActions;
return resourceActions;
}
return actions;
}
/**
* Plan an SQL query for execution, returning a {@link PlannerResult} which can be used to actually execute the query.
*
* Ideally, the query can be planned into a native Druid query, using {@link #planWithDruidConvention}, but will
* fall back to {@link #planWithBindableConvention} if this is not possible.
*
* Planning reuses the validation done in `validate()` which must be called first.
*/
@SuppressWarnings("RedundantThrows")
public PlannerResult plan() throws ValidationException
{
Preconditions.checkState(state == State.VALIDATED || state == State.PREPARED);
@ -359,33 +358,17 @@ public class DruidPlanner implements Closeable
return plannerContext;
}
public PrepareResult prepareResult()
{
return prepareResult;
}
@Override
public void close()
{
planner.close();
}
/**
* While the actual query might not have changed, if the druid planner is re-used, we still have the need to reset the
* {@link #planner} since we do not re-use artifacts or keep track of state between
* {@link #validate}, {@link #prepare}, and {@link #plan} and instead repeat parsing and validation
* for each step.
*
* Currently, that state tracking is done in {@link org.apache.druid.sql.SqlLifecycle}, which will create a new
* planner for each of the corresponding steps so this isn't strictly necessary at this time, this method is here as
* much to make this situation explicit and provide context for a future refactor as anything else (and some tests
* do re-use the planner between validate, prepare, and plan, which will run into this issue).
*
* This could be improved by tying {@link org.apache.druid.sql.SqlLifecycle} and {@link DruidPlanner} states more
* closely with the state of {@link #planner}, instead of repeating parsing and validation between each of these
* steps.
*/
private void resetPlanner()
{
planner.close();
planner.reset();
}
/**
* Construct a {@link PlannerResult} for a {@link RelNode} that is directly translatable to a native Druid query.
*/
@ -398,6 +381,9 @@ public class DruidPlanner implements Closeable
final RelRoot possiblyLimitedRoot = possiblyWrapRootWithOuterLimitFromContext(root);
final QueryMaker queryMaker = buildQueryMaker(possiblyLimitedRoot, insertOrReplace);
plannerContext.setQueryMaker(queryMaker);
if (prepareResult == null) {
doPrepare(queryMaker);
}
// Fall-back dynamic parameter substitution using {@link RelParameterizerShuttle}
// in the event that {@link #rewriteDynamicParameters(SqlNode)} was unable to
@ -513,10 +499,7 @@ public class DruidPlanner implements Closeable
@Override
public Object[] next()
{
// Avoids an Intellij IteratorNextCanNotThrowNoSuchElementException
// warning.
Object[] temp = (Object[]) enumerator.current();
return temp;
return (Object[]) enumerator.current();
}
});
}
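Taken together, the planner now supports a straight-line call sequence; a usage sketch assembled from the methods in this file (exception handling elided; sql and queryContext are assumed inputs, and createPlanner and run are as used elsewhere in this change):
try (DruidPlanner planner = plannerFactory.createPlanner(sql, queryContext)) {
  planner.validate();                        // parse + validate, collect resource actions
  PrepareResult types = planner.prepare();   // returned row type and parameter types
  PlannerResult plan = planner.plan();       // reuses validation; prepares if not yet done
  Sequence<Object[]> rows = plan.run();      // execute the planned query
}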

View File

@ -83,6 +83,8 @@ public class PlannerConfig
@JsonProperty
private int maxNumericInFilters = NUM_FILTER_NOT_USED;
private boolean serializeComplexValues = true;
public long getMetadataSegmentPollPeriod()
{
return metadataSegmentPollPeriod;
@ -98,8 +100,6 @@ public class PlannerConfig
return metadataSegmentCacheEnable;
}
private boolean serializeComplexValues = true;
public Period getMetadataRefreshPeriod()
{
return metadataRefreshPeriod;
@ -174,71 +174,9 @@ public class PlannerConfig
if (queryContext.isEmpty()) {
return this;
}
final PlannerConfig newConfig = new PlannerConfig();
newConfig.metadataRefreshPeriod = getMetadataRefreshPeriod();
newConfig.maxTopNLimit = getMaxTopNLimit();
newConfig.useApproximateCountDistinct = queryContext.getAsBoolean(
CTX_KEY_USE_APPROXIMATE_COUNT_DISTINCT,
isUseApproximateCountDistinct()
);
newConfig.useGroupingSetForExactDistinct = queryContext.getAsBoolean(
CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT,
isUseGroupingSetForExactDistinct()
);
newConfig.useApproximateTopN = queryContext.getAsBoolean(
CTX_KEY_USE_APPROXIMATE_TOPN,
isUseApproximateTopN()
);
newConfig.computeInnerJoinCostAsFilter = queryContext.getAsBoolean(
CTX_COMPUTE_INNER_JOIN_COST_AS_FILTER,
computeInnerJoinCostAsFilter
);
newConfig.useNativeQueryExplain = queryContext.getAsBoolean(
CTX_KEY_USE_NATIVE_QUERY_EXPLAIN,
isUseNativeQueryExplain()
);
newConfig.forceExpressionVirtualColumns = queryContext.getAsBoolean(
CTX_KEY_FORCE_EXPRESSION_VIRTUAL_COLUMNS,
isForceExpressionVirtualColumns()
);
final int systemConfigMaxNumericInFilters = getMaxNumericInFilters();
final int queryContextMaxNumericInFilters = queryContext.getAsInt(
CTX_MAX_NUMERIC_IN_FILTERS,
getMaxNumericInFilters()
);
newConfig.maxNumericInFilters = validateMaxNumericInFilters(queryContextMaxNumericInFilters,
systemConfigMaxNumericInFilters);
newConfig.requireTimeCondition = isRequireTimeCondition();
newConfig.sqlTimeZone = getSqlTimeZone();
newConfig.awaitInitializationOnStart = isAwaitInitializationOnStart();
newConfig.metadataSegmentCacheEnable = isMetadataSegmentCacheEnable();
newConfig.metadataSegmentPollPeriod = getMetadataSegmentPollPeriod();
newConfig.serializeComplexValues = shouldSerializeComplexValues();
newConfig.authorizeSystemTablesDirectly = isAuthorizeSystemTablesDirectly();
return newConfig;
}
private int validateMaxNumericInFilters(int queryContextMaxNumericInFilters, int systemConfigMaxNumericInFilters)
{
// if maxNumericInFilters from the query context == 0, throw an exception
// else if the query context value exceeds the system-set value, throw an error
if (queryContextMaxNumericInFilters == 0) {
throw new UOE("[%s] must be greater than 0", CTX_MAX_NUMERIC_IN_FILTERS);
} else if (queryContextMaxNumericInFilters > systemConfigMaxNumericInFilters
&& systemConfigMaxNumericInFilters != NUM_FILTER_NOT_USED) {
throw new UOE(
"Expected parameter[%s] cannot exceed system set value of [%d]",
CTX_MAX_NUMERIC_IN_FILTERS,
systemConfigMaxNumericInFilters
);
}
// if no system value is set (the -1 sentinel), return the sentinel unchanged
if (systemConfigMaxNumericInFilters == NUM_FILTER_NOT_USED) {
return systemConfigMaxNumericInFilters;
}
// all other cases return the valid query context value
return queryContextMaxNumericInFilters;
return toBuilder()
.withOverrides(queryContext)
.build();
}
@Override
@ -302,4 +240,215 @@ public class PlannerConfig
", useNativeQueryExplain=" + useNativeQueryExplain +
'}';
}
public static Builder builder()
{
return new PlannerConfig().toBuilder();
}
public Builder toBuilder()
{
return new Builder(this);
}
/**
* Builder for {@link PlannerConfig}, primarily for use in tests to
* allow setting options programmatically rather than from the command
* line or a properties file. Starts with values from an existing
* (typically default) config.
*/
public static class Builder
{
private Period metadataRefreshPeriod;
private int maxTopNLimit;
private boolean useApproximateCountDistinct;
private boolean useApproximateTopN;
private boolean requireTimeCondition;
private boolean awaitInitializationOnStart;
private DateTimeZone sqlTimeZone;
private boolean metadataSegmentCacheEnable;
private long metadataSegmentPollPeriod;
private boolean useGroupingSetForExactDistinct;
private boolean computeInnerJoinCostAsFilter;
private boolean authorizeSystemTablesDirectly;
private boolean useNativeQueryExplain;
private boolean forceExpressionVirtualColumns;
private int maxNumericInFilters;
private boolean serializeComplexValues;
public Builder(PlannerConfig base)
{
// Note: use accessors, not fields, since some tests change the
// config by defining a subclass.
metadataRefreshPeriod = base.getMetadataRefreshPeriod();
maxTopNLimit = base.getMaxTopNLimit();
useApproximateCountDistinct = base.isUseApproximateCountDistinct();
useApproximateTopN = base.isUseApproximateTopN();
requireTimeCondition = base.isRequireTimeCondition();
awaitInitializationOnStart = base.isAwaitInitializationOnStart();
sqlTimeZone = base.getSqlTimeZone();
metadataSegmentCacheEnable = base.isMetadataSegmentCacheEnable();
useGroupingSetForExactDistinct = base.isUseGroupingSetForExactDistinct();
metadataSegmentPollPeriod = base.getMetadataSegmentPollPeriod();
computeInnerJoinCostAsFilter = base.computeInnerJoinCostAsFilter;
authorizeSystemTablesDirectly = base.isAuthorizeSystemTablesDirectly();
useNativeQueryExplain = base.isUseNativeQueryExplain();
forceExpressionVirtualColumns = base.isForceExpressionVirtualColumns();
maxNumericInFilters = base.getMaxNumericInFilters();
serializeComplexValues = base.shouldSerializeComplexValues();
}
public Builder requireTimeCondition(boolean option)
{
this.requireTimeCondition = option;
return this;
}
public Builder maxTopNLimit(int value)
{
this.maxTopNLimit = value;
return this;
}
public Builder maxNumericInFilters(int value)
{
this.maxNumericInFilters = value;
return this;
}
public Builder useApproximateCountDistinct(boolean option)
{
this.useApproximateCountDistinct = option;
return this;
}
public Builder useApproximateTopN(boolean option)
{
this.useApproximateTopN = option;
return this;
}
public Builder useGroupingSetForExactDistinct(boolean option)
{
this.useGroupingSetForExactDistinct = option;
return this;
}
public Builder computeInnerJoinCostAsFilter(boolean option)
{
this.computeInnerJoinCostAsFilter = option;
return this;
}
public Builder sqlTimeZone(DateTimeZone value)
{
this.sqlTimeZone = value;
return this;
}
public Builder authorizeSystemTablesDirectly(boolean option)
{
this.authorizeSystemTablesDirectly = option;
return this;
}
public Builder serializeComplexValues(boolean option)
{
this.serializeComplexValues = option;
return this;
}
public Builder useNativeQueryExplain(boolean option)
{
this.useNativeQueryExplain = option;
return this;
}
public Builder metadataRefreshPeriod(String value)
{
this.metadataRefreshPeriod = new Period(value);
return this;
}
public Builder withOverrides(final QueryContext queryContext)
{
useApproximateCountDistinct = queryContext.getAsBoolean(
CTX_KEY_USE_APPROXIMATE_COUNT_DISTINCT,
useApproximateCountDistinct
);
useGroupingSetForExactDistinct = queryContext.getAsBoolean(
CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT,
useGroupingSetForExactDistinct
);
useApproximateTopN = queryContext.getAsBoolean(
CTX_KEY_USE_APPROXIMATE_TOPN,
useApproximateTopN
);
computeInnerJoinCostAsFilter = queryContext.getAsBoolean(
CTX_COMPUTE_INNER_JOIN_COST_AS_FILTER,
computeInnerJoinCostAsFilter
);
useNativeQueryExplain = queryContext.getAsBoolean(
CTX_KEY_USE_NATIVE_QUERY_EXPLAIN,
useNativeQueryExplain
);
forceExpressionVirtualColumns = queryContext.getAsBoolean(
CTX_KEY_FORCE_EXPRESSION_VIRTUAL_COLUMNS,
forceExpressionVirtualColumns
);
final int queryContextMaxNumericInFilters = queryContext.getAsInt(
CTX_MAX_NUMERIC_IN_FILTERS,
maxNumericInFilters
);
maxNumericInFilters = validateMaxNumericInFilters(
queryContextMaxNumericInFilters,
maxNumericInFilters);
return this;
}
private static int validateMaxNumericInFilters(int queryContextMaxNumericInFilters, int systemConfigMaxNumericInFilters)
{
// if maxNumericInFilters from the query context == 0, throw an exception
// else if the query context value exceeds the system-set value, throw an error
if (queryContextMaxNumericInFilters == 0) {
throw new UOE("[%s] must be greater than 0", CTX_MAX_NUMERIC_IN_FILTERS);
} else if (queryContextMaxNumericInFilters > systemConfigMaxNumericInFilters
&& systemConfigMaxNumericInFilters != NUM_FILTER_NOT_USED) {
throw new UOE(
"Expected parameter[%s] cannot exceed system set value of [%d]",
CTX_MAX_NUMERIC_IN_FILTERS,
systemConfigMaxNumericInFilters
);
}
// if no system value is set (the -1 sentinel), return the sentinel unchanged
if (systemConfigMaxNumericInFilters == NUM_FILTER_NOT_USED) {
return systemConfigMaxNumericInFilters;
}
// all other cases return the valid query context value
return queryContextMaxNumericInFilters;
}
public PlannerConfig build()
{
PlannerConfig config = new PlannerConfig();
config.metadataRefreshPeriod = metadataRefreshPeriod;
config.maxTopNLimit = maxTopNLimit;
config.useApproximateCountDistinct = useApproximateCountDistinct;
config.useApproximateTopN = useApproximateTopN;
config.requireTimeCondition = requireTimeCondition;
config.awaitInitializationOnStart = awaitInitializationOnStart;
config.sqlTimeZone = sqlTimeZone;
config.metadataSegmentCacheEnable = metadataSegmentCacheEnable;
config.metadataSegmentPollPeriod = metadataSegmentPollPeriod;
config.useGroupingSetForExactDistinct = useGroupingSetForExactDistinct;
config.computeInnerJoinCostAsFilter = computeInnerJoinCostAsFilter;
config.authorizeSystemTablesDirectly = authorizeSystemTablesDirectly;
config.useNativeQueryExplain = useNativeQueryExplain;
config.maxNumericInFilters = maxNumericInFilters;
config.forceExpressionVirtualColumns = forceExpressionVirtualColumns;
config.serializeComplexValues = serializeComplexValues;
return config;
}
}
}
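A brief usage sketch of the new builder (the queryContext value is assumed to be an existing QueryContext; all method names are from the code above):
PlannerConfig base = PlannerConfig.builder()
    .maxTopNLimit(1000)
    .useApproximateTopN(false)
    .serializeComplexValues(false)
    .build();
// Apply per-query overrides, validating maxNumericInFilters along the way:
PlannerConfig effective = base.toBuilder()
    .withOverrides(queryContext)
    .build();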

View File

@ -22,11 +22,13 @@ package org.apache.druid.sql.calcite.rule;
import org.apache.calcite.plan.Convention;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.convert.ConverterRule;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.sql.calcite.rel.DruidConvention;
import org.apache.druid.sql.calcite.rel.DruidRel;
public class DruidRelToDruidRule extends ConverterRule
{
private static final Logger log = new Logger(DruidRelToDruidRule.class);
private static final DruidRelToDruidRule INSTANCE = new DruidRelToDruidRule();
private DruidRelToDruidRule()
@ -47,6 +49,12 @@ public class DruidRelToDruidRule extends ConverterRule
@Override
public RelNode convert(RelNode rel)
{
return ((DruidRel) rel).asDruidConvention();
try {
return ((DruidRel<?>) rel).asDruidConvention();
}
catch (Exception e) {
log.error(e, "Conversion failed");
throw e;
}
}
}

View File

@ -1153,7 +1153,7 @@ public class SystemSchema extends AbstractSchema
authorizerMapper
);
if (!stateAccess.isAllowed()) {
throw new ForbiddenException("Insufficient permission to view servers : " + stateAccess);
throw new ForbiddenException("Insufficient permission to view servers: " + stateAccess.toMessage());
}
}
}

View File

@ -31,7 +31,6 @@ import org.apache.druid.sql.avatica.AvaticaModule;
import org.apache.druid.sql.calcite.aggregation.SqlAggregationModule;
import org.apache.druid.sql.calcite.expression.builtin.QueryLookupOperatorConversion;
import org.apache.druid.sql.calcite.planner.CalcitePlannerModule;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.schema.DruidCalciteSchemaModule;
import org.apache.druid.sql.calcite.schema.DruidSchemaManager;
import org.apache.druid.sql.calcite.schema.NoopDruidSchemaManager;
@ -56,7 +55,6 @@ public class SqlModule implements Module
public SqlModule()
{
}
@VisibleForTesting
@ -70,48 +68,48 @@ public class SqlModule implements Module
@Override
public void configure(Binder binder)
{
if (isEnabled()) {
Calcites.setSystemProperties();
if (!isEnabled()) {
return;
}
PolyBind.optionBinder(binder, Key.get(ViewManager.class))
.addBinding(NoopViewManager.TYPE)
.to(NoopViewManager.class)
.in(LazySingleton.class);
PolyBind.optionBinder(binder, Key.get(ViewManager.class))
.addBinding(NoopViewManager.TYPE)
.to(NoopViewManager.class)
.in(LazySingleton.class);
PolyBind.createChoiceWithDefault(
binder,
PROPERTY_SQL_VIEW_MANAGER_TYPE,
Key.get(ViewManager.class),
NoopViewManager.TYPE
);
PolyBind.createChoiceWithDefault(
binder,
PROPERTY_SQL_VIEW_MANAGER_TYPE,
Key.get(ViewManager.class),
NoopViewManager.TYPE
);
PolyBind.optionBinder(binder, Key.get(DruidSchemaManager.class))
.addBinding(NoopDruidSchemaManager.TYPE)
.to(NoopDruidSchemaManager.class)
.in(LazySingleton.class);
PolyBind.optionBinder(binder, Key.get(DruidSchemaManager.class))
.addBinding(NoopDruidSchemaManager.TYPE)
.to(NoopDruidSchemaManager.class)
.in(LazySingleton.class);
PolyBind.createChoiceWithDefault(
binder,
PROPERTY_SQL_SCHEMA_MANAGER_TYPE,
Key.get(DruidSchemaManager.class),
NoopDruidSchemaManager.TYPE
);
PolyBind.createChoiceWithDefault(
binder,
PROPERTY_SQL_SCHEMA_MANAGER_TYPE,
Key.get(DruidSchemaManager.class),
NoopDruidSchemaManager.TYPE
);
binder.install(new DruidCalciteSchemaModule());
binder.install(new CalcitePlannerModule());
binder.install(new SqlAggregationModule());
binder.install(new DruidViewModule());
binder.install(new DruidCalciteSchemaModule());
binder.install(new CalcitePlannerModule());
binder.install(new SqlAggregationModule());
binder.install(new DruidViewModule());
// QueryLookupOperatorConversion isn't in DruidOperatorTable since it needs a LookupExtractorFactoryContainerProvider injected.
SqlBindings.addOperatorConversion(binder, QueryLookupOperatorConversion.class);
// QueryLookupOperatorConversion isn't in DruidOperatorTable since it needs a LookupExtractorFactoryContainerProvider injected.
SqlBindings.addOperatorConversion(binder, QueryLookupOperatorConversion.class);
if (isJsonOverHttpEnabled()) {
binder.install(new SqlHttpModule());
}
if (isJsonOverHttpEnabled()) {
binder.install(new SqlHttpModule());
}
if (isAvaticaEnabled()) {
binder.install(new AvaticaModule());
}
if (isAvaticaEnabled()) {
binder.install(new AvaticaModule());
}
}

View File

@ -34,7 +34,6 @@ import org.apache.druid.java.util.common.guava.Yielders;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.BadQueryException;
import org.apache.druid.query.QueryCapacityExceededException;
import org.apache.druid.query.QueryContext;
import org.apache.druid.query.QueryInterruptedException;
import org.apache.druid.query.QueryTimeoutException;
import org.apache.druid.query.QueryUnsupportedException;
@ -44,11 +43,13 @@ import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.sql.SqlLifecycle;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.HttpStatement;
import org.apache.druid.sql.SqlExecutionReporter;
import org.apache.druid.sql.SqlLifecycleManager;
import org.apache.druid.sql.SqlLifecycleManager.Cancelable;
import org.apache.druid.sql.SqlPlanningException;
import org.apache.druid.sql.SqlRowTransformer;
import org.apache.druid.sql.SqlStatementFactory;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
@ -63,6 +64,7 @@ import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.StreamingOutput;
import java.io.IOException;
import java.util.List;
import java.util.Set;
@ -78,7 +80,7 @@ public class SqlResource
private final ObjectMapper jsonMapper;
private final AuthorizerMapper authorizerMapper;
private final SqlLifecycleFactory sqlLifecycleFactory;
private final SqlStatementFactory sqlLifecycleFactory;
private final SqlLifecycleManager sqlLifecycleManager;
private final ServerConfig serverConfig;
@ -86,7 +88,7 @@ public class SqlResource
public SqlResource(
@Json ObjectMapper jsonMapper,
AuthorizerMapper authorizerMapper,
SqlLifecycleFactory sqlLifecycleFactory,
SqlStatementFactory sqlLifecycleFactory,
SqlLifecycleManager sqlLifecycleManager,
ServerConfig serverConfig
)
@ -106,23 +108,14 @@ public class SqlResource
@Context final HttpServletRequest req
) throws IOException
{
final SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
final String sqlQueryId = lifecycle.initialize(sqlQuery.getQuery(), new QueryContext(sqlQuery.getContext()));
final String remoteAddr = req.getRemoteAddr();
final HttpStatement stmt = sqlLifecycleFactory.httpStatement(sqlQuery, req);
final String sqlQueryId = stmt.sqlQueryId();
final String currThreadName = Thread.currentThread().getName();
try {
Thread.currentThread().setName(StringUtils.format("sql[%s]", sqlQueryId));
lifecycle.setParameters(sqlQuery.getParameterList());
lifecycle.validateAndAuthorize(req);
// must add after lifecycle is authorized
sqlLifecycleManager.add(sqlQueryId, lifecycle);
lifecycle.plan();
final SqlRowTransformer rowTransformer = lifecycle.createRowTransformer();
final Sequence<Object[]> sequence = lifecycle.execute();
final Sequence<Object[]> sequence = stmt.execute();
final SqlRowTransformer rowTransformer = stmt.createRowTransformer();
final Yielder<Object[]> yielder0 = Yielders.each(sequence);
try {
@ -165,7 +158,7 @@ public class SqlResource
}
finally {
yielder.close();
endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, os.getCount());
endLifecycle(stmt, e, os.getCount());
}
}
)
@ -184,36 +177,37 @@ public class SqlResource
}
}
catch (QueryCapacityExceededException cap) {
endLifecycle(sqlQueryId, lifecycle, cap, remoteAddr, -1);
endLifecycle(stmt, cap, -1);
return buildNonOkResponse(QueryCapacityExceededException.STATUS_CODE, cap, sqlQueryId);
}
catch (QueryUnsupportedException unsupported) {
endLifecycle(sqlQueryId, lifecycle, unsupported, remoteAddr, -1);
endLifecycle(stmt, unsupported, -1);
return buildNonOkResponse(QueryUnsupportedException.STATUS_CODE, unsupported, sqlQueryId);
}
catch (QueryTimeoutException timeout) {
endLifecycle(sqlQueryId, lifecycle, timeout, remoteAddr, -1);
endLifecycle(stmt, timeout, -1);
return buildNonOkResponse(QueryTimeoutException.STATUS_CODE, timeout, sqlQueryId);
}
catch (BadQueryException e) {
endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
endLifecycle(stmt, e, -1);
return buildNonOkResponse(BadQueryException.STATUS_CODE, e, sqlQueryId);
}
catch (ForbiddenException e) {
endLifecycleWithoutEmittingMetrics(sqlQueryId, lifecycle);
endLifecycleWithoutEmittingMetrics(stmt);
throw (ForbiddenException) serverConfig.getErrorResponseTransformStrategy()
.transformIfNeeded(e); // let ForbiddenExceptionMapper handle this
}
catch (RelOptPlanner.CannotPlanException e) {
endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
endLifecycle(stmt, e, -1);
SqlPlanningException spe = new SqlPlanningException(SqlPlanningException.PlanningError.UNSUPPORTED_SQL_ERROR,
e.getMessage());
return buildNonOkResponse(BadQueryException.STATUS_CODE, spe, sqlQueryId);
}
// calcite throws a java.lang.AssertionError which is type error not exception. using throwable will catch all
// Calcite throws a java.lang.AssertionError, which is an Error, not an Exception.
// Catching Throwable covers both.
catch (Throwable e) {
log.warn(e, "Failed to handle query: %s", sqlQuery);
endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
endLifecycle(stmt, e, -1);
return buildNonOkResponse(
Status.INTERNAL_SERVER_ERROR.getStatusCode(),
@ -227,23 +221,27 @@ public class SqlResource
}
private void endLifecycleWithoutEmittingMetrics(
String sqlQueryId,
SqlLifecycle lifecycle
HttpStatement stmt
)
{
sqlLifecycleManager.remove(sqlQueryId, lifecycle);
sqlLifecycleManager.remove(stmt.sqlQueryId(), stmt);
stmt.closeQuietly();
}
private void endLifecycle(
String sqlQueryId,
SqlLifecycle lifecycle,
HttpStatement stmt,
@Nullable final Throwable e,
@Nullable final String remoteAddress,
final long bytesWritten
)
{
lifecycle.finalizeStateAndEmitLogsAndMetrics(e, remoteAddress, bytesWritten);
sqlLifecycleManager.remove(sqlQueryId, lifecycle);
SqlExecutionReporter reporter = stmt.reporter();
if (e == null) {
reporter.succeeded(bytesWritten);
} else {
reporter.failed(e);
}
sqlLifecycleManager.remove(stmt.sqlQueryId(), stmt);
stmt.close();
}
private Response buildNonOkResponse(int status, SanitizableException e, String sqlQueryId)
@ -270,13 +268,18 @@ public class SqlResource
{
log.debug("Received cancel request for query [%s]", sqlQueryId);
List<SqlLifecycle> lifecycles = sqlLifecycleManager.getAll(sqlQueryId);
List<Cancelable> lifecycles = sqlLifecycleManager.getAll(sqlQueryId);
if (lifecycles.isEmpty()) {
return Response.status(Status.NOT_FOUND).build();
}
// Consider only datasource and table resources, not context key
// resources, when checking permissions. This means that a user's
// permission to cancel a query depends on the datasource, not the
// context variables used in the query.
Set<ResourceAction> resources = lifecycles
.stream()
.flatMap(lifecycle -> lifecycle.getRequiredResourceActions().stream())
.flatMap(lifecycle -> lifecycle.resources().stream())
.collect(Collectors.toSet());
Access access = AuthorizationUtils.authorizeAllResourceActions(
req,
@ -287,7 +290,7 @@ public class SqlResource
if (access.isAllowed()) {
// should remove only the lifecycles in the snapshot.
sqlLifecycleManager.removeAll(sqlQueryId, lifecycles);
lifecycles.forEach(SqlLifecycle::cancel);
lifecycles.forEach(Cancelable::cancel);
return Response.status(Status.ACCEPTED).build();
} else {
return Response.status(Status.FORBIDDEN).build();
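The success/failure accounting above boils down to this pattern (a sketch using only names from this change; the streaming of rows to the client is elided):
SqlExecutionReporter reporter = stmt.reporter();
try {
  Sequence<Object[]> rows = stmt.execute();
  // ... stream rows to the client, counting bytes written ...
  reporter.succeeded(bytesWritten);
}
catch (Throwable t) {
  reporter.failed(t);
  throw t;
}
finally {
  sqlLifecycleManager.remove(stmt.sqlQueryId(), stmt);
  stmt.close();
}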

View File

@ -0,0 +1,28 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#-------------------------------------------------------------
#
# Properties for Calcite (formerly known as "Saffron").
# Set here to ensure that the properties are absolutely,
# positively read when Calcite first initializes.
#
# This file _should_ be redundant with Calcites, but
# there do seem to be race conditions at various times.
calcite.default.charset=UTF-16LE
calcite.default.nationalcharset=UTF-16LE
calcite.default.collation.name=UTF-16LE$en_US

View File

@ -20,13 +20,15 @@
package org.apache.druid.sql;
import com.google.common.collect.ImmutableList;
import org.apache.druid.sql.SqlLifecycle.State;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.sql.SqlLifecycleManager.Cancelable;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.util.Collections;
import java.util.List;
import java.util.Set;
public class SqlLifecycleManagerTest
{
@ -38,18 +40,11 @@ public class SqlLifecycleManagerTest
lifecycleManager = new SqlLifecycleManager();
}
@Test
public void testAddNonAuthorizedLifeCycle()
{
SqlLifecycle lifecycle = mockLifecycle(State.INITIALIZED);
Assert.assertThrows(AssertionError.class, () -> lifecycleManager.add("sqlId", lifecycle));
}
@Test
public void testAddAuthorizedLifecycle()
{
final String sqlId = "sqlId";
SqlLifecycle lifecycle = mockLifecycle(State.AUTHORIZED);
Cancelable lifecycle = mockLifecycle();
lifecycleManager.add(sqlId, lifecycle);
Assert.assertEquals(ImmutableList.of(lifecycle), lifecycleManager.getAll(sqlId));
}
@ -58,7 +53,7 @@ public class SqlLifecycleManagerTest
public void testRemoveValidLifecycle()
{
final String sqlId = "sqlId";
SqlLifecycle lifecycle = mockLifecycle(State.AUTHORIZED);
Cancelable lifecycle = mockLifecycle();
lifecycleManager.add(sqlId, lifecycle);
Assert.assertEquals(ImmutableList.of(lifecycle), lifecycleManager.getAll(sqlId));
lifecycleManager.remove(sqlId, lifecycle);
@ -69,7 +64,7 @@ public class SqlLifecycleManagerTest
public void testRemoveInvalidSqlQueryId()
{
final String sqlId = "sqlId";
SqlLifecycle lifecycle = mockLifecycle(State.AUTHORIZED);
Cancelable lifecycle = mockLifecycle();
lifecycleManager.add(sqlId, lifecycle);
Assert.assertEquals(ImmutableList.of(lifecycle), lifecycleManager.getAll(sqlId));
lifecycleManager.remove("invalid", lifecycle);
@ -80,10 +75,10 @@ public class SqlLifecycleManagerTest
public void testRemoveValidSqlQueryIdDifferentLifecycleObject()
{
final String sqlId = "sqlId";
SqlLifecycle lifecycle = mockLifecycle(State.AUTHORIZED);
Cancelable lifecycle = mockLifecycle();
lifecycleManager.add(sqlId, lifecycle);
Assert.assertEquals(ImmutableList.of(lifecycle), lifecycleManager.getAll(sqlId));
lifecycleManager.remove(sqlId, mockLifecycle(State.AUTHORIZED));
lifecycleManager.remove(sqlId, mockLifecycle());
Assert.assertEquals(ImmutableList.of(lifecycle), lifecycleManager.getAll(sqlId));
}
@ -91,10 +86,10 @@ public class SqlLifecycleManagerTest
public void testRemoveAllValidSqlQueryIdSubsetOfLifecycles()
{
final String sqlId = "sqlId";
final List<SqlLifecycle> lifecycles = ImmutableList.of(
mockLifecycle(State.AUTHORIZED),
mockLifecycle(State.AUTHORIZED),
mockLifecycle(State.AUTHORIZED)
final List<Cancelable> lifecycles = ImmutableList.of(
mockLifecycle(),
mockLifecycle(),
mockLifecycle()
);
lifecycles.forEach(lifecycle -> lifecycleManager.add(sqlId, lifecycle));
Assert.assertEquals(lifecycles, lifecycleManager.getAll(sqlId));
@ -106,10 +101,10 @@ public class SqlLifecycleManagerTest
public void testRemoveAllInvalidSqlQueryId()
{
final String sqlId = "sqlId";
final List<SqlLifecycle> lifecycles = ImmutableList.of(
mockLifecycle(State.AUTHORIZED),
mockLifecycle(State.AUTHORIZED),
mockLifecycle(State.AUTHORIZED)
final List<Cancelable> lifecycles = ImmutableList.of(
mockLifecycle(),
mockLifecycle(),
mockLifecycle()
);
lifecycles.forEach(lifecycle -> lifecycleManager.add(sqlId, lifecycle));
Assert.assertEquals(lifecycles, lifecycleManager.getAll(sqlId));
@ -121,22 +116,34 @@ public class SqlLifecycleManagerTest
public void testGetAllReturnsListCopy()
{
final String sqlId = "sqlId";
final List<SqlLifecycle> lifecycles = ImmutableList.of(
mockLifecycle(State.AUTHORIZED),
mockLifecycle(State.AUTHORIZED),
mockLifecycle(State.AUTHORIZED)
final List<Cancelable> lifecycles = ImmutableList.of(
mockLifecycle(),
mockLifecycle(),
mockLifecycle()
);
lifecycles.forEach(lifecycle -> lifecycleManager.add(sqlId, lifecycle));
final List<SqlLifecycle> lifecyclesFromGetAll = lifecycleManager.getAll(sqlId);
final List<Cancelable> lifecyclesFromGetAll = lifecycleManager.getAll(sqlId);
lifecycleManager.removeAll(sqlId, lifecyclesFromGetAll);
Assert.assertEquals(lifecycles, lifecyclesFromGetAll);
Assert.assertTrue(lifecycleManager.getAll(sqlId).isEmpty());
}
private static SqlLifecycle mockLifecycle(State state)
private static Cancelable mockLifecycle()
{
SqlLifecycle lifecycle = Mockito.mock(SqlLifecycle.class);
Mockito.when(lifecycle.getState()).thenReturn(state);
return lifecycle;
return new MockCancellable();
}
private static class MockCancellable implements Cancelable
{
@Override
public Set<ResourceAction> resources()
{
return Collections.emptySet();
}
@Override
public void cancel()
{
}
}
}
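For orientation, the manager contract these tests exercise (a sketch; stmt stands for any Cancelable, such as the statement classes introduced in this change):
SqlLifecycleManager manager = new SqlLifecycleManager();
manager.add(sqlQueryId, stmt);                          // register once authorized
List<Cancelable> snapshot = manager.getAll(sqlQueryId); // returns a defensive copy
snapshot.forEach(Cancelable::cancel);                   // cooperative cancellation
manager.removeAll(sqlQueryId, snapshot);                // removes only the snapshot entries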

View File

@ -1,316 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.calcite.avatica.SqlType;
import org.apache.calcite.avatica.remote.TypedValue;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.tools.RelConversionException;
import org.apache.calcite.tools.ValidationException;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceEventBuilder;
import org.apache.druid.query.DefaultQueryConfig;
import org.apache.druid.query.QueryContext;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.server.QueryStackTests;
import org.apache.druid.server.log.RequestLogger;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.Resource;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.server.security.ResourceType;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.planner.PlannerResult;
import org.apache.druid.sql.calcite.planner.PrepareResult;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.http.SqlParameter;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
public class SqlLifecycleTest
{
private PlannerFactory plannerFactory;
private ServiceEmitter serviceEmitter;
private RequestLogger requestLogger;
private SqlLifecycleFactory sqlLifecycleFactory;
private DefaultQueryConfig defaultQueryConfig;
@Before
public void setup()
{
this.plannerFactory = EasyMock.createMock(PlannerFactory.class);
this.serviceEmitter = EasyMock.createMock(ServiceEmitter.class);
this.requestLogger = EasyMock.createMock(RequestLogger.class);
this.defaultQueryConfig = new DefaultQueryConfig(ImmutableMap.of("DEFAULT_KEY", "DEFAULT_VALUE"));
this.sqlLifecycleFactory = new SqlLifecycleFactory(
plannerFactory,
serviceEmitter,
requestLogger,
QueryStackTests.DEFAULT_NOOP_SCHEDULER,
new AuthConfig(),
Suppliers.ofInstance(defaultQueryConfig)
);
}
@Test
public void testIgnoredQueryContextParametersAreIgnored()
{
SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
final String sql = "select 1 + ?";
final Map<String, Object> queryContext = ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true");
lifecycle.initialize(sql, new QueryContext(queryContext));
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
Assert.assertEquals(2, lifecycle.getQueryContext().getMergedParams().size());
// should contain only query id, not bySegment since it is not valid for SQL
Assert.assertTrue(lifecycle.getQueryContext().getMergedParams().containsKey(PlannerContext.CTX_SQL_QUERY_ID));
}
@Test
public void testDefaultQueryContextIsApplied()
{
SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
// lifecycle should not have a query context on it when created/factorized
Assert.assertNull(lifecycle.getQueryContext());
final String sql = "select 1 + ?";
final Map<String, Object> queryContext = ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true");
QueryContext testQueryContext = new QueryContext(queryContext);
// default query context isn't applied to query context until lifecycle is initialized
for (String defaultContextKey : defaultQueryConfig.getContext().keySet()) {
Assert.assertFalse(testQueryContext.getMergedParams().containsKey(defaultContextKey));
}
lifecycle.initialize(sql, testQueryContext);
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
Assert.assertEquals(2, lifecycle.getQueryContext().getMergedParams().size());
// lifecycle should contain default query context values after initialization
for (String defaultContextKey : defaultQueryConfig.getContext().keySet()) {
Assert.assertTrue(lifecycle.getQueryContext().getMergedParams().containsKey(defaultContextKey));
}
}
@Test
public void testStateTransition()
throws ValidationException, SqlParseException, RelConversionException, IOException
{
SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
final String sql = "select 1 + ?";
Assert.assertEquals(SqlLifecycle.State.NEW, lifecycle.getState());
// test initialize
lifecycle.initialize(sql, new QueryContext());
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
List<TypedValue> parameters = ImmutableList.of(new SqlParameter(SqlType.BIGINT, 1L).getTypedValue());
lifecycle.setParameters(parameters);
// setting parameters should not change the state
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
// test authorization
DruidPlanner mockPlanner = EasyMock.createMock(DruidPlanner.class);
PlannerContext mockPlannerContext = EasyMock.createMock(PlannerContext.class);
EasyMock.expect(plannerFactory.createPlanner(EasyMock.eq(sql), EasyMock.anyObject())).andReturn(mockPlanner).once();
EasyMock.expect(mockPlanner.getPlannerContext()).andReturn(mockPlannerContext).once();
mockPlannerContext.setAuthenticationResult(CalciteTests.REGULAR_USER_AUTH_RESULT);
EasyMock.expectLastCall();
mockPlannerContext.setParameters(parameters);
EasyMock.expectLastCall();
mockPlanner.validate();
EasyMock.expectLastCall();
Set<ResourceAction> mockActions = new HashSet<>();
mockActions.add(new ResourceAction(new Resource("dummy", ResourceType.DATASOURCE), Action.READ));
EasyMock.expect(mockPlanner.resourceActions(EasyMock.eq(false))).andReturn(mockActions).once();
EasyMock.expectLastCall();
EasyMock.expect(mockPlanner.authorize(EasyMock.anyObject(), EasyMock.eq(false))).andReturn(Access.OK).once();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext);
lifecycle.validateAndAuthorize(CalciteTests.REGULAR_USER_AUTH_RESULT);
Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext);
// test prepare
PrepareResult mockPrepareResult = EasyMock.createMock(PrepareResult.class);
EasyMock.expect(mockPlanner.prepare()).andReturn(mockPrepareResult).once();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
lifecycle.prepare();
// prepare doesn't change lifecycle state
Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
// test plan
PlannerResult mockPlanResult = EasyMock.createMock(PlannerResult.class);
EasyMock.expect(mockPlanner.plan()).andReturn(mockPlanResult).once();
mockPlanner.close();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
lifecycle.plan();
Assert.assertEquals(mockPlannerContext, lifecycle.getPlannerContext());
Assert.assertEquals(SqlLifecycle.State.PLANNED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
// test execute
EasyMock.expect(mockPlanResult.run()).andReturn(Sequences.simple(ImmutableList.of(new Object[]{2L}))).once();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
lifecycle.execute();
Assert.assertEquals(SqlLifecycle.State.EXECUTING, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
// test emit
EasyMock.expect(mockPlannerContext.getSqlQueryId()).andReturn("id").once();
CopyOnWriteArrayList<String> nativeQueryIds = new CopyOnWriteArrayList<>(ImmutableList.of("id"));
EasyMock.expect(mockPlannerContext.getNativeQueryIds()).andReturn(nativeQueryIds).times(2);
EasyMock.expect(mockPlannerContext.getAuthenticationResult()).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).once();
serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
EasyMock.expectLastCall();
serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
EasyMock.expectLastCall();
requestLogger.logSqlQuery(EasyMock.anyObject());
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
lifecycle.finalizeStateAndEmitLogsAndMetrics(null, null, 10);
Assert.assertEquals(SqlLifecycle.State.DONE, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
}
@Test
public void testStateTransitionHttpRequest()
throws ValidationException, SqlParseException, RelConversionException, IOException
{
// this test is a duplicate of testStateTransition except with a slight
// variation of how validate and authorize is run
SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
final String sql = "select 1 + ?";
Assert.assertEquals(SqlLifecycle.State.NEW, lifecycle.getState());
// test initialize
lifecycle.initialize(sql, new QueryContext());
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
List<TypedValue> parameters = ImmutableList.of(new SqlParameter(SqlType.BIGINT, 1L).getTypedValue());
lifecycle.setParameters(parameters);
// setting parameters should not change the state
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
// test authorization
DruidPlanner mockPlanner = EasyMock.createMock(DruidPlanner.class);
PlannerContext mockPlannerContext = EasyMock.createMock(PlannerContext.class);
EasyMock.expect(plannerFactory.createPlanner(EasyMock.eq(sql), EasyMock.anyObject())).andReturn(mockPlanner).once();
EasyMock.expect(mockPlanner.getPlannerContext()).andReturn(mockPlannerContext).once();
mockPlannerContext.setAuthenticationResult(CalciteTests.REGULAR_USER_AUTH_RESULT);
EasyMock.expectLastCall();
mockPlannerContext.setParameters(parameters);
EasyMock.expectLastCall();
mockPlanner.validate();
EasyMock.expectLastCall();
Set<ResourceAction> mockActions = new HashSet<>();
mockActions.add(new ResourceAction(new Resource("dummy", ResourceType.DATASOURCE), Action.READ));
EasyMock.expect(mockPlanner.resourceActions(EasyMock.eq(false))).andReturn(mockActions).once();
EasyMock.expectLastCall();
EasyMock.expect(mockPlanner.authorize(EasyMock.anyObject(), EasyMock.eq(false))).andReturn(Access.OK).once();
EasyMock.expectLastCall();
// Note: can't check the request usage with mocks: the code is run
// in a function which the mock doesn't actually call.
HttpServletRequest request = EasyMock.createMock(HttpServletRequest.class);
EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).once();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request);
lifecycle.validateAndAuthorize(request);
Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request);
// test prepare
PrepareResult mockPrepareResult = EasyMock.createMock(PrepareResult.class);
EasyMock.expect(mockPlanner.prepare()).andReturn(mockPrepareResult).once();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
lifecycle.prepare();
// prepare doesn't change lifecycle state
Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
// test plan
PlannerResult mockPlanResult = EasyMock.createMock(PlannerResult.class);
EasyMock.expect(mockPlanner.plan()).andReturn(mockPlanResult).once();
mockPlanner.close();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
lifecycle.plan();
Assert.assertEquals(mockPlannerContext, lifecycle.getPlannerContext());
Assert.assertEquals(SqlLifecycle.State.PLANNED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
// test execute
EasyMock.expect(mockPlanResult.run()).andReturn(Sequences.simple(ImmutableList.of(new Object[]{2L}))).once();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
lifecycle.execute();
Assert.assertEquals(SqlLifecycle.State.EXECUTING, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
// test emit
EasyMock.expect(mockPlannerContext.getSqlQueryId()).andReturn("id").once();
CopyOnWriteArrayList<String> nativeQueryIds = new CopyOnWriteArrayList<>(ImmutableList.of("id"));
EasyMock.expect(mockPlannerContext.getNativeQueryIds()).andReturn(nativeQueryIds).times(2);
EasyMock.expect(mockPlannerContext.getAuthenticationResult()).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).once();
serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
EasyMock.expectLastCall();
serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
EasyMock.expectLastCall();
requestLogger.logSqlQuery(EasyMock.anyObject());
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
lifecycle.finalizeStateAndEmitLogsAndMetrics(null, null, 10);
Assert.assertEquals(SqlLifecycle.State.DONE, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
}
}

View File

@ -0,0 +1,469 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.guava.LazySequence;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.DefaultQueryConfig;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.server.QueryScheduler;
import org.apache.druid.server.QueryStackTests;
import org.apache.druid.server.initialization.ServerConfig;
import org.apache.druid.server.log.TestRequestLogger;
import org.apache.druid.server.metrics.NoopServiceEmitter;
import org.apache.druid.server.scheduling.HiLoQueryLaningStrategy;
import org.apache.druid.server.scheduling.ManualQueryPrioritizationStrategy;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.sql.SqlPlanningException.PlanningError;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.planner.PrepareResult;
import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.QueryLogHook;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.http.SqlQuery;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class SqlStatementTest
{
private static QueryRunnerFactoryConglomerate conglomerate;
private static Closer resourceCloser;
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Rule
public QueryLogHook queryLogHook = QueryLogHook.create();
private SpecificSegmentsQuerySegmentWalker walker = null;
private TestRequestLogger testRequestLogger;
private ListeningExecutorService executorService;
private SqlStatementFactory sqlLifecycleFactory;
private final DefaultQueryConfig defaultQueryConfig = new DefaultQueryConfig(
ImmutableMap.of("DEFAULT_KEY", "DEFAULT_VALUE"));
@BeforeClass
public static void setUpClass()
{
resourceCloser = Closer.create();
conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(resourceCloser);
}
@AfterClass
public static void tearDownClass() throws IOException
{
resourceCloser.close();
}
@Before
public void setUp() throws Exception
{
final QueryScheduler scheduler = new QueryScheduler(
5,
ManualQueryPrioritizationStrategy.INSTANCE,
new HiLoQueryLaningStrategy(40),
new ServerConfig()
)
{
@Override
public <T> Sequence<T> run(Query<?> query, Sequence<T> resultSequence)
{
return super.run(
query,
new LazySequence<T>(() -> {
return resultSequence;
})
);
}
};
executorService = MoreExecutors.listeningDecorator(Execs.multiThreaded(8, "test_sql_resource_%s"));
walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder(), scheduler);
final PlannerConfig plannerConfig = PlannerConfig.builder().serializeComplexValues(false).build();
final DruidSchemaCatalog rootSchema = CalciteTests.createMockRootSchema(
conglomerate,
walker,
plannerConfig,
CalciteTests.TEST_AUTHORIZER_MAPPER
);
final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
final ExprMacroTable macroTable = CalciteTests.createExprMacroTable();
testRequestLogger = new TestRequestLogger();
final PlannerFactory plannerFactory = new PlannerFactory(
rootSchema,
CalciteTests.createMockQueryMakerFactory(walker, conglomerate),
operatorTable,
macroTable,
plannerConfig,
CalciteTests.TEST_AUTHORIZER_MAPPER,
CalciteTests.getJsonMapper(),
CalciteTests.DRUID_SCHEMA_NAME,
new CalciteRulesManager(ImmutableSet.of())
);
this.sqlLifecycleFactory = new SqlStatementFactory(
plannerFactory,
new NoopServiceEmitter(),
testRequestLogger,
QueryStackTests.DEFAULT_NOOP_SCHEDULER,
new AuthConfig(),
Suppliers.ofInstance(defaultQueryConfig),
new SqlLifecycleManager()
);
}
@After
public void tearDown() throws Exception
{
walker.close();
walker = null;
executorService.shutdownNow();
executorService.awaitTermination(2, TimeUnit.SECONDS);
}
HttpServletRequest request(boolean ok)
{
HttpServletRequest req = EasyMock.createStrictMock(HttpServletRequest.class);
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT)
.anyTimes();
EasyMock.expect(req.getRemoteAddr()).andReturn(null).once();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH))
.andReturn(null)
.anyTimes();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED))
.andReturn(null)
.anyTimes();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT)
.anyTimes();
req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, ok);
EasyMock.expectLastCall().anyTimes();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT)
.anyTimes();
EasyMock.replay(req);
return req;
}
//-----------------------------------------------------------------
// Direct statements: using an auth result for verification.
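// A direct statement plans, authorizes and runs the query in a single
// execute() call; parse, validation and security errors all surface from that call.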
private SqlQueryPlus queryPlus(final String sql, final AuthenticationResult authResult)
{
return SqlQueryPlus.builder(sql).auth(authResult).build();
}
@Test
public void testDirectHappyPath()
{
SqlQueryPlus sqlReq = queryPlus(
"SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.foo",
CalciteTests.REGULAR_USER_AUTH_RESULT);
DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq);
List<Object[]> results = stmt.execute().toList();
assertEquals(1, results.size());
assertEquals(6L, results.get(0)[0]);
assertEquals("foo", results.get(0)[1]);
}
@Test
public void testDirectSyntaxError()
{
SqlQueryPlus sqlReq = queryPlus(
"SELECT COUNT(*) AS cnt, 'foo' AS",
CalciteTests.REGULAR_USER_AUTH_RESULT);
DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq);
try {
stmt.execute();
fail();
}
catch (SqlPlanningException e) {
// Expected
assertEquals(PlanningError.SQL_PARSE_ERROR.getErrorCode(), e.getErrorCode());
}
}
@Test
public void testDirectValidationError()
{
SqlQueryPlus sqlReq = queryPlus(
"SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.bogus",
CalciteTests.REGULAR_USER_AUTH_RESULT);
DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq);
try {
stmt.execute();
fail();
}
catch (SqlPlanningException e) {
// Expected
assertEquals(PlanningError.VALIDATION_ERROR.getErrorCode(), e.getErrorCode());
}
}
@Test
public void testDirectPermissionError()
{
SqlQueryPlus sqlReq = queryPlus(
"select count(*) from forbiddenDatasource",
CalciteTests.REGULAR_USER_AUTH_RESULT);
DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq);
try {
stmt.execute();
fail();
}
catch (ForbiddenException e) {
// Expected
}
}
//-----------------------------------------------------------------
// HTTP statements: using a servlet request for verification.
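// An HTTP statement obtains its authentication result from the servlet
// request, as the REST endpoint does, rather than from the caller.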
private SqlQuery makeQuery(String sql)
{
return new SqlQuery(
sql,
null,
false,
false,
false,
null,
null
);
}
@Test
public void testHttpHappyPath()
{
HttpStatement stmt = sqlLifecycleFactory.httpStatement(
makeQuery("SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.foo"),
request(true)
);
List<Object[]> results = stmt.execute().toList();
assertEquals(1, results.size());
assertEquals(6L, results.get(0)[0]);
assertEquals("foo", results.get(0)[1]);
}
@Test
public void testHttpSyntaxError()
{
HttpStatement stmt = sqlLifecycleFactory.httpStatement(
makeQuery("SELECT COUNT(*) AS cnt, 'foo' AS"),
request(true)
);
try {
stmt.execute();
fail();
}
catch (SqlPlanningException e) {
// Expected
assertEquals(PlanningError.SQL_PARSE_ERROR.getErrorCode(), e.getErrorCode());
}
}
@Test
public void testHttpValidationError()
{
HttpStatement stmt = sqlLifecycleFactory.httpStatement(
makeQuery("SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.bogus"),
request(true)
);
try {
stmt.execute();
fail();
}
catch (SqlPlanningException e) {
// Expected
assertEquals(PlanningError.VALIDATION_ERROR.getErrorCode(), e.getErrorCode());
}
}
@Test
public void testHttpPermissionError()
{
HttpStatement stmt = sqlLifecycleFactory.httpStatement(
makeQuery("select count(*) from forbiddenDatasource"),
request(false)
);
try {
stmt.execute();
fail();
}
catch (ForbiddenException e) {
// Expected
}
}
//-----------------------------------------------------------------
// Prepared statements: using a prepare/execute model.
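// prepare() plans the query, enforces security, and exposes the result row
// type; execute() then binds parameters and can be called repeatedly.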
@Test
public void testJdbcHappyPath()
{
SqlQueryPlus sqlReq = queryPlus(
"SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.foo",
CalciteTests.REGULAR_USER_AUTH_RESULT);
PreparedStatement stmt = sqlLifecycleFactory.preparedStatement(sqlReq);
PrepareResult prepareResult = stmt.prepare();
RelDataType rowType = prepareResult.getRowType();
assertEquals(2, rowType.getFieldCount());
List<RelDataTypeField> fields = rowType.getFieldList();
assertEquals("cnt", fields.get(0).getName());
assertEquals("BIGINT", fields.get(0).getType().toString());
assertEquals("TheFoo", fields.get(1).getName());
assertEquals("CHAR(3)", fields.get(1).getType().toString());
// JDBC supports a prepare once, execute many model
for (int i = 0; i < 3; i++) {
List<Object[]> results = stmt
.execute(Collections.emptyList())
.execute()
.toList();
assertEquals(1, results.size());
assertEquals(6L, results.get(0)[0]);
assertEquals("foo", results.get(0)[1]);
}
}
@Test
public void testJdbcSyntaxError()
{
SqlQueryPlus sqlReq = queryPlus(
"SELECT COUNT(*) AS cnt, 'foo' AS",
CalciteTests.REGULAR_USER_AUTH_RESULT);
PreparedStatement stmt = sqlLifecycleFactory.preparedStatement(sqlReq);
try {
stmt.prepare();
fail();
}
catch (SqlPlanningException e) {
// Expected
assertEquals(PlanningError.SQL_PARSE_ERROR.getErrorCode(), e.getErrorCode());
}
}
@Test
public void testJdbcValidationError()
{
SqlQueryPlus sqlReq = queryPlus(
"SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.bogus",
CalciteTests.REGULAR_USER_AUTH_RESULT);
PreparedStatement stmt = sqlLifecycleFactory.preparedStatement(sqlReq);
try {
stmt.prepare();
fail();
}
catch (SqlPlanningException e) {
// Expected
assertEquals(PlanningError.VALIDATION_ERROR.getErrorCode(), e.getErrorCode());
}
}
@Test
public void testJdbcPermissionError()
{
SqlQueryPlus sqlReq = queryPlus(
"select count(*) from forbiddenDatasource",
CalciteTests.REGULAR_USER_AUTH_RESULT);
PreparedStatement stmt = sqlLifecycleFactory.preparedStatement(sqlReq);
try {
stmt.prepare();
fail();
}
catch (ForbiddenException e) {
// Expected
}
}
//-----------------------------------------------------------------
// Generic tests.
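// These tests exercise query-context handling that is shared by all statement types.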
@Test
public void testIgnoredQueryContextParametersAreIgnored()
{
SqlQueryPlus sqlReq = SqlQueryPlus
.builder("select 1 + ?")
.context(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true"))
.auth(CalciteTests.REGULAR_USER_AUTH_RESULT)
.build();
DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq);
Map<String, Object> context = stmt.sqlRequest().context().getMergedParams();
Assert.assertEquals(2, context.size());
// Besides the default context key, should contain only the query id; bySegment is removed since it is not valid for SQL
Assert.assertTrue(context.containsKey(PlannerContext.CTX_SQL_QUERY_ID));
}
@Test
public void testDefaultQueryContextIsApplied()
{
SqlQueryPlus sqlReq = SqlQueryPlus
.builder("select 1 + ?")
.context(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true"))
.auth(CalciteTests.REGULAR_USER_AUTH_RESULT)
.build();
DirectStatement stmt = sqlLifecycleFactory.directStatement(sqlReq);
Map<String, Object> context = stmt.sqlRequest().context().getMergedParams();
Assert.assertEquals(2, context.size());
// Statement should contain default query context values
for (String defaultContextKey : defaultQueryConfig.getContext().keySet()) {
Assert.assertTrue(context.containsKey(defaultContextKey));
}
}
}

View File

@ -29,9 +29,7 @@ import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.name.Names;
@ -42,9 +40,9 @@ import org.apache.calcite.avatica.MissingResultsException;
import org.apache.calcite.avatica.NoSuchStatementException;
import org.apache.calcite.avatica.server.AbstractAvaticaHandler;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.GuiceInjectors;
import org.apache.druid.guice.LazySingleton;
import org.apache.druid.initialization.Initialization;
import org.apache.druid.guice.StartupInjectorBuilder;
import org.apache.druid.initialization.CoreInjectorBuilder;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.StringUtils;
@ -194,46 +192,39 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, CalciteTests.TEST_AUTHORIZER_MAPPER);
testRequestLogger = new TestRequestLogger();
injector = Initialization.makeInjectorWithModules(
GuiceInjectors.makeStartupInjector(),
ImmutableList.of(
new Module()
{
@Override
public void configure(Binder binder)
{
binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test");
binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
binder.bind(AuthenticatorMapper.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_MAPPER);
binder.bind(AuthorizerMapper.class).toInstance(CalciteTests.TEST_AUTHORIZER_MAPPER);
binder.bind(Escalator.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_ESCALATOR);
binder.bind(RequestLogger.class).toInstance(testRequestLogger);
binder.bind(DruidSchemaCatalog.class).toInstance(rootSchema);
for (NamedSchema schema : rootSchema.getNamedSchemas().values()) {
Multibinder.newSetBinder(binder, NamedSchema.class).addBinding().toInstance(schema);
}
binder.bind(QueryLifecycleFactory.class)
.toInstance(CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate));
binder.bind(DruidOperatorTable.class).toInstance(operatorTable);
binder.bind(ExprMacroTable.class).toInstance(macroTable);
binder.bind(PlannerConfig.class).toInstance(plannerConfig);
binder.bind(String.class)
.annotatedWith(DruidSchemaName.class)
.toInstance(CalciteTests.DRUID_SCHEMA_NAME);
binder.bind(AvaticaServerConfig.class).toInstance(AVATICA_CONFIG);
binder.bind(ServiceEmitter.class).to(NoopServiceEmitter.class);
binder.bind(QuerySchedulerProvider.class).in(LazySingleton.class);
binder.bind(QueryScheduler.class)
.toProvider(QuerySchedulerProvider.class)
.in(LazySingleton.class);
binder.bind(QueryMakerFactory.class).to(NativeQueryMakerFactory.class);
binder.bind(new TypeLiteral<Supplier<DefaultQueryConfig>>(){}).toInstance(Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of())));
binder.bind(CalciteRulesManager.class).toInstance(new CalciteRulesManager(ImmutableSet.of()));
}
injector = new CoreInjectorBuilder(new StartupInjectorBuilder().build())
.addModule(binder -> {
binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test");
binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
binder.bind(AuthenticatorMapper.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_MAPPER);
binder.bind(AuthorizerMapper.class).toInstance(CalciteTests.TEST_AUTHORIZER_MAPPER);
binder.bind(Escalator.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_ESCALATOR);
binder.bind(RequestLogger.class).toInstance(testRequestLogger);
binder.bind(DruidSchemaCatalog.class).toInstance(rootSchema);
for (NamedSchema schema : rootSchema.getNamedSchemas().values()) {
Multibinder.newSetBinder(binder, NamedSchema.class).addBinding().toInstance(schema);
}
binder.bind(QueryLifecycleFactory.class)
.toInstance(CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate));
binder.bind(DruidOperatorTable.class).toInstance(operatorTable);
binder.bind(ExprMacroTable.class).toInstance(macroTable);
binder.bind(PlannerConfig.class).toInstance(plannerConfig);
binder.bind(String.class)
.annotatedWith(DruidSchemaName.class)
.toInstance(CalciteTests.DRUID_SCHEMA_NAME);
binder.bind(AvaticaServerConfig.class).toInstance(AVATICA_CONFIG);
binder.bind(ServiceEmitter.class).to(NoopServiceEmitter.class);
binder.bind(QuerySchedulerProvider.class).in(LazySingleton.class);
binder.bind(QueryScheduler.class)
.toProvider(QuerySchedulerProvider.class)
.in(LazySingleton.class);
binder.bind(QueryMakerFactory.class).to(NativeQueryMakerFactory.class);
binder.bind(new TypeLiteral<Supplier<DefaultQueryConfig>>(){}).toInstance(Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of())));
binder.bind(CalciteRulesManager.class).toInstance(new CalciteRulesManager(ImmutableSet.of()));
}
)
.build();
druidMeta = injector.getInstance(DruidMeta.class);
final AbstractAvaticaHandler handler = this.getAvaticaHandler(druidMeta);
@ -779,10 +770,9 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
@Test
public void testTooManyStatements() throws SQLException
{
client.createStatement();
client.createStatement();
client.createStatement();
client.createStatement();
for (int i = 0; i < 4; i++) {
client.createStatement();
}
expectedException.expect(AvaticaClientRuntimeException.class);
expectedException.expectMessage("Too many open statements, limit is [4]");
@ -792,16 +782,9 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
@Test
public void testNotTooManyStatementsWhenYouCloseThem() throws SQLException
{
client.createStatement().close();
client.createStatement().close();
client.createStatement().close();
client.createStatement().close();
client.createStatement().close();
client.createStatement().close();
client.createStatement().close();
client.createStatement().close();
client.createStatement().close();
client.createStatement().close();
for (int i = 0; i < 10; i++) {
client.createStatement().close();
}
}
/**
@ -1133,8 +1116,13 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
catch (SQLException e) {
// Expected
}
// SqlLifecycle does not allow logging for security failures.
Assert.assertEquals(0, testRequestLogger.getSqlQueryLogs().size());
Assert.assertEquals(1, testRequestLogger.getSqlQueryLogs().size());
{
final Map<String, Object> stats = testRequestLogger.getSqlQueryLogs().get(0).getQueryStats().getStats();
Assert.assertEquals(false, stats.get("success"));
Assert.assertEquals("regularUser", stats.get("identity"));
Assert.assertTrue(stats.containsKey("exception"));
}
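// Failed security checks are logged as unsuccessful queries rather than being suppressed.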
}
@Test
@ -1180,8 +1168,13 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
catch (SQLException e) {
// Expected
}
// SqlLifecycle does not allow logging for security failures.
Assert.assertEquals(0, testRequestLogger.getSqlQueryLogs().size());
Assert.assertEquals(1, testRequestLogger.getSqlQueryLogs().size());
{
final Map<String, Object> stats = testRequestLogger.getSqlQueryLogs().get(0).getQueryStats().getStats();
Assert.assertEquals(false, stats.get("success"));
Assert.assertEquals("regularUser", stats.get("identity"));
Assert.assertTrue(stats.containsKey("exception"));
}
}
@Test
@ -1519,6 +1512,28 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
}
}
/**
* Verify that a security exception is mapped to the correct Avatica SQL error codes.
*/
@Test
public void testUnauthorizedTable()
{
final String query = "SELECT * FROM " + CalciteTests.FORBIDDEN_DATASOURCE;
final String expectedError = "Error 2 (00002) : Error while executing SQL \"" +
query + "\": Remote driver error: Unauthorized";
try (Statement statement = client.createStatement()) {
statement.executeQuery(query);
}
catch (SQLException e) {
Assert.assertEquals(
e.getMessage(),
expectedError
);
return;
}
Assert.fail("Test failed, did not get SQLException");
}
// Default implementation is for JSON to allow debugging of tests.
protected String getJdbcConnectionString(final int port)
{

View File

@ -20,23 +20,22 @@
package org.apache.druid.sql.avatica;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import org.apache.calcite.avatica.ColumnMetaData;
import org.apache.calcite.avatica.Meta;
import org.apache.calcite.avatica.remote.TypedValue;
import org.apache.calcite.tools.RelConversionException;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.QueryContext;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.server.QueryStackTests;
import org.apache.druid.server.security.AllowAllAuthenticator;
import org.apache.druid.server.security.AuthTestUtils;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
@ -93,8 +92,7 @@ public class DruidStatementTest extends CalciteTestBase
}
private SpecificSegmentsQuerySegmentWalker walker;
private SqlLifecycleFactory sqlLifecycleFactory;
private DruidConnection conn;
private SqlStatementFactory sqlLifecycleFactory;
@Before
public void setUp() throws Exception
@ -116,14 +114,12 @@ public class DruidStatementTest extends CalciteTestBase
CalciteTests.DRUID_SCHEMA_NAME,
new CalciteRulesManager(ImmutableSet.of())
);
sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(plannerFactory);
conn = new DruidConnection("dummy", 4, ImmutableMap.of(), ImmutableMap.of());
this.sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(plannerFactory);
}
@After
public void tearDown() throws Exception
{
conn.close();
walker.close();
walker = null;
}
@ -139,14 +135,15 @@ public class DruidStatementTest extends CalciteTestBase
private DruidJdbcStatement jdbcStatement()
{
return new DruidJdbcStatement(
conn,
"",
0,
new QueryContext(),
sqlLifecycleFactory
);
}
@Test
public void testSubQueryWithOrderByDirect() throws RelConversionException
public void testSubQueryWithOrderByDirect()
{
SqlQueryPlus queryPlus = new SqlQueryPlus(
SUB_QUERY_WITH_ORDER_BY,
@ -167,7 +164,7 @@ public class DruidStatementTest extends CalciteTestBase
}
@Test
public void testFetchPastEOFDirect() throws RelConversionException
public void testFetchPastEOFDirect()
{
SqlQueryPlus queryPlus = new SqlQueryPlus(
SUB_QUERY_WITH_ORDER_BY,
@ -210,22 +207,6 @@ public class DruidStatementTest extends CalciteTestBase
}
}
@Test
public void testSignatureDirect() throws RelConversionException
{
SqlQueryPlus queryPlus = new SqlQueryPlus(
SELECT_STAR_FROM_FOO,
null,
null,
AllowAllAuthenticator.ALLOW_ALL_RESULT
);
try (final DruidJdbcStatement statement = jdbcStatement()) {
// Check signature.
statement.execute(queryPlus, -1);
verifySignature(statement.getSignature());
}
}
/**
* Ensure an error is thrown if the client attempts to fetch from a
* statement after its result set is closed.
@ -253,7 +234,7 @@ public class DruidStatementTest extends CalciteTestBase
}
@Test
public void testSubQueryWithOrderByDirectTwice() throws RelConversionException
public void testSubQueryWithOrderByDirectTwice()
{
SqlQueryPlus queryPlus = new SqlQueryPlus(
SUB_QUERY_WITH_ORDER_BY,
@ -298,7 +279,7 @@ public class DruidStatementTest extends CalciteTestBase
}
@Test
public void testSelectAllInFirstFrameDirect() throws RelConversionException
public void testSelectAllInFirstFrameDirect()
{
SqlQueryPlus queryPlus = new SqlQueryPlus(
SELECT_FROM_FOO,
@ -338,10 +319,9 @@ public class DruidStatementTest extends CalciteTestBase
/**
* Test results spread over two frames. Also checks various state-related
* methods.
* @throws RelConversionException
*/
@Test
public void testSelectSplitOverTwoFramesDirect() throws RelConversionException
public void testSelectSplitOverTwoFramesDirect()
{
SqlQueryPlus queryPlus = new SqlQueryPlus(
SELECT_FROM_FOO,
@ -376,10 +356,9 @@ public class DruidStatementTest extends CalciteTestBase
/**
* Verify that JDBC automatically closes the first result set when we
* open a second for the same statement.
* @throws RelConversionException
*/
@Test
public void testTwoFramesAutoCloseDirect() throws RelConversionException
public void testTwoFramesAutoCloseDirect()
{
SqlQueryPlus queryPlus = new SqlQueryPlus(
SELECT_FROM_FOO,
@ -419,10 +398,9 @@ public class DruidStatementTest extends CalciteTestBase
/**
* Test that closing a statement with pending results automatically
* closes the underlying result set.
* @throws RelConversionException
*/
@Test
public void testTwoFramesCloseWithResultSetDirect() throws RelConversionException
public void testTwoFramesCloseWithResultSetDirect()
{
SqlQueryPlus queryPlus = new SqlQueryPlus(
SELECT_FROM_FOO,
@ -476,6 +454,22 @@ public class DruidStatementTest extends CalciteTestBase
);
}
@Test
public void testSignatureDirect()
{
SqlQueryPlus queryPlus = new SqlQueryPlus(
SELECT_STAR_FROM_FOO,
null,
null,
AllowAllAuthenticator.ALLOW_ALL_RESULT
);
try (final DruidJdbcStatement statement = jdbcStatement()) {
// Check signature.
statement.execute(queryPlus, -1);
verifySignature(statement.getSignature());
}
}
@SuppressWarnings("unchecked")
private void verifySignature(Meta.Signature signature)
{
@ -520,10 +514,9 @@ public class DruidStatementTest extends CalciteTestBase
private DruidJdbcPreparedStatement jdbcPreparedStatement(SqlQueryPlus queryPlus)
{
return new DruidJdbcPreparedStatement(
conn,
"",
0,
queryPlus,
sqlLifecycleFactory,
sqlLifecycleFactory.preparedStatement(queryPlus),
Long.MAX_VALUE
);
}

View File

@ -28,6 +28,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.druid.annotations.UsedByJUnitParamsRunner;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.hll.VersionOneHyperLogLogCollector;
@ -45,7 +46,6 @@ import org.apache.druid.query.DataSource;
import org.apache.druid.query.Druids;
import org.apache.druid.query.JoinDataSource;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryContext;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryDataSource;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
@ -84,8 +84,10 @@ import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.sql.SqlLifecycle;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.DirectStatement;
import org.apache.druid.sql.PreparedStatement;
import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.external.ExternalDataSource;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
@ -96,6 +98,7 @@ import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
import org.apache.druid.sql.calcite.schema.NoopDruidSchemaManager;
import org.apache.druid.sql.calcite.table.RowSignatures;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.QueryLogHook;
@ -117,6 +120,7 @@ import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@ -149,77 +153,36 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public static final Logger log = new Logger(BaseCalciteQueryTest.class);
public static final PlannerConfig PLANNER_CONFIG_DEFAULT = new PlannerConfig();
public static final PlannerConfig PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE = new PlannerConfig()
{
@Override
public boolean shouldSerializeComplexValues()
{
return false;
}
};
public static final PlannerConfig PLANNER_CONFIG_REQUIRE_TIME_CONDITION = new PlannerConfig()
{
@Override
public boolean isRequireTimeCondition()
{
return true;
}
};
public static final PlannerConfig PLANNER_CONFIG_NO_TOPN = new PlannerConfig()
{
@Override
public int getMaxTopNLimit()
{
return 0;
}
};
public static final PlannerConfig PLANNER_CONFIG_NO_HLL = new PlannerConfig()
{
@Override
public boolean isUseApproximateCountDistinct()
{
return false;
}
};
public static final PlannerConfig PLANNER_CONFIG_LOS_ANGELES = new PlannerConfig()
{
@Override
public DateTimeZone getSqlTimeZone()
{
return DateTimes.inferTzFromString("America/Los_Angeles");
}
};
public static final PlannerConfig PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE =
PlannerConfig.builder().serializeComplexValues(false).build();
public static final PlannerConfig PLANNER_CONFIG_AUTHORIZE_SYS_TABLES = new PlannerConfig()
{
@Override
public boolean isAuthorizeSystemTablesDirectly()
{
return true;
}
};
public static final PlannerConfig PLANNER_CONFIG_REQUIRE_TIME_CONDITION =
PlannerConfig.builder().requireTimeCondition(true).build();
public static final PlannerConfig PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN = new PlannerConfig()
{
@Override
public boolean isUseNativeQueryExplain()
{
return true;
}
};
public static final PlannerConfig PLANNER_CONFIG_NO_TOPN =
PlannerConfig.builder().maxTopNLimit(0).build();
public static final PlannerConfig PLANNER_CONFIG_NO_HLL =
PlannerConfig.builder().useApproximateCountDistinct(false).build();
public static final String LOS_ANGELES = "America/Los_Angeles";
public static final PlannerConfig PLANNER_CONFIG_LOS_ANGELES =
PlannerConfig
.builder()
.sqlTimeZone(DateTimes.inferTzFromString(LOS_ANGELES))
.build();
public static final PlannerConfig PLANNER_CONFIG_AUTHORIZE_SYS_TABLES =
PlannerConfig.builder().authorizeSystemTablesDirectly(true).build();
public static final PlannerConfig PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN =
PlannerConfig.builder().useNativeQueryExplain(true).build();
public static final int MAX_NUM_IN_FILTERS = 100;
public static final PlannerConfig PLANNER_CONFIG_MAX_NUMERIC_IN_FILTER = new PlannerConfig()
{
@Override
public int getMaxNumericInFilters()
{
return MAX_NUM_IN_FILTERS;
}
};
public static final PlannerConfig PLANNER_CONFIG_MAX_NUMERIC_IN_FILTER =
PlannerConfig.builder().maxNumericInFilters(MAX_NUM_IN_FILTERS).build();
public static final String DUMMY_SQL_ID = "dummy";
public static final String LOS_ANGELES = "America/Los_Angeles";
private static final ImmutableMap.Builder<String, Object> DEFAULT_QUERY_CONTEXT_BUILDER =
ImmutableMap.<String, Object>builder()
@ -644,9 +607,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public void testQuery(
final String sql,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final List<Object[]> expectedResults
) throws Exception
)
{
testQuery(
PLANNER_CONFIG_DEFAULT,
@ -662,10 +625,10 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public void testQuery(
final String sql,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final List<Object[]> expectedResults,
final RowSignature expectedResultRowSignature
) throws Exception
)
{
testQuery(
PLANNER_CONFIG_DEFAULT,
@ -682,9 +645,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public void testQuery(
final String sql,
final Map<String, Object> context,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final List<Object[]> expectedResults
) throws Exception
)
{
testQuery(
PLANNER_CONFIG_DEFAULT,
@ -700,10 +663,10 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public void testQuery(
final String sql,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final List<Object[]> expectedResults,
final List<SqlParameter> parameters
) throws Exception
)
{
testQuery(
PLANNER_CONFIG_DEFAULT,
@ -721,9 +684,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase
final PlannerConfig plannerConfig,
final String sql,
final AuthenticationResult authenticationResult,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final List<Object[]> expectedResults
) throws Exception
)
{
testQuery(
plannerConfig,
@ -740,9 +703,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public void testQuery(
final String sql,
final Map<String, Object> context,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final ResultsVerifier expectedResultsVerifier
) throws Exception
)
{
testQuery(
PLANNER_CONFIG_DEFAULT,
@ -761,9 +724,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase
final Map<String, Object> queryContext,
final String sql,
final AuthenticationResult authenticationResult,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final List<Object[]> expectedResults
) throws Exception
)
{
log.info("SQL: %s", sql);
queryLogHook.clearRecordedQueries();
@ -778,9 +741,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase
final List<SqlParameter> parameters,
final String sql,
final AuthenticationResult authenticationResult,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final List<Object[]> expectedResults
) throws Exception
)
{
testQuery(
plannerConfig,
@ -800,10 +763,10 @@ public class BaseCalciteQueryTest extends CalciteTestBase
final List<SqlParameter> parameters,
final String sql,
final AuthenticationResult authenticationResult,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final List<Object[]> expectedResults,
final RowSignature expectedResultSignature
) throws Exception
)
{
testQuery(
plannerConfig,
@ -823,10 +786,10 @@ public class BaseCalciteQueryTest extends CalciteTestBase
final List<SqlParameter> parameters,
final String sql,
final AuthenticationResult authenticationResult,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final ResultsVerifier expectedResultsVerifier,
@Nullable final Consumer<ExpectedException> expectedExceptionInitializer
) throws Exception
)
{
log.info("SQL: %s", sql);
@ -849,8 +812,8 @@ public class BaseCalciteQueryTest extends CalciteTestBase
theQueryContext.put(QueryContexts.VECTOR_SIZE_KEY, 2); // Small vector size to ensure we use more than one.
}
final List<Query> theQueries = new ArrayList<>();
for (Query query : expectedQueries) {
final List<Query<?>> theQueries = new ArrayList<>();
for (Query<?> query : expectedQueries) {
theQueries.add(recursivelyOverrideContext(query, theQueryContext));
}
@ -872,7 +835,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
final List<SqlParameter> parameters,
final String sql,
final AuthenticationResult authenticationResult
) throws Exception
)
{
return getResults(
plannerConfig,
@ -897,9 +860,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase
final ExprMacroTable macroTable,
final AuthorizerMapper authorizerMapper,
final ObjectMapper objectMapper
) throws Exception
)
{
final SqlLifecycleFactory sqlLifecycleFactory = getSqlLifecycleFactory(
final SqlStatementFactory sqlLifecycleFactory = getSqlLifecycleFactory(
plannerConfig,
new AuthConfig(),
operatorTable,
@ -907,23 +870,24 @@ public class BaseCalciteQueryTest extends CalciteTestBase
authorizerMapper,
objectMapper
);
SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
Pair<RowSignature, Sequence<Object[]>> result = lifecycle.runSimple(
sql,
queryContext,
parameters,
authenticationResult
final DirectStatement stmt = sqlLifecycleFactory.directStatement(
SqlQueryPlus.builder(sql)
.context(queryContext)
.sqlParameters(parameters)
.auth(authenticationResult)
.build()
);
Sequence<Object[]> results = stmt.execute();
RelDataType rowType = stmt.prepareResult().getRowType();
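// Convert the Calcite row type reported by the prepare result into a Druid
// RowSignature so it can be compared against expected result signatures.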
return new Pair<>(
result.lhs,
result.rhs.toList()
RowSignatures.fromRelDataType(rowType.getFieldNames(), rowType),
results.toList()
);
}
public void verifyResults(
final String sql,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final List<Object[]> expectedResults,
final Pair<RowSignature, List<Object[]>> results
)
@ -933,7 +897,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public void verifyResults(
final String sql,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final Pair<RowSignature, List<Object[]>> results,
final ResultsVerifier expectedResultsVerifier
)
@ -950,7 +914,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
private void verifyQueries(
final String sql,
@Nullable final List<Query> expectedQueries
@Nullable final List<Query<?>> expectedQueries
)
{
if (expectedQueries != null) {
@ -997,7 +961,6 @@ public class BaseCalciteQueryTest extends CalciteTestBase
}
public void testQueryThrows(final String sql, Consumer<ExpectedException> expectedExceptionInitializer)
throws Exception
{
testQueryThrows(sql, new HashMap<>(QUERY_CONTEXT_DEFAULT), ImmutableList.of(), expectedExceptionInitializer);
}
@ -1005,9 +968,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public void testQueryThrows(
final String sql,
final Map<String, Object> queryContext,
final List<Query> expectedQueries,
final List<Query<?>> expectedQueries,
final Consumer<ExpectedException> expectedExceptionInitializer
) throws Exception
)
{
testQuery(
PLANNER_CONFIG_DEFAULT,
@ -1038,7 +1001,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
AuthenticationResult authenticationResult
)
{
SqlLifecycleFactory lifecycleFactory = getSqlLifecycleFactory(
SqlStatementFactory lifecycleFactory = getSqlLifecycleFactory(
plannerConfig,
authConfig,
createOperatorTable(),
@ -1047,12 +1010,16 @@ public class BaseCalciteQueryTest extends CalciteTestBase
queryJsonMapper
);
SqlLifecycle lifecycle = lifecycleFactory.factorize();
lifecycle.initialize(sql, new QueryContext(contexts));
return lifecycle.runAnalyzeResources(authenticationResult);
PreparedStatement stmt = lifecycleFactory.preparedStatement(SqlQueryPlus.builder(sql)
.context(contexts)
.auth(authenticationResult)
.build()
);
stmt.prepare();
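// Once prepared, the statement can report the full set of resources the query touches.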
return stmt.allResources();
}
public SqlLifecycleFactory getSqlLifecycleFactory(
public SqlStatementFactory getSqlLifecycleFactory(
PlannerConfig plannerConfig,
AuthConfig authConfig,
DruidOperatorTable operatorTable,
@ -1085,7 +1052,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
CalciteTests.DRUID_SCHEMA_NAME,
new CalciteRulesManager(ImmutableSet.of())
);
final SqlLifecycleFactory sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(plannerFactory, authConfig);
final SqlStatementFactory sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(plannerFactory, authConfig);
viewManager.createView(
plannerFactory,
@ -1166,7 +1133,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
private static DataSource recursivelyOverrideContext(final DataSource dataSource, final Map<String, Object> context)
{
if (dataSource instanceof QueryDataSource) {
final Query subquery = ((QueryDataSource) dataSource).getQuery();
final Query<?> subquery = ((QueryDataSource) dataSource).getQuery();
return new QueryDataSource(recursivelyOverrideContext(subquery, context));
} else {
return dataSource.withChildren(
@ -1236,13 +1203,6 @@ public class BaseCalciteQueryTest extends CalciteTestBase
.build(),
};
}
public static Map<String, Object> withOverrides(Map<String, Object> originalContext, Map<String, Object> overrides)
{
Map<String, Object> contextWithOverrides = new HashMap<>(originalContext);
contextWithOverrides.putAll(overrides);
return contextWithOverrides;
}
}
protected Map<String, Object> withLeftDirectAccessEnabled(Map<String, Object> context)

View File

@ -58,6 +58,7 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@ -70,7 +71,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
// Tests some query features; somewhat limited since there are no native array column types, so we must use either the
// array constructor or the array aggregator
@Test
public void testSelectConstantArrayExpressionFromTable() throws Exception
public void testSelectConstantArrayExpressionFromTable()
{
testQuery(
"SELECT ARRAY[1,2] as arr, dim1 FROM foo LIMIT 1",
@ -92,7 +93,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByArrayFromCase() throws Exception
public void testGroupByArrayFromCase()
{
cannotVectorize();
testQuery(
@ -121,7 +122,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectNonConstantArrayExpressionFromTable() throws Exception
public void testSelectNonConstantArrayExpressionFromTable()
{
testQuery(
"SELECT ARRAY[CONCAT(dim1, 'word'),'up'] as arr, dim1 FROM foo LIMIT 5",
@ -151,7 +152,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectNonConstantArrayExpressionFromTableForMultival() throws Exception
public void testSelectNonConstantArrayExpressionFromTableForMultival()
{
final String sql = "SELECT ARRAY[CONCAT(dim3, 'word'),'up'] as arr, dim1 FROM foo LIMIT 5";
final Query<?> scanQuery = newScanQueryBuilder()
@ -204,7 +205,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSomeArrayFunctionsWithScanQuery() throws Exception
public void testSomeArrayFunctionsWithScanQuery()
{
// Yes, these outputs are strange sometimes: arrays are in a partial state of existence, so they end up a bit
// stringy for now. This is because virtual column selectors are coercing values back to stringish so that
@ -356,7 +357,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSomeArrayFunctionsWithScanQueryNoStringify() throws Exception
public void testSomeArrayFunctionsWithScanQueryNoStringify()
{
// When not stringifying arrays, some things are still stringified because they are inferred to be typed as strings.
// The planner context which controls stringification of arrays does not apply to multi-valued string columns,
@ -474,7 +475,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayOverlapFilter() throws Exception
public void testArrayOverlapFilter()
{
testQuery(
"SELECT dim3 FROM druid.numfoo WHERE ARRAY_OVERLAP(dim3, ARRAY['a','b']) LIMIT 5",
@ -497,7 +498,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayOverlapFilterNonLiteral() throws Exception
public void testArrayOverlapFilterNonLiteral()
{
testQuery(
"SELECT dim3 FROM druid.numfoo WHERE ARRAY_OVERLAP(dim3, ARRAY[dim2]) LIMIT 5",
@ -519,7 +520,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayContainsFilter() throws Exception
public void testArrayContainsFilter()
{
testQuery(
"SELECT dim3 FROM druid.numfoo WHERE ARRAY_CONTAINS(dim3, ARRAY['a','b']) LIMIT 5",
@ -546,7 +547,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayContainsArrayOfOneElement() throws Exception
public void testArrayContainsArrayOfOneElement()
{
testQuery(
"SELECT dim3 FROM druid.numfoo WHERE ARRAY_CONTAINS(dim3, ARRAY['a']) LIMIT 5",
@ -568,7 +569,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayContainsArrayOfNonLiteral() throws Exception
public void testArrayContainsArrayOfNonLiteral()
{
testQuery(
"SELECT dim3 FROM druid.numfoo WHERE ARRAY_CONTAINS(dim3, ARRAY[dim2]) LIMIT 5",
@ -591,7 +592,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArraySlice() throws Exception
public void testArraySlice()
{
testQuery(
"SELECT ARRAY_SLICE(dim3, 1) FROM druid.numfoo",
@ -619,7 +620,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayLength() throws Exception
public void testArrayLength()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -662,7 +663,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAppend() throws Exception
public void testArrayAppend()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -719,7 +720,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayPrepend() throws Exception
public void testArrayPrepend()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -776,7 +777,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayPrependAppend() throws Exception
public void testArrayPrependAppend()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -840,7 +841,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayConcat() throws Exception
public void testArrayConcat()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -897,7 +898,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayOffset() throws Exception
public void testArrayOffset()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -936,7 +937,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayGroupAsLongArray() throws Exception
public void testArrayGroupAsLongArray()
{
// Cannot vectorize as we do not have support in the native query subsystem for grouping on arrays
cannotVectorize();
@ -985,7 +986,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
@Test
public void testArrayGroupAsDoubleArray() throws Exception
public void testArrayGroupAsDoubleArray()
{
// Cannot vectorize as we do not have support in the native query subsystem for grouping on arrays as keys
cannotVectorize();
@ -1034,7 +1035,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayGroupAsFloatArray() throws Exception
public void testArrayGroupAsFloatArray()
{
// Cannot vectorize as we do not have support in the native query subsystem for grouping on arrays as keys
cannotVectorize();
@ -1083,7 +1084,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayGroupAsArrayWithFunction() throws Exception
public void testArrayGroupAsArrayWithFunction()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1127,7 +1128,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayOrdinal() throws Exception
public void testArrayOrdinal()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1170,7 +1171,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayOffsetOf() throws Exception
public void testArrayOffsetOf()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1219,7 +1220,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayOrdinalOf() throws Exception
public void testArrayOrdinalOf()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1269,7 +1270,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayToString() throws Exception
public void testArrayToString()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1325,7 +1326,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayToStringToMultiValueString() throws Exception
public void testArrayToStringToMultiValueString()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1384,7 +1385,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAgg() throws Exception
public void testArrayAgg()
{
cannotVectorize();
testQuery(
@ -1466,7 +1467,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggMultiValue() throws Exception
public void testArrayAggMultiValue()
{
cannotVectorize();
testQuery(
@ -1524,7 +1525,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggNumeric() throws Exception
public void testArrayAggNumeric()
{
cannotVectorize();
testQuery(
@ -1660,7 +1661,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggArrays() throws Exception
public void testArrayAggArrays()
{
try {
ExpressionProcessing.initializeForTests(true);
@ -1762,7 +1763,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggArraysNoNest() throws Exception
public void testArrayAggArraysNoNest()
{
cannotVectorize();
testQueryThrows(
@ -1823,7 +1824,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayConcatAggArrays() throws Exception
public void testArrayConcatAggArrays()
{
cannotVectorize();
testQuery(
@ -1884,7 +1885,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggToString() throws Exception
public void testArrayAggToString()
{
cannotVectorize();
testQuery(
@ -1926,7 +1927,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggExpression() throws Exception
public void testArrayAggExpression()
{
cannotVectorize();
testQuery(
@ -1970,7 +1971,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggMaxBytes() throws Exception
public void testArrayAggMaxBytes()
{
cannotVectorize();
testQuery(
@ -2028,7 +2029,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggAsArrayFromJoin() throws Exception
public void testArrayAggAsArrayFromJoin()
{
cannotVectorize();
List<Object[]> expectedResults;
@ -2110,7 +2111,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggGroupByArrayAggFromSubquery() throws Exception
public void testArrayAggGroupByArrayAggFromSubquery()
{
cannotVectorize();
@ -2178,7 +2179,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggGroupByArrayAggOfLongsFromSubquery() throws Exception
public void testArrayAggGroupByArrayAggOfLongsFromSubquery() throws IOException
{
requireMergeBuffers(3);
cannotVectorize();
@ -2251,7 +2252,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggGroupByArrayAggOfStringsFromSubquery() throws Exception
public void testArrayAggGroupByArrayAggOfStringsFromSubquery() throws IOException
{
requireMergeBuffers(3);
cannotVectorize();
@ -2317,7 +2318,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggGroupByArrayAggOfDoubleFromSubquery() throws Exception
public void testArrayAggGroupByArrayAggOfDoubleFromSubquery() throws IOException
{
requireMergeBuffers(3);
cannotVectorize();
@ -2384,7 +2385,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggArrayContainsSubquery() throws Exception
public void testArrayAggArrayContainsSubquery()
{
cannotVectorize();
List<Object[]> expectedResults;
@ -2467,7 +2468,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
}
@Test
public void testArrayAggGroupByArrayContainsSubquery() throws Exception
public void testArrayAggGroupByArrayContainsSubquery()
{
cannotVectorize();
List<Object[]> expectedResults;

View File

@ -58,7 +58,7 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testCorrelatedSubquery(Map<String, Object> queryContext) throws Exception
public void testCorrelatedSubquery(Map<String, Object> queryContext)
{
cannotVectorize();
queryContext = withLeftDirectAccessEnabled(queryContext);
@ -175,7 +175,7 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testCorrelatedSubqueryWithLeftFilter(Map<String, Object> queryContext) throws Exception
public void testCorrelatedSubqueryWithLeftFilter(Map<String, Object> queryContext)
{
cannotVectorize();
queryContext = withLeftDirectAccessEnabled(queryContext);
@ -264,7 +264,7 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testCorrelatedSubqueryWithLeftFilter_leftDirectAccessDisabled(Map<String, Object> queryContext) throws Exception
public void testCorrelatedSubqueryWithLeftFilter_leftDirectAccessDisabled(Map<String, Object> queryContext)
{
cannotVectorize();
@ -355,7 +355,7 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testCorrelatedSubqueryWithCorrelatedQueryFilter(Map<String, Object> queryContext) throws Exception
public void testCorrelatedSubqueryWithCorrelatedQueryFilter(Map<String, Object> queryContext)
{
cannotVectorize();
queryContext = withLeftDirectAccessEnabled(queryContext);
@ -449,7 +449,7 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testCorrelatedSubqueryWithCorrelatedQueryFilter_Scan(Map<String, Object> queryContext) throws Exception
public void testCorrelatedSubqueryWithCorrelatedQueryFilter_Scan(Map<String, Object> queryContext)
{
cannotVectorize();
queryContext = withLeftDirectAccessEnabled(queryContext);

View File

@ -33,7 +33,7 @@ import java.util.Map;
public class CalciteExplainQueryTest extends BaseCalciteQueryTest
{
@Test
public void testExplainCountStarOnView() throws Exception
public void testExplainCountStarOnView()
{
// Skip vectorization since otherwise the "context" will change for each subtest.
skipVectorize();
@ -71,7 +71,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest
}
@Test
public void testExplainInformationSchemaColumns() throws Exception
public void testExplainInformationSchemaColumns()
{
final String explanation =
"BindableProject(COLUMN_NAME=[$3], DATA_TYPE=[$7])\n"
@ -93,7 +93,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest
}
@Test
public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception
public void testExplainExactCountDistinctOfSemiJoinResult()
{
// Skip vectorization since otherwise the "context" will change for each subtest.
skipVectorize();
@ -142,7 +142,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest
// This testcase has been added here and not in CalciteSelectQueryTests since this checks if the overrides are working
// properly when displaying the output of "EXPLAIN PLAN FOR ..." queries
@Test
public void testExplainSelectStarWithOverrides() throws Exception
public void testExplainSelectStarWithOverrides()
{
Map<String, Object> useRegularExplainContext = new HashMap<>(QUERY_CONTEXT_DEFAULT);
useRegularExplainContext.put(PlannerConfig.CTX_KEY_USE_NATIVE_QUERY_EXPLAIN, false);
@ -213,7 +213,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest
}
@Test
public void testExplainMultipleTopLevelUnionAllQueries() throws Exception
public void testExplainMultipleTopLevelUnionAllQueries()
{
// Skip vectorization since otherwise the "context" will change for each subtest.
skipVectorize();
@ -260,7 +260,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest
}
@Test
public void testExplainSelectMvfilterExpressions() throws Exception
public void testExplainSelectMvfilterExpressions()
{
// Skip vectorization since otherwise the "context" will change for each subtest.
skipVectorize();
@ -328,7 +328,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest
}
@Test
public void testExplainSelectTimestampExpression() throws Exception
public void testExplainSelectTimestampExpression()
{
// Skip vectorization since otherwise the "context" will change for each subtest.
skipVectorize();

View File

@ -31,7 +31,6 @@ import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryContext;
import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
@ -41,8 +40,9 @@ import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.Resource;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.server.security.ResourceType;
import org.apache.druid.sql.SqlLifecycle;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.DirectStatement;
import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.calcite.external.ExternalDataSource;
import org.apache.druid.sql.calcite.parser.DruidSqlInsert;
import org.apache.druid.sql.calcite.planner.Calcites;
@ -145,7 +145,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
private String expectedTargetDataSource;
private RowSignature expectedTargetSignature;
private List<ResourceAction> expectedResources;
private Query expectedQuery;
private Query<?> expectedQuery;
private Matcher<Throwable> validationErrorMatcher;
private IngestionDmlTester()
@ -267,7 +267,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
throw new ISE("Test must not have expectedQuery");
}
final SqlLifecycleFactory sqlLifecycleFactory = getSqlLifecycleFactory(
final SqlStatementFactory sqlLifecycleFactory = getSqlLifecycleFactory(
plannerConfig,
new AuthConfig(),
createOperatorTable(),
@ -276,14 +276,18 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
queryJsonMapper
);
final SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize();
sqlLifecycle.initialize(sql, new QueryContext(queryContext));
DirectStatement stmt = sqlLifecycleFactory.directStatement(
SqlQueryPlus
.builder(sql)
.context(queryContext)
.auth(authenticationResult)
.build()
);
final Throwable e = Assert.assertThrows(
Throwable.class,
() -> {
sqlLifecycle.validateAndAuthorize(authenticationResult);
sqlLifecycle.plan();
stmt.execute();
}
);
@ -291,7 +295,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
Assert.assertTrue(queryLogHook.getRecordedQueries().isEmpty());
}
private void verifySuccess() throws Exception
private void verifySuccess()
{
if (expectedTargetDataSource == null) {
throw new ISE("Test must have expectedTargetDataSource");
@ -301,7 +305,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
throw new ISE("Test must have expectedResources");
}
final List<Query> expectedQueries =
final List<Query<?>> expectedQueries =
expectedQuery == null
? Collections.emptyList()
: Collections.singletonList(recursivelyOverrideContext(expectedQuery, queryContext));

View File

@ -50,6 +50,7 @@ import org.junit.Assert;
import org.junit.Test;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
@ -429,7 +430,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
}
@Test
public void testInsertWithClusteredByAndOrderBy() throws Exception
public void testInsertWithClusteredByAndOrderBy()
{
try {
testQuery(
@ -452,7 +453,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
}
@Test
public void testInsertWithPartitionedByContainingInvalidGranularity() throws Exception
public void testInsertWithPartitionedByContainingInvalidGranularity()
{
// Throws a ValidationException, which gets converted to a SqlPlanningException before throwing to end user
try {
@ -473,7 +474,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
}
@Test
public void testInsertWithOrderBy() throws Exception
public void testInsertWithOrderBy()
{
try {
testQuery(
@ -514,7 +515,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
}
@Test
public void testExplainInsertFromExternal() throws Exception
public void testExplainInsertFromExternal() throws IOException
{
// Skip vectorization since otherwise the "context" will change for each subtest.
skipVectorize();

View File

@ -105,9 +105,8 @@ import static org.apache.druid.query.QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KE
@RunWith(JUnitParamsRunner.class)
public class CalciteJoinQueryTest extends BaseCalciteQueryTest
{
@Test
public void testInnerJoinWithLimitAndAlias() throws Exception
public void testInnerJoinWithLimitAndAlias()
{
minTopNThreshold = 1;
Map<String, Object> context = new HashMap<>(QUERY_CONTEXT_DEFAULT);
@ -164,7 +163,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
public void testExactTopNOnInnerJoinWithLimit() throws Exception
public void testExactTopNOnInnerJoinWithLimit()
{
// Adjust topN threshold, so that the topN engine keeps only 1 slot for aggregates, which should be enough
// to compute the query with limit 1.
@ -214,7 +213,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
}
@Test
public void testJoinOuterGroupByAndSubqueryHasLimit() throws Exception
public void testJoinOuterGroupByAndSubqueryHasLimit()
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -301,7 +300,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testJoinOuterGroupByAndSubqueryNoLimit(Map<String, Object> queryContext) throws Exception
public void testJoinOuterGroupByAndSubqueryNoLimit(Map<String, Object> queryContext)
{
// Fully removing the join allows this query to vectorize.
if (!isRewriteJoinToFilter(queryContext)) {
@ -384,7 +383,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
}
@Test
public void testJoinWithLimitBeforeJoining() throws Exception
public void testJoinWithLimitBeforeJoining()
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -470,7 +469,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
}
@Test
public void testJoinOnTimeseriesWithFloorOnTime() throws Exception
public void testJoinOnTimeseriesWithFloorOnTime()
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -524,7 +523,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
}
@Test
public void testJoinOnGroupByInsteadOfTimeseriesWithFloorOnTime() throws Exception
public void testJoinOnGroupByInsteadOfTimeseriesWithFloorOnTime()
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -593,7 +592,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testFilterAndGroupByLookupUsingJoinOperatorWithValueFilterPushdownMatchesNothig(Map<String, Object> queryContext)
throws Exception
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -629,7 +628,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testFilterAndGroupByLookupUsingJoinOperatorAllowNulls(Map<String, Object> queryContext) throws Exception
public void testFilterAndGroupByLookupUsingJoinOperatorAllowNulls(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -668,7 +667,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testFilterAndGroupByLookupUsingJoinOperatorBackwards(Map<String, Object> queryContext) throws Exception
public void testFilterAndGroupByLookupUsingJoinOperatorBackwards(Map<String, Object> queryContext)
{
// Like "testFilterAndGroupByLookupUsingJoinOperator", but with the table and lookup reversed.
@ -717,7 +716,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testFilterAndGroupByLookupUsingJoinOperatorWithNotFilter(Map<String, Object> queryContext)
throws Exception
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -756,7 +755,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testJoinUnionTablesOnLookup(Map<String, Object> queryContext) throws Exception
public void testJoinUnionTablesOnLookup(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -802,7 +801,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testFilterAndGroupByLookupUsingJoinOperator(Map<String, Object> queryContext) throws Exception
public void testFilterAndGroupByLookupUsingJoinOperator(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -841,7 +840,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testFilterAndGroupByLookupUsingPostAggregationJoinOperator(Map<String, Object> queryContext)
throws Exception
{
testQuery(
"SELECT base.dim2, lookyloo.v, base.cnt FROM (\n"
@ -891,7 +890,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testGroupByInnerJoinOnLookupUsingJoinOperator(Map<String, Object> queryContext) throws Exception
public void testGroupByInnerJoinOnLookupUsingJoinOperator(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -927,7 +926,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testSelectOnLookupUsingInnerJoinOperator(Map<String, Object> queryContext) throws Exception
public void testSelectOnLookupUsingInnerJoinOperator(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim2, lookyloo.*\n"
@ -959,7 +958,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinTwoLookupsUsingJoinOperator(Map<String, Object> queryContext) throws Exception
public void testLeftJoinTwoLookupsUsingJoinOperator(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim1, dim2, l1.v, l2.v\n"
@ -1004,7 +1003,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinTableLookupLookupWithFilterWithOuterLimit(Map<String, Object> queryContext) throws Exception
public void testInnerJoinTableLookupLookupWithFilterWithOuterLimit(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim1\n"
@ -1047,7 +1046,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinTableLookupLookupWithFilterWithoutLimit(Map<String, Object> queryContext) throws Exception
public void testInnerJoinTableLookupLookupWithFilterWithoutLimit(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim1\n"
@ -1089,7 +1088,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinTableLookupLookupWithFilterWithOuterLimitWithAllColumns(Map<String, Object> queryContext)
throws Exception
{
testQuery(
"SELECT __time, cnt, dim1, dim2, dim3, m1, m2, unique_dim1\n"
@ -1133,7 +1132,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinTableLookupLookupWithFilterWithoutLimitWithAllColumns(Map<String, Object> queryContext)
throws Exception
{
testQuery(
"SELECT __time, cnt, dim1, dim2, dim3, m1, m2, unique_dim1\n"
@ -1174,7 +1173,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testManyManyInnerJoinOnManyManyLookup(Map<String, Object> queryContext) throws Exception
public void testManyManyInnerJoinOnManyManyLookup(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim1\n"
@ -1403,7 +1402,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinQueryOfLookup(Map<String, Object> queryContext) throws Exception
public void testInnerJoinQueryOfLookup(Map<String, Object> queryContext)
{
// Cannot vectorize the subquery.
cannotVectorize();
@ -1455,7 +1454,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinQueryOfLookupRemovable(Map<String, Object> queryContext) throws Exception
public void testInnerJoinQueryOfLookupRemovable(Map<String, Object> queryContext)
{
// Like "testInnerJoinQueryOfLookup", but the subquery is removable.
@ -1493,7 +1492,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinTwoLookupsToTableUsingNumericColumn(Map<String, Object> queryContext) throws Exception
public void testInnerJoinTwoLookupsToTableUsingNumericColumn(Map<String, Object> queryContext)
{
// Regression test for https://github.com/apache/druid/issues/9646.
@ -1555,7 +1554,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinTwoLookupsToTableUsingNumericColumnInReverse(Map<String, Object> queryContext)
throws Exception
{
// Like "testInnerJoinTwoLookupsToTableUsingNumericColumn", but the tables are specified backwards.
@ -1611,7 +1610,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinLookupTableTable(Map<String, Object> queryContext) throws Exception
public void testInnerJoinLookupTableTable(Map<String, Object> queryContext)
{
// Regression test for https://github.com/apache/druid/issues/9646.
@ -1693,7 +1692,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinLookupTableTableChained(Map<String, Object> queryContext) throws Exception
public void testInnerJoinLookupTableTableChained(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -1773,7 +1772,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
public void testWhereInSelectNullFromLookup() throws Exception
public void testWhereInSelectNullFromLookup()
{
// Regression test for https://github.com/apache/druid/issues/9646.
cannotVectorize();
@ -1815,7 +1814,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
}
@Test
public void testCommaJoinLeftFunction() throws Exception
public void testCommaJoinLeftFunction()
{
testQuery(
"SELECT foo.dim1, foo.dim2, l.k, l.v\n"
@ -1853,7 +1852,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
// Hence, a comma join will result in a cross join with the filter on the outermost query
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testCommaJoinTableLookupTableMismatchedTypes(Map<String, Object> queryContext) throws Exception
public void testCommaJoinTableLookupTableMismatchedTypes(Map<String, Object> queryContext)
{
// Regression test for https://github.com/apache/druid/issues/9646.
@ -1908,7 +1907,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testJoinTableLookupTableMismatchedTypesWithoutComma(Map<String, Object> queryContext) throws Exception
public void testJoinTableLookupTableMismatchedTypesWithoutComma(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -1975,7 +1974,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinCastLeft(Map<String, Object> queryContext) throws Exception
public void testInnerJoinCastLeft(Map<String, Object> queryContext)
{
// foo.m1 is FLOAT, l.k is STRING.
@ -2009,7 +2008,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinCastRight(Map<String, Object> queryContext) throws Exception
public void testInnerJoinCastRight(Map<String, Object> queryContext)
{
// foo.m1 is FLOAT, l.k is STRING.
@ -2053,7 +2052,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinMismatchedTypes(Map<String, Object> queryContext) throws Exception
public void testInnerJoinMismatchedTypes(Map<String, Object> queryContext)
{
// foo.m1 is FLOAT, l.k is STRING. Comparing them generates a CAST.
@ -2097,7 +2096,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinLeftFunction(Map<String, Object> queryContext) throws Exception
public void testInnerJoinLeftFunction(Map<String, Object> queryContext)
{
testQuery(
"SELECT foo.dim1, foo.dim2, l.k, l.v\n"
@ -2133,7 +2132,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinRightFunction(Map<String, Object> queryContext) throws Exception
public void testInnerJoinRightFunction(Map<String, Object> queryContext)
{
testQuery(
"SELECT foo.dim1, foo.dim2, l.k, l.v\n"
@ -2176,7 +2175,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinLookupOntoLookupUsingJoinOperator(Map<String, Object> queryContext) throws Exception
public void testLeftJoinLookupOntoLookupUsingJoinOperator(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim2, l1.v, l2.v\n"
@ -2219,7 +2218,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinThreeLookupsUsingJoinOperator(Map<String, Object> queryContext) throws Exception
public void testLeftJoinThreeLookupsUsingJoinOperator(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim1, dim2, l1.v, l2.v, l3.v\n"
@ -2269,7 +2268,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testSelectOnLookupUsingLeftJoinOperator(Map<String, Object> queryContext) throws Exception
public void testSelectOnLookupUsingLeftJoinOperator(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim1, lookyloo.*\n"
@ -2306,7 +2305,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testSelectOnLookupUsingRightJoinOperator(Map<String, Object> queryContext) throws Exception
public void testSelectOnLookupUsingRightJoinOperator(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim1, lookyloo.*\n"
@ -2341,7 +2340,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testSelectOnLookupUsingFullJoinOperator(Map<String, Object> queryContext) throws Exception
public void testSelectOnLookupUsingFullJoinOperator(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim1, m1, cnt, lookyloo.*\n"
@ -2381,7 +2380,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInAggregationSubquery(Map<String, Object> queryContext) throws Exception
public void testInAggregationSubquery(Map<String, Object> queryContext)
{
// Fully removing the join allows this query to vectorize.
if (!isRewriteJoinToFilter(queryContext)) {
@ -2431,7 +2430,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testNotInAggregationSubquery(Map<String, Object> queryContext) throws Exception
public void testNotInAggregationSubquery(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -2517,7 +2516,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testUsingSubqueryWithExtractionFns(Map<String, Object> queryContext) throws Exception
public void testUsingSubqueryWithExtractionFns(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -2575,7 +2574,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinWithIsNullFilter(Map<String, Object> queryContext) throws Exception
public void testInnerJoinWithIsNullFilter(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim1, l.v from druid.foo f inner join lookup.lookyloo l on f.dim1 = l.k where f.dim2 is null",
@ -2608,7 +2607,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
@Ignore // regression test for https://github.com/apache/druid/issues/9924
public void testInnerJoinOnMultiValueColumn(Map<String, Object> queryContext) throws Exception
public void testInnerJoinOnMultiValueColumn(Map<String, Object> queryContext)
{
cannotVectorize();
testQuery(
@ -2648,7 +2647,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinOnTwoInlineDataSourcesWithTimeFilter(Map<String, Object> queryContext) throws Exception
public void testLeftJoinOnTwoInlineDataSourcesWithTimeFilter(Map<String, Object> queryContext)
{
testQuery(
"with abc as\n"
@ -2718,7 +2717,6 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinOnTwoInlineDataSourcesWithTimeFilter_withLeftDirectAccess(Map<String, Object> queryContext)
throws Exception
{
queryContext = withLeftDirectAccessEnabled(queryContext);
testQuery(
@ -2776,7 +2774,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinOnTwoInlineDataSourcesWithOuterWhere(Map<String, Object> queryContext) throws Exception
public void testLeftJoinOnTwoInlineDataSourcesWithOuterWhere(Map<String, Object> queryContext)
{
testQuery(
"with abc as\n"
@ -2831,7 +2829,6 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinOnTwoInlineDataSourcesWithOuterWhere_withLeftDirectAccess(Map<String, Object> queryContext)
throws Exception
{
queryContext = withLeftDirectAccessEnabled(queryContext);
testQuery(
@ -2879,7 +2876,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinOnTwoInlineDataSources(Map<String, Object> queryContext) throws Exception
public void testLeftJoinOnTwoInlineDataSources(Map<String, Object> queryContext)
{
testQuery(
"with abc as\n"
@ -2932,7 +2929,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinOnTwoInlineDataSources_withLeftDirectAccess(Map<String, Object> queryContext) throws Exception
public void testLeftJoinOnTwoInlineDataSources_withLeftDirectAccess(Map<String, Object> queryContext)
{
queryContext = withLeftDirectAccessEnabled(queryContext);
testQuery(
@ -2980,7 +2977,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinOnTwoInlineDataSourcesWithOuterWhere(Map<String, Object> queryContext) throws Exception
public void testInnerJoinOnTwoInlineDataSourcesWithOuterWhere(Map<String, Object> queryContext)
{
Druids.ScanQueryBuilder baseScanBuilder = newScanQueryBuilder()
.dataSource(
@ -3035,7 +3032,6 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinOnTwoInlineDataSourcesWithOuterWhere_withLeftDirectAccess(Map<String, Object> queryContext)
throws Exception
{
queryContext = withLeftDirectAccessEnabled(queryContext);
testQuery(
@ -3083,7 +3079,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinOnTwoInlineDataSources(Map<String, Object> queryContext) throws Exception
public void testInnerJoinOnTwoInlineDataSources(Map<String, Object> queryContext)
{
testQuery(
"with abc as\n"
@ -3137,7 +3133,6 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinOnTwoInlineDataSources_withLeftDirectAccess(Map<String, Object> queryContext)
throws Exception
{
queryContext = withLeftDirectAccessEnabled(queryContext);
testQuery(
@ -3195,7 +3190,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
}
@Test
public void testLeftJoinRightTableCanBeEmpty() throws Exception
public void testLeftJoinRightTableCanBeEmpty()
{
// HashJoinSegmentStorageAdapter is not vectorizable
cannotVectorize();
@ -3283,7 +3278,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinSubqueryWithNullKeyFilter(Map<String, Object> queryContext) throws Exception
public void testLeftJoinSubqueryWithNullKeyFilter(Map<String, Object> queryContext)
{
// Cannot vectorize due to 'concat' expression.
cannotVectorize();
@ -3366,14 +3361,14 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinSubqueryWithSelectorFilter(Map<String, Object> queryContext) throws Exception
public void testLeftJoinSubqueryWithSelectorFilter(Map<String, Object> queryContext)
{
// Cannot vectorize due to 'concat' expression.
cannotVectorize();
// Disable the cost model where an inner join is treated like a filter;
// this leads to cost(left join) < cost(converted inner join) for the query below
queryContext = QueryContextForJoinProvider.withOverrides(
queryContext = QueryContexts.override(
queryContext,
ImmutableMap.of("computeInnerJoinCostAsFilter", "false")
);
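As a usage note, QueryContexts.override replaces the test-local QueryContextForJoinProvider.withOverrides helper here; a hedged sketch of its apparent semantics (assumed: it returns a new context map in which the override entries win over the base entries):
// Assumed behavior, inferred from this call site rather than confirmed by the diff.
Map<String, Object> merged = QueryContexts.override(
    queryContext,
    ImmutableMap.of("computeInnerJoinCostAsFilter", "false")
);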
@ -3419,7 +3414,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testLeftJoinWithNotNullFilter(Map<String, Object> queryContext) throws Exception
public void testLeftJoinWithNotNullFilter(Map<String, Object> queryContext)
{
testQuery(
"SELECT s.dim1, t.dim1\n"
@ -3461,11 +3456,9 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
);
}
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinSubqueryWithSelectorFilter(Map<String, Object> queryContext) throws Exception
public void testInnerJoinSubqueryWithSelectorFilter(Map<String, Object> queryContext)
{
// Cannot vectorize due to 'concat' expression.
cannotVectorize();
@ -3519,7 +3512,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSemiJoinWithOuterTimeExtractScan() throws Exception
public void testSemiJoinWithOuterTimeExtractScan()
{
testQuery(
"SELECT dim1, EXTRACT(MONTH FROM __time) FROM druid.foo\n"
@ -3565,7 +3558,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testTwoSemiJoinsSimultaneously(Map<String, Object> queryContext) throws Exception
public void testTwoSemiJoinsSimultaneously(Map<String, Object> queryContext)
{
// Fully removing the join allows this query to vectorize.
if (!isRewriteJoinToFilter(queryContext)) {
@ -3630,7 +3623,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testSemiAndAntiJoinSimultaneouslyUsingWhereInSubquery(Map<String, Object> queryContext) throws Exception
public void testSemiAndAntiJoinSimultaneouslyUsingWhereInSubquery(Map<String, Object> queryContext)
{
cannotVectorize();
@ -3736,7 +3729,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testSemiAndAntiJoinSimultaneouslyUsingExplicitJoins(Map<String, Object> queryContext) throws Exception
public void testSemiAndAntiJoinSimultaneouslyUsingExplicitJoins(Map<String, Object> queryContext)
{
cannotVectorize();
@ -3803,7 +3796,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSemiJoinWithOuterTimeExtractAggregateWithOrderBy() throws Exception
public void testSemiJoinWithOuterTimeExtractAggregateWithOrderBy()
{
// Cannot vectorize due to virtual columns.
cannotVectorize();
@ -3885,7 +3878,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
// TODO: Remove expected Exception when https://github.com/apache/druid/issues/9924 is fixed
@Test(expected = QueryException.class)
@Parameters(source = QueryContextForJoinProvider.class)
public void testJoinOnMultiValuedColumnShouldThrowException(Map<String, Object> queryContext) throws Exception
public void testJoinOnMultiValuedColumnShouldThrowException(Map<String, Object> queryContext)
{
final String query = "SELECT dim3, l.v from druid.foo f inner join lookup.lookyloo l on f.dim3 = l.k\n";
@ -3899,7 +3892,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testUnionAllTwoQueriesLeftQueryIsJoin(Map<String, Object> queryContext) throws Exception
public void testUnionAllTwoQueriesLeftQueryIsJoin(Map<String, Object> queryContext)
{
// Fully removing the join allows this query to vectorize.
if (!isRewriteJoinToFilter(queryContext)) {
@ -3940,7 +3933,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testUnionAllTwoQueriesRightQueryIsJoin(Map<String, Object> queryContext) throws Exception
public void testUnionAllTwoQueriesRightQueryIsJoin(Map<String, Object> queryContext)
{
// Fully removing the join allows this query to vectorize.
if (!isRewriteJoinToFilter(queryContext)) {
@ -3980,7 +3973,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
}
@Test
public void testUnionAllTwoQueriesBothQueriesAreJoin() throws Exception
public void testUnionAllTwoQueriesBothQueriesAreJoin()
{
cannotVectorize();
@ -4027,7 +4020,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testTopNFilterJoin(Map<String, Object> queryContext) throws Exception
public void testTopNFilterJoin(Map<String, Object> queryContext)
{
// Fully removing the join allows this query to vectorize.
if (!isRewriteJoinToFilter(queryContext)) {
@ -4103,7 +4096,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testTopNFilterJoinWithProjection(Map<String, Object> queryContext) throws Exception
public void testTopNFilterJoinWithProjection(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -4175,7 +4168,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
@Ignore("Stopped working after the ability to join on subqueries was added to DruidJoinRule")
public void testRemovableLeftJoin(Map<String, Object> queryContext) throws Exception
public void testRemovableLeftJoin(Map<String, Object> queryContext)
{
// LEFT JOIN where the right-hand side can be ignored.
@ -4229,7 +4222,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testCountDistinctOfLookupUsingJoinOperator(Map<String, Object> queryContext) throws Exception
public void testCountDistinctOfLookupUsingJoinOperator(Map<String, Object> queryContext)
{
// Cannot yet vectorize the JOIN operator.
cannotVectorize();
@ -4271,7 +4264,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testUsingSubqueryAsPartOfAndFilter(Map<String, Object> queryContext) throws Exception
public void testUsingSubqueryAsPartOfAndFilter(Map<String, Object> queryContext)
{
// Fully removing the join allows this query to vectorize.
if (!isRewriteJoinToFilter(queryContext)) {
@ -4334,7 +4327,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testUsingSubqueryAsPartOfOrFilter(Map<String, Object> queryContext) throws Exception
public void testUsingSubqueryAsPartOfOrFilter(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.
cannotVectorize();
@ -4424,7 +4417,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testNestedGroupByOnInlineDataSourceWithFilter(Map<String, Object> queryContext) throws Exception
public void testNestedGroupByOnInlineDataSourceWithFilter(Map<String, Object> queryContext)
{
// Cannot vectorize due to virtual columns.
cannotVectorize();
@ -4576,7 +4569,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testCountOnSemiJoinSingleColumn(Map<String, Object> queryContext) throws Exception
public void testCountOnSemiJoinSingleColumn(Map<String, Object> queryContext)
{
testQuery(
"SELECT dim1 FROM foo WHERE dim1 IN (SELECT dim1 FROM foo WHERE dim1 = '10.1')\n",
@ -4617,7 +4610,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testTopNOnStringWithNonSortedOrUniqueDictionary(Map<String, Object> queryContext) throws Exception
public void testTopNOnStringWithNonSortedOrUniqueDictionary(Map<String, Object> queryContext)
{
testQuery(
"SELECT druid.broadcast.dim4, COUNT(*)\n"
@ -4658,7 +4651,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testTopNOnStringWithNonSortedOrUniqueDictionaryOrderByDim(Map<String, Object> queryContext)
throws Exception
{
testQuery(
"SELECT druid.broadcast.dim4, COUNT(*)\n"
@ -4697,7 +4690,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testVirtualColumnOnMVFilterJoinExpression(Map<String, Object> queryContext) throws Exception
public void testVirtualColumnOnMVFilterJoinExpression(Map<String, Object> queryContext)
{
testQuery(
"SELECT foo1.dim3, foo2.dim3 FROM druid.numfoo as foo1 INNER JOIN druid.numfoo as foo2 "
@ -4746,7 +4739,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testVirtualColumnOnMVFilterMultiJoinExpression(Map<String, Object> queryContext) throws Exception
public void testVirtualColumnOnMVFilterMultiJoinExpression(Map<String, Object> queryContext)
{
testQuery(
"SELECT foo1.dim3, foo2.dim3 FROM druid.numfoo as foo1 INNER JOIN "
@ -4818,7 +4811,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinWithFilterPushdownAndManyFiltersEmptyResults(Map<String, Object> queryContext) throws Exception
public void testInnerJoinWithFilterPushdownAndManyFiltersEmptyResults(Map<String, Object> queryContext)
{
// create the query we expect
ScanQuery query = newScanQueryBuilder()
@ -4937,7 +4930,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testInnerJoinWithFilterPushdownAndManyFiltersNonEmptyResults(Map<String, Object> queryContext) throws Exception
public void testInnerJoinWithFilterPushdownAndManyFiltersNonEmptyResults(Map<String, Object> queryContext)
{
// create the query we expect
ScanQuery query = newScanQueryBuilder()
@ -5062,7 +5055,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
}
@Test
public void testPlanWithInFilterMoreThanInSubQueryThreshold() throws Exception
public void testPlanWithInFilterMoreThanInSubQueryThreshold()
{
String query = "SELECT l1 FROM numfoo WHERE l1 IN (4842, 4844, 4845, 14905, 4853, 29064)";
View File
@ -55,7 +55,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
{
// various queries on multi-valued string dimensions using them like strings
@Test
public void testMultiValueStringWorksLikeStringGroupBy() throws Exception
public void testMultiValueStringWorksLikeStringGroupBy()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -112,7 +112,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringGroupByDoesNotWork() throws Exception
public void testMultiValueStringGroupByDoesNotWork()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -136,7 +136,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringWorksLikeStringGroupByWithFilter() throws Exception
public void testMultiValueStringWorksLikeStringGroupByWithFilter()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -176,7 +176,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringWorksLikeStringScan() throws Exception
public void testMultiValueStringWorksLikeStringScan()
{
final String nullVal = NullHandling.replaceWithDefault() ? "[\"foo\"]" : "[null]";
testQuery(
@ -204,7 +204,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringWorksLikeStringSelfConcatScan() throws Exception
public void testMultiValueStringWorksLikeStringSelfConcatScan()
{
final String nullVal = NullHandling.replaceWithDefault() ? "[\"-lol-\"]" : "[null]";
testQuery(
@ -232,7 +232,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringWorksLikeStringScanWithFilter() throws Exception
public void testMultiValueStringWorksLikeStringScanWithFilter()
{
testQuery(
"SELECT concat(dim3, 'foo') FROM druid.numfoo where concat(dim3, 'foo') = 'bfoo'",
@ -257,7 +257,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
// these are a copy of the ARRAY functions tests in CalciteArraysQueryTest
@Test
public void testMultiValueStringOverlapFilter() throws Exception
public void testMultiValueStringOverlapFilter()
{
testQuery(
"SELECT dim3 FROM druid.numfoo WHERE MV_OVERLAP(dim3, ARRAY['a','b']) LIMIT 5",
@ -280,7 +280,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringOverlapFilterNonLiteral() throws Exception
public void testMultiValueStringOverlapFilterNonLiteral()
{
testQuery(
"SELECT dim3 FROM druid.numfoo WHERE MV_OVERLAP(dim3, ARRAY[dim2]) LIMIT 5",
@ -300,7 +300,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringContainsFilter() throws Exception
public void testMultiValueStringContainsFilter()
{
testQuery(
"SELECT dim3 FROM druid.numfoo WHERE MV_CONTAINS(dim3, ARRAY['a','b']) LIMIT 5",
@ -327,7 +327,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringContainsArrayOfOneElement() throws Exception
public void testMultiValueStringContainsArrayOfOneElement()
{
testQuery(
"SELECT dim3 FROM druid.numfoo WHERE MV_CONTAINS(dim3, ARRAY['a']) LIMIT 5",
@ -349,7 +349,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringContainsArrayOfNonLiteral() throws Exception
public void testMultiValueStringContainsArrayOfNonLiteral()
{
testQuery(
"SELECT dim3 FROM druid.numfoo WHERE MV_CONTAINS(dim3, ARRAY[dim2]) LIMIT 5",
@ -371,7 +371,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringSlice() throws Exception
public void testMultiValueStringSlice()
{
testQuery(
"SELECT MV_SLICE(dim3, 1) FROM druid.numfoo",
@ -398,7 +398,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringLength() throws Exception
public void testMultiValueStringLength()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -441,7 +441,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringAppend() throws Exception
public void testMultiValueStringAppend()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -501,7 +501,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringPrepend() throws Exception
public void testMultiValueStringPrepend()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -561,7 +561,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringPrependAppend() throws Exception
public void testMultiValueStringPrependAppend()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -625,7 +625,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringConcat() throws Exception
public void testMultiValueStringConcat()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -683,7 +683,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringConcatBackwardsCompat0dot22andOlder() throws Exception
public void testMultiValueStringConcatBackwardsCompat0dot22andOlder()
{
try {
ExpressionProcessing.initializeForHomogenizeNullMultiValueStrings();
@ -747,7 +747,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringOffset() throws Exception
public void testMultiValueStringOffset()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -786,7 +786,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringOrdinal() throws Exception
public void testMultiValueStringOrdinal()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -825,7 +825,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringOffsetOf() throws Exception
public void testMultiValueStringOffsetOf()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -874,7 +874,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringOrdinalOf() throws Exception
public void testMultiValueStringOrdinalOf()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -924,7 +924,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringToString() throws Exception
public void testMultiValueStringToString()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -980,7 +980,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueStringToStringToMultiValueString() throws Exception
public void testMultiValueStringToStringToMultiValueString()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1041,7 +1041,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
@Test
public void testMultiValueListFilter() throws Exception
public void testMultiValueListFilter()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1086,7 +1086,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueListFilterDeny() throws Exception
public void testMultiValueListFilterDeny()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1141,7 +1141,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueListFilterComposed() throws Exception
public void testMultiValueListFilterComposed()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1198,7 +1198,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueListFilterComposedNested() throws Exception
public void testMultiValueListFilterComposedNested()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1265,7 +1265,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueListFilterComposedNested2Input() throws Exception
public void testMultiValueListFilterComposedNested2Input()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1320,7 +1320,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueListFilterComposedNestedNullLiteral() throws Exception
public void testMultiValueListFilterComposedNestedNullLiteral()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1380,7 +1380,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueListFilterComposedDeny() throws Exception
public void testMultiValueListFilterComposedDeny()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1429,7 +1429,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueListFilterComposedMultipleExpressions() throws Exception
public void testMultiValueListFilterComposedMultipleExpressions()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1490,7 +1490,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testFilterOnMultiValueListFilterNoMatch() throws Exception
public void testFilterOnMultiValueListFilterNoMatch()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1533,7 +1533,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testFilterOnMultiValueListFilterMatch() throws Exception
public void testFilterOnMultiValueListFilterMatch()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1580,7 +1580,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testFilterOnMultiValueListFilterMatchLike() throws Exception
public void testFilterOnMultiValueListFilterMatchLike()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1627,7 +1627,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueToArrayGroupAsArrayWithMultiValueDimension() throws Exception
public void testMultiValueToArrayGroupAsArrayWithMultiValueDimension()
{
// Cannot vectorize as we do not have support in the native query subsystem for grouping on arrays as keys
cannotVectorize();
@ -1679,7 +1679,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
@Test
public void testMultiValueToArrayGroupAsArrayWithSingleValueDim() throws Exception
public void testMultiValueToArrayGroupAsArrayWithSingleValueDim()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1733,7 +1733,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueToArrayGroupAsArrayWithSingleValueDimIsNotConvertedToTopN() throws Exception
public void testMultiValueToArrayGroupAsArrayWithSingleValueDimIsNotConvertedToTopN()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -1789,7 +1789,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueToArrayMoreArgs() throws Exception
public void testMultiValueToArrayMoreArgs()
{
testQueryThrows(
"SELECT MV_TO_ARRAY(dim3,dim3) FROM druid.numfoo",
@ -1801,7 +1801,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueToArrayNoArgs() throws Exception
public void testMultiValueToArrayNoArgs()
{
testQueryThrows(
"SELECT MV_TO_ARRAY() FROM druid.numfoo",
@ -1813,7 +1813,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueToArrayArgsWithMultiValueDimFunc() throws Exception
public void testMultiValueToArrayArgsWithMultiValueDimFunc()
{
testQueryThrows(
"SELECT MV_TO_ARRAY(concat(dim3,'c')) FROM druid.numfoo",
@ -1822,7 +1822,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueToArrayArgsWithSingleDimFunc() throws Exception
public void testMultiValueToArrayArgsWithSingleDimFunc()
{
testQueryThrows(
"SELECT MV_TO_ARRAY(concat(dim1,'c')) FROM druid.numfoo",
@ -1831,7 +1831,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueToArrayArgsWithConstant() throws Exception
public void testMultiValueToArrayArgsWithConstant()
{
testQueryThrows(
"SELECT MV_TO_ARRAY(concat(dim1,'c')) FROM druid.numfoo",
@ -1840,7 +1840,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultiValueToArrayArgsWithArray() throws Exception
public void testMultiValueToArrayArgsWithArray()
{
testQueryThrows(
"SELECT MV_TO_ARRAY(Array[1,2]) FROM druid.numfoo",
View File
@ -203,7 +203,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPath() throws Exception
public void testGroupByPath()
{
testQuery(
"SELECT "
@ -240,7 +240,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByJsonValue() throws Exception
public void testGroupByJsonValue()
{
testQuery(
"SELECT "
@ -277,7 +277,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testTopNPath() throws Exception
public void testTopNPath()
{
testQuery(
"SELECT "
@ -314,7 +314,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByRootPath() throws Exception
public void testGroupByRootPath()
{
testQuery(
"SELECT "
@ -351,7 +351,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByGetPaths() throws Exception
public void testGroupByGetPaths()
{
testQuery(
"SELECT "
@ -399,7 +399,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByJsonGetPaths() throws Exception
public void testGroupByJsonGetPaths()
{
testQuery(
"SELECT "
@ -447,7 +447,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByJsonValues() throws Exception
public void testGroupByJsonValues()
{
testQuery(
"SELECT "
@ -487,7 +487,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathSelectorFilter() throws Exception
public void testGroupByPathSelectorFilter()
{
testQuery(
"SELECT "
@ -526,7 +526,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathSelectorFilterLong() throws Exception
public void testGroupByPathSelectorFilterLong()
{
testQuery(
"SELECT "
@ -566,7 +566,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathSelectorFilterDouble() throws Exception
public void testGroupByPathSelectorFilterDouble()
{
testQuery(
"SELECT "
@ -606,7 +606,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathSelectorFilterString() throws Exception
public void testGroupByPathSelectorFilterString()
{
testQuery(
"SELECT "
@ -646,7 +646,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathSelectorFilterVariant() throws Exception
public void testGroupByPathSelectorFilterVariant()
{
testQuery(
"SELECT "
@ -681,7 +681,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathSelectorFilterVariant2() throws Exception
public void testGroupByPathSelectorFilterVariant2()
{
testQuery(
"SELECT "
@ -719,7 +719,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathSelectorFilterVariant3() throws Exception
public void testGroupByPathSelectorFilterVariant3()
{
testQuery(
"SELECT "
@ -757,7 +757,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathSelectorFilterNonExistent() throws Exception
public void testGroupByPathSelectorFilterNonExistent()
{
testQuery(
"SELECT "
@ -792,7 +792,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathSelectorFilterNull() throws Exception
public void testGroupByPathSelectorFilterNull()
{
testQuery(
"SELECT "
@ -830,7 +830,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterLong() throws Exception
public void testGroupByPathBoundFilterLong()
{
testQuery(
"SELECT "
@ -867,7 +867,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterLongNoUpper() throws Exception
public void testGroupByPathBoundFilterLongNoUpper()
{
testQuery(
"SELECT "
@ -904,7 +904,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterLongNoLower() throws Exception
public void testGroupByPathBoundFilterLongNoLower()
{
testQuery(
"SELECT "
@ -940,7 +940,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterLongNumeric() throws Exception
public void testGroupByPathBoundFilterLongNumeric()
{
testQuery(
"SELECT "
@ -978,7 +978,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterLongNoUpperNumeric() throws Exception
public void testGroupByPathBoundFilterLongNoUpperNumeric()
{
testQuery(
"SELECT "
@ -1016,7 +1016,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathNumericBoundFilterLongNoUpperNumeric() throws Exception
public void testGroupByPathNumericBoundFilterLongNoUpperNumeric()
{
testQuery(
"SELECT "
@ -1053,7 +1053,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterLongNoLowerNumeric() throws Exception
public void testGroupByPathBoundFilterLongNoLowerNumeric()
{
testQuery(
"SELECT "
@ -1091,7 +1091,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterDouble() throws Exception
public void testGroupByPathBoundFilterDouble()
{
testQuery(
"SELECT "
@ -1128,7 +1128,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterDoubleNoUpper() throws Exception
public void testGroupByPathBoundFilterDoubleNoUpper()
{
testQuery(
"SELECT "
@ -1165,7 +1165,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterDoubleNoLower() throws Exception
public void testGroupByPathBoundFilterDoubleNoLower()
{
testQuery(
"SELECT "
@ -1201,7 +1201,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundDoubleFilterNumeric() throws Exception
public void testGroupByPathBoundDoubleFilterNumeric()
{
testQuery(
"SELECT "
@ -1239,7 +1239,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterDoubleNoUpperNumeric() throws Exception
public void testGroupByPathBoundFilterDoubleNoUpperNumeric()
{
testQuery(
"SELECT "
@ -1277,7 +1277,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterDoubleNoLowerNumeric() throws Exception
public void testGroupByPathBoundFilterDoubleNoLowerNumeric()
{
testQuery(
"SELECT "
@ -1315,7 +1315,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterString() throws Exception
public void testGroupByPathBoundFilterString()
{
testQuery(
"SELECT "
@ -1352,7 +1352,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterStringNoUpper() throws Exception
public void testGroupByPathBoundFilterStringNoUpper()
{
testQuery(
"SELECT "
@ -1390,7 +1390,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathBoundFilterStringNoLower() throws Exception
public void testGroupByPathBoundFilterStringNoLower()
{
testQuery(
"SELECT "
@ -1428,7 +1428,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathLikeFilter() throws Exception
public void testGroupByPathLikeFilter()
{
testQuery(
"SELECT "
@ -1464,7 +1464,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathLikeFilterStringPrefix() throws Exception
public void testGroupByPathLikeFilterStringPrefix()
{
testQuery(
"SELECT "
@ -1501,7 +1501,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathLikeFilterString() throws Exception
public void testGroupByPathLikeFilterString()
{
testQuery(
"SELECT "
@ -1538,7 +1538,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathLikeFilterVariant() throws Exception
public void testGroupByPathLikeFilterVariant()
{
testQuery(
"SELECT "
@ -1575,7 +1575,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathInFilter() throws Exception
public void testGroupByPathInFilter()
{
testQuery(
"SELECT "
@ -1613,7 +1613,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathInFilterDouble() throws Exception
public void testGroupByPathInFilterDouble()
{
testQuery(
"SELECT "
@ -1651,7 +1651,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathInFilterString() throws Exception
public void testGroupByPathInFilterString()
{
testQuery(
"SELECT "
@ -1689,7 +1689,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByPathInFilterVariant() throws Exception
public void testGroupByPathInFilterVariant()
{
testQuery(
"SELECT "
@ -1726,7 +1726,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSumPath() throws Exception
public void testSumPath()
{
testQuery(
"SELECT "
@ -1753,7 +1753,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
@Test
public void testSumPathFilteredAggDouble() throws Exception
public void testSumPathFilteredAggDouble()
{
// this one actually equals 2.1 because the filter is a long, so the double is cast to 1 and both rows match
testQuery(
@ -1791,7 +1791,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSumPathFilteredAggString() throws Exception
public void testSumPathFilteredAggString()
{
testQuery(
"SELECT "
@ -1828,7 +1828,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSumPathMixed() throws Exception
public void testSumPathMixed()
{
// throws a "Cannot make vector value selector for variant typed nested field [[LONG, DOUBLE]]"
skipVectorize();
@ -1856,7 +1856,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSumPathMixedFilteredAggLong() throws Exception
public void testSumPathMixedFilteredAggLong()
{
// throws a "Cannot make vector value selector for variant typed nested field [[LONG, DOUBLE]]"
skipVectorize();
@ -1896,7 +1896,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSumPathMixedFilteredAggDouble() throws Exception
public void testSumPathMixedFilteredAggDouble()
{
// throws a "Cannot make vector value selector for variant typed nested field [[LONG, DOUBLE]]"
skipVectorize();
@ -1932,7 +1932,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testCastAndSumPath() throws Exception
public void testCastAndSumPath()
{
testQuery(
"SELECT "
@ -1959,7 +1959,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
@Test
public void testCastAndSumPathStrings() throws Exception
public void testCastAndSumPathStrings()
{
testQuery(
"SELECT "
@ -1985,7 +1985,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testReturningAndSumPath() throws Exception
public void testReturningAndSumPath()
{
testQuery(
"SELECT "
@ -2012,7 +2012,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
@Test
public void testReturningAndSumPathStrings() throws Exception
public void testReturningAndSumPathStrings()
{
testQuery(
"SELECT "
@ -2038,7 +2038,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByRootKeys() throws Exception
public void testGroupByRootKeys()
{
cannotVectorize();
testQuery(
@ -2080,7 +2080,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByRootKeysJsonPath() throws Exception
public void testGroupByRootKeysJsonPath()
{
cannotVectorize();
testQuery(
@ -2122,7 +2122,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByRootKeys2() throws Exception
public void testGroupByRootKeys2()
{
cannotVectorize();
testQuery(
@ -2165,7 +2165,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByAllPaths() throws Exception
public void testGroupByAllPaths()
{
cannotVectorize();
testQuery(
@ -2207,7 +2207,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByNestedArrayPath() throws Exception
public void testGroupByNestedArrayPath()
{
testQuery(
"SELECT "
@ -2243,7 +2243,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByInvalidPath() throws Exception
public void testGroupByInvalidPath()
{
testQueryThrows(
"SELECT "
@ -2259,7 +2259,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testJsonQuery() throws Exception
public void testJsonQuery()
{
testQuery(
"SELECT JSON_QUERY(nester, '$.n'), JSON_QUERY(nester, '$')\n"
@ -2311,7 +2311,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testJsonQueryAndJsonObject() throws Exception
public void testJsonQueryAndJsonObject()
{
testQuery(
"SELECT JSON_OBJECT(KEY 'n' VALUE JSON_QUERY(nester, '$.n'), KEY 'x' VALUE JSON_VALUE(nest, '$.x'))\n"
@ -2359,7 +2359,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
}
@Test
public void testToJsonAndParseJson() throws Exception
public void testToJsonAndParseJson()
{
testQuery(
"SELECT string, TO_JSON(string), PARSE_JSON(string), PARSE_JSON('{\"foo\":1}'), PARSE_JSON(TO_JSON_STRING(nester))\n"

View File

@ -56,7 +56,7 @@ import java.util.List;
public class CalciteParameterQueryTest extends BaseCalciteQueryTest
{
@Test
public void testSelectConstantParamGetsConstant() throws Exception
public void testSelectConstantParamGetsConstant()
{
testQuery(
"SELECT 1 + ?",
@ -82,7 +82,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testParamsGetOptimizedIntoConstant() throws Exception
public void testParamsGetOptimizedIntoConstant()
{
testQuery(
"SELECT 1 + ?, dim1 FROM foo LIMIT ?",
@ -108,7 +108,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testParametersInSelectAndFilter() throws Exception
public void testParametersInSelectAndFilter()
{
testQuery(
PLANNER_CONFIG_DEFAULT,
@ -140,7 +140,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectTrimFamilyWithParameters() throws Exception
public void testSelectTrimFamilyWithParameters()
{
// TRIM has some whacky parsing. Abuse this to test a bunch of parameters
@ -207,7 +207,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testParamsInInformationSchema() throws Exception
public void testParamsInInformationSchema()
{
// Not including COUNT DISTINCT, since it isn't supported by BindableAggregate, and so it can't work.
testQuery(
@ -231,7 +231,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testParamsInSelectExpressionAndLimit() throws Exception
public void testParamsInSelectExpressionAndLimit()
{
testQuery(
"SELECT SUBSTRING(dim2, ?, ?) FROM druid.foo LIMIT ?",
@ -261,7 +261,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testParamsTuckedInACast() throws Exception
public void testParamsTuckedInACast()
{
testQuery(
"SELECT dim1, m1, COUNT(*) FROM druid.foo WHERE m1 - CAST(? as INT) = dim1 GROUP BY dim1, m1",
@ -294,7 +294,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testParametersInStrangePlaces() throws Exception
public void testParametersInStrangePlaces()
{
testQuery(
"SELECT\n"
@ -338,7 +338,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testParametersInCases() throws Exception
public void testParametersInCases()
{
testQuery(
"SELECT\n"
@ -372,7 +372,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
@Test
public void testTimestamp() throws Exception
public void testTimestamp()
{
// with millis
testQuery(
@ -409,7 +409,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testTimestampString() throws Exception
public void testTimestampString()
{
// with timestampstring
testQuery(
@ -445,7 +445,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testDate() throws Exception
public void testDate()
{
// with date from millis
@ -482,7 +482,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testDoubles() throws Exception
public void testDoubles()
{
testQuery(
"SELECT COUNT(*) FROM druid.foo WHERE cnt > ? and cnt < ?",
@ -531,7 +531,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testFloats() throws Exception
public void testFloats()
{
testQuery(
"SELECT COUNT(*) FROM druid.foo WHERE cnt = ?",
@ -553,7 +553,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testLongs() throws Exception
public void testLongs()
{
testQuery(
"SELECT COUNT(*)\n"
@ -575,7 +575,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMissingParameter() throws Exception
public void testMissingParameter()
{
expectedException.expect(SqlPlanningException.class);
expectedException.expectMessage("Parameter at position [0] is not bound");
@ -590,7 +590,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testPartiallyMissingParameter() throws Exception
public void testPartiallyMissingParameter()
{
expectedException.expect(SqlPlanningException.class);
expectedException.expectMessage("Parameter at position [1] is not bound");
@ -605,7 +605,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testPartiallyMissingParameterInTheMiddle() throws Exception
public void testPartiallyMissingParameterInTheMiddle()
{
List<SqlParameter> params = new ArrayList<>();
params.add(null);
@ -621,7 +621,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testWrongTypeParameter() throws Exception
public void testWrongTypeParameter()
{
if (!useDefault) {
// cannot vectorize inline datasource
@ -666,7 +666,7 @@ public class CalciteParameterQueryTest extends BaseCalciteQueryTest
}
@Test
public void testNullParameter() throws Exception
public void testNullParameter()
{
cannotVectorize();
// contrived example of using null as an SQL parameter, at least to test the code path, because lots of things don't

View File

@ -44,6 +44,7 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
@ -572,7 +573,7 @@ public class CalciteReplaceDmlTest extends CalciteIngestionDmlTest
}
@Test
public void testReplaceWithPartitionedByContainingInvalidGranularity() throws Exception
public void testReplaceWithPartitionedByContainingInvalidGranularity()
{
// Throws a ValidationException, which gets converted to a SqlPlanningException before throwing to end user
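// A condensed sketch of that conversion (hedged: validateSql() is illustrative,
// not this commit's code, and SqlPlanningException is assumed to accept
// Calcite's checked ValidationException):
//   try {
//     validateSql(query);
//   }
//   catch (org.apache.calcite.tools.ValidationException e) {
//     throw new SqlPlanningException(e);   // unchecked; this is what the end user sees
//   }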
try {
@ -593,7 +594,7 @@ public class CalciteReplaceDmlTest extends CalciteIngestionDmlTest
}
@Test
public void testExplainReplaceFromExternal() throws Exception
public void testExplainReplaceFromExternal() throws IOException
{
// Skip vectorization since otherwise the "context" will change for each subtest.
skipVectorize();

View File

@ -59,7 +59,7 @@ import java.util.Map;
public class CalciteSelectQueryTest extends BaseCalciteQueryTest
{
@Test
public void testSelectConstantExpression() throws Exception
public void testSelectConstantExpression()
{
// Test with a Druid-specific function, to make sure such functions are hooked up correctly even when not
// selecting from a table.
@ -95,7 +95,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testExpressionContainingNull() throws Exception
public void testExpressionContainingNull()
{
testQuery(
"SELECT ARRAY ['Hello', NULL]",
@ -127,7 +127,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testValuesContainingNull() throws Exception
public void testValuesContainingNull()
{
testQuery(
"SELECT * FROM (VALUES (NULL, 'United States'))",
@ -155,7 +155,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultipleValuesContainingNull() throws Exception
public void testMultipleValuesContainingNull()
{
testQuery(
"SELECT * FROM (VALUES (NULL, 'United States'), ('Delhi', 'India'))",
@ -183,7 +183,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMultipleValuesContainingNullAndIntegerValues() throws Exception
public void testMultipleValuesContainingNullAndIntegerValues()
{
testQuery(
"SELECT * FROM (VALUES (NULL, 'United States'), (50, 'India'))",
@ -211,7 +211,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectNonNumericNumberLiterals() throws Exception
public void testSelectNonNumericNumberLiterals()
{
// Tests to convert NaN, positive infinity and negative infinity as literals.
testQuery(
@ -258,7 +258,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
// Test that the integers are correctly cast after being passed through a function when not selecting from
// a table
@Test
public void testDruidLogicalValuesRule() throws Exception
public void testDruidLogicalValuesRule()
{
testQuery(
"SELECT FLOOR(123), CEIL(123), CAST(123.0 AS INTEGER)",
@ -287,7 +287,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectConstantExpressionFromTable() throws Exception
public void testSelectConstantExpressionFromTable()
{
testQuery(
"SELECT 1 + 1, dim1 FROM foo LIMIT 1",
@ -309,7 +309,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectConstantExpressionEquivalentToNaN() throws Exception
public void testSelectConstantExpressionEquivalentToNaN()
{
expectedException.expectMessage(
"'(log10(0) - log10(0))' evaluates to 'NaN' that is not supported in SQL. You can either cast the expression as bigint ('cast((log10(0) - log10(0)) as bigint)') or char ('cast((log10(0) - log10(0)) as char)') or change the expression itself");
@ -321,7 +321,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectConstantExpressionEquivalentToInfinity() throws Exception
public void testSelectConstantExpressionEquivalentToInfinity()
{
expectedException.expectMessage(
"'log10(0)' evaluates to '-Infinity' that is not supported in SQL. You can either cast the expression as bigint ('cast(log10(0) as bigint)') or char ('cast(log10(0) as char)') or change the expression itself");
@ -333,7 +333,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectTrimFamily() throws Exception
public void testSelectTrimFamily()
{
// TRIM has some whacky parsing. Make sure the different forms work.
@ -381,7 +381,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectPadFamily() throws Exception
public void testSelectPadFamily()
{
testQuery(
"SELECT\n"
@ -417,7 +417,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testBitwiseExpressions() throws Exception
public void testBitwiseExpressions()
{
List<Object[]> expected;
if (useDefault) {
@ -475,7 +475,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSafeDivideExpressions() throws Exception
public void testSafeDivideExpressions()
{
List<Object[]> expected;
if (useDefault) {
@ -525,7 +525,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testExplainSelectConstantExpression() throws Exception
public void testExplainSelectConstantExpression()
{
// Skip vectorization since otherwise the "context" will change for each subtest.
skipVectorize();
@ -569,7 +569,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStarWithDimFilter() throws Exception
public void testSelectStarWithDimFilter()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
@ -600,7 +600,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectDistinctWithCascadeExtractionFilter() throws Exception
public void testSelectDistinctWithCascadeExtractionFilter()
{
testQuery(
"SELECT distinct dim1 FROM druid.foo WHERE substring(substring(dim1, 2), 1, 1) = 'e' OR dim2 = 'a'",
@ -635,7 +635,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectDistinctWithStrlenFilter() throws Exception
public void testSelectDistinctWithStrlenFilter()
{
// Cannot vectorize due to usage of expressions.
cannotVectorize();
@ -675,7 +675,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectDistinctWithLimit() throws Exception
public void testSelectDistinctWithLimit()
{
// Should use topN even if approximate topNs are off, because this query is exact.
@ -708,7 +708,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectDistinctWithSortAsOuterQuery() throws Exception
public void testSelectDistinctWithSortAsOuterQuery()
{
testQuery(
"SELECT * FROM (SELECT DISTINCT dim2 FROM druid.foo ORDER BY dim2) LIMIT 10",
@ -739,7 +739,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectDistinctWithSortAsOuterQuery2() throws Exception
public void testSelectDistinctWithSortAsOuterQuery2()
{
testQuery(
"SELECT * FROM (SELECT DISTINCT dim2 FROM druid.foo ORDER BY dim2 LIMIT 5) LIMIT 10",
@ -770,7 +770,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectDistinctWithSortAsOuterQuery3() throws Exception
public void testSelectDistinctWithSortAsOuterQuery3()
{
testQuery(
"SELECT * FROM (SELECT DISTINCT dim2 FROM druid.foo ORDER BY dim2 DESC LIMIT 5) LIMIT 10",
@ -801,7 +801,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectNonAggregatingWithLimitLiterallyZero() throws Exception
public void testSelectNonAggregatingWithLimitLiterallyZero()
{
// Query reduces to LIMIT 0.
@ -827,7 +827,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectNonAggregatingWithLimitReducedToZero() throws Exception
public void testSelectNonAggregatingWithLimitReducedToZero()
{
// Query reduces to LIMIT 0.
@ -852,7 +852,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectAggregatingWithLimitReducedToZero() throws Exception
public void testSelectAggregatingWithLimitReducedToZero()
{
// Query reduces to LIMIT 0.
@ -877,7 +877,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectCurrentTimeAndDateLosAngeles() throws Exception
public void testSelectCurrentTimeAndDateLosAngeles()
{
final Map<String, Object> context = new HashMap<>(QUERY_CONTEXT_DEFAULT);
context.put(PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00.123Z");
@ -953,7 +953,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectCurrentTimePrecisionTooHigh() throws Exception
public void testSelectCurrentTimePrecisionTooHigh()
{
testQueryThrows(
"SELECT CURRENT_TIMESTAMP(4)",
@ -967,7 +967,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectCountStar() throws Exception
public void testSelectCountStar()
{
// a timeseries query with 'all' granularity has a single group, so it should return default results for the given
// aggregators: count is 0, and sum is null in SQL-compatible mode or 0.0 in default mode.
@ -1058,7 +1058,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStarFromLookup() throws Exception
public void testSelectStarFromLookup()
{
testQuery(
"SELECT * FROM lookup.lookyloo",
@ -1080,7 +1080,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStar() throws Exception
public void testSelectStar()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
@ -1108,7 +1108,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStarOnForbiddenTable() throws Exception
public void testSelectStarOnForbiddenTable()
{
assertQueryIsForbidden(
"SELECT * FROM druid.forbiddenDatasource",
@ -1152,7 +1152,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStarOnForbiddenView() throws Exception
public void testSelectStarOnForbiddenView()
{
assertQueryIsForbidden(
"SELECT * FROM view.forbiddenView",
@ -1193,7 +1193,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStarOnRestrictedView() throws Exception
public void testSelectStarOnRestrictedView()
{
testQuery(
PLANNER_CONFIG_DEFAULT,
@ -1245,7 +1245,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testUnqualifiedTableName() throws Exception
public void testUnqualifiedTableName()
{
testQuery(
"SELECT COUNT(*) FROM foo",
@ -1265,7 +1265,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testExplainSelectStar() throws Exception
public void testExplainSelectStar()
{
// Skip vectorization since otherwise the "context" will change for each subtest.
skipVectorize();
@ -1310,7 +1310,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStarWithLimit() throws Exception
public void testSelectStarWithLimit()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
@ -1335,7 +1335,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStarWithLimitAndOffset() throws Exception
public void testSelectStarWithLimitAndOffset()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
@ -1361,7 +1361,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectWithProjection() throws Exception
public void testSelectWithProjection()
{
testQuery(
"SELECT SUBSTRING(dim2, 1, 1) FROM druid.foo LIMIT 2",
@ -1386,7 +1386,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectWithExpressionFilter() throws Exception
public void testSelectWithExpressionFilter()
{
testQuery(
"SELECT dim1 FROM druid.foo WHERE m1 + 1 = 7",
@ -1410,7 +1410,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStarWithLimitTimeDescending() throws Exception
public void testSelectStarWithLimitTimeDescending()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
@ -1436,7 +1436,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStarWithoutLimitTimeAscending() throws Exception
public void testSelectStarWithoutLimitTimeAscending()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
@ -1467,7 +1467,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
@Test
public void testSelectSingleColumnTwice() throws Exception
public void testSelectSingleColumnTwice()
{
testQuery(
"SELECT dim2 x, dim2 y FROM druid.foo LIMIT 2",
@ -1489,7 +1489,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectSingleColumnWithLimitDescending() throws Exception
public void testSelectSingleColumnWithLimitDescending()
{
testQuery(
"SELECT dim1 FROM druid.foo ORDER BY __time DESC LIMIT 2",
@ -1512,7 +1512,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectStarFromSelectSingleColumnWithLimitDescending() throws Exception
public void testSelectStarFromSelectSingleColumnWithLimitDescending()
{
// After upgrading to Calcite 1.21, Calcite no longer respects the ORDER BY __time DESC
// in the inner query. This is valid, as the SQL standard considers the subquery results to be an unordered
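// To spell that out (standard SQL semantics, not specific to this change):
//   SELECT dim1 FROM (SELECT dim1 FROM foo ORDER BY __time DESC LIMIT 2)
// fixes WHICH two rows the subquery keeps, but not the order the outer query
// returns them in; only an outermost ORDER BY pins the output order.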
@ -1538,7 +1538,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectLimitWrapping() throws Exception
public void testSelectLimitWrapping()
{
testQuery(
"SELECT dim1 FROM druid.foo ORDER BY __time DESC",
@ -1562,7 +1562,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectLimitWrappingOnTopOfOffset() throws Exception
public void testSelectLimitWrappingOnTopOfOffset()
{
testQuery(
"SELECT dim1 FROM druid.foo ORDER BY __time DESC OFFSET 1",
@ -1587,7 +1587,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectLimitWrappingOnTopOfOffsetAndLowLimit() throws Exception
public void testSelectLimitWrappingOnTopOfOffsetAndLowLimit()
{
testQuery(
"SELECT dim1 FROM druid.foo ORDER BY __time DESC LIMIT 1 OFFSET 1",
@ -1611,7 +1611,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectLimitWrappingOnTopOfOffsetAndHighLimit() throws Exception
public void testSelectLimitWrappingOnTopOfOffsetAndHighLimit()
{
testQuery(
"SELECT dim1 FROM druid.foo ORDER BY __time DESC LIMIT 10 OFFSET 1",
@ -1636,7 +1636,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectLimitWrappingAgainAkaIDontReallyQuiteUnderstandCalciteQueryPlanning() throws Exception
public void testSelectLimitWrappingAgainAkaIDontReallyQuiteUnderstandCalciteQueryPlanning()
{
// this test is for a specific bug encountered where the 2nd query would not plan with auto limit wrapping, but if
// *any* column was removed from the select output, e.g. the first query in this test, then it does plan and
@ -1784,7 +1784,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectProjectionFromSelectSingleColumnWithInnerLimitDescending() throws Exception
public void testSelectProjectionFromSelectSingleColumnWithInnerLimitDescending()
{
testQuery(
"SELECT 'beep ' || dim1 FROM (SELECT dim1 FROM druid.foo ORDER BY __time DESC LIMIT 2)",
@ -1808,7 +1808,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectProjectionFromSelectSingleColumnDescending() throws Exception
public void testSelectProjectionFromSelectSingleColumnDescending()
{
// Regression test for https://github.com/apache/druid/issues/7768.
@ -1840,7 +1840,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testSelectProjectionFromSelectSingleColumnWithInnerAndOuterLimitDescending() throws Exception
public void testSelectProjectionFromSelectSingleColumnWithInnerAndOuterLimitDescending()
{
testQuery(
"SELECT 'beep ' || dim1 FROM (SELECT dim1 FROM druid.foo ORDER BY __time DESC LIMIT 4) LIMIT 2",
@ -1864,7 +1864,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
}
@Test
public void testOrderThenLimitThenFilter() throws Exception
public void testOrderThenLimitThenFilter()
{
testQuery(
"SELECT dim1 FROM "

View File

@ -42,7 +42,7 @@ import org.junit.Test;
public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
{
@Test
public void testGroupByTimeAndDim() throws Exception
public void testGroupByTimeAndDim()
{
testQuery(
"SELECT FLOOR(__time TO MONTH), dim2, SUM(cnt)\n"
@ -90,7 +90,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByDimAndTime() throws Exception
public void testGroupByDimAndTime()
{
testQuery(
"SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n"
@ -138,7 +138,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByDimAndTimeWhereOnTime() throws Exception
public void testGroupByDimAndTimeWhereOnTime()
{
testQuery(
"SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n"
@ -182,7 +182,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByDimAndTimeOnDim() throws Exception
public void testGroupByDimAndTimeOnDim()
{
testQuery(
"SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n"
@ -227,7 +227,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByTimeAndDimOrderByDim() throws Exception
public void testGroupByTimeAndDimOrderByDim()
{
testQuery(
"SELECT FLOOR(__time TO MONTH), dim2, SUM(cnt)\n"
@ -282,7 +282,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByTimeAndDimOrderByDimDesc() throws Exception
public void testGroupByTimeAndDimOrderByDimDesc()
{
testQuery(
"SELECT FLOOR(__time TO MONTH), dim2, SUM(cnt)\n"
@ -337,7 +337,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByDimAndTimeOrderByTime() throws Exception
public void testGroupByDimAndTimeOrderByTime()
{
testQuery(
"SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n"
@ -394,7 +394,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByDimAndTimeOrderByTimeDesc() throws Exception
public void testGroupByDimAndTimeOrderByTimeDesc()
{
testQuery(
"SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n"
@ -451,7 +451,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByDimAndTimeOrderByTimeAndDim() throws Exception
public void testGroupByDimAndTimeOrderByTimeAndDim()
{
testQuery(
"SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n"
@ -509,7 +509,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByDimAndTimeOrderByDimAndTime() throws Exception
public void testGroupByDimAndTimeOrderByDimAndTime()
{
testQuery(
"SELECT dim2, FLOOR(__time TO MONTH), SUM(cnt)\n"
@ -567,7 +567,7 @@ public class CalciteSimpleQueryTest extends BaseCalciteQueryTest
}
@Test
public void testGroupByDimAndTimeAndDimOrderByDimAndTimeDim() throws Exception
public void testGroupByDimAndTimeAndDimOrderByDimAndTimeDim()
{
testQuery(
"SELECT dim2, FLOOR(__time TO MONTH), dim1, SUM(cnt)\n"

View File

@ -37,7 +37,7 @@ public class CalciteTimeBoundaryQueryTest extends BaseCalciteQueryTest
{
// __time for foo is [2000-01-01, 2000-01-02, 2000-01-03, 2001-01-01, 2001-01-02, 2001-01-03]
@Test
public void testMaxTimeQuery() throws Exception
public void testMaxTimeQuery()
{
HashMap<String, Object> queryContext = new HashMap<>(QUERY_CONTEXT_DEFAULT);
queryContext.put(QueryContexts.TIME_BOUNDARY_PLANNING_KEY, true);
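// Opt-in flag: only when timeBoundary planning is enabled does a lone
// MIN(__time) or MAX(__time) plan into a timeBoundary query (see the note
// before testMinMaxTimeQuery below for the case that still does not convert).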
@ -58,7 +58,7 @@ public class CalciteTimeBoundaryQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMinTimeQuery() throws Exception
public void testMinTimeQuery()
{
HashMap<String, Object> queryContext = new HashMap<>(QUERY_CONTEXT_DEFAULT);
queryContext.put(QueryContexts.TIME_BOUNDARY_PLANNING_KEY, true);
@ -79,7 +79,7 @@ public class CalciteTimeBoundaryQueryTest extends BaseCalciteQueryTest
}
@Test
public void testMinTimeQueryWithFilters() throws Exception
public void testMinTimeQueryWithFilters()
{
HashMap<String, Object> queryContext = new HashMap<>(QUERY_CONTEXT_DEFAULT);
queryContext.put(QueryContexts.TIME_BOUNDARY_PLANNING_KEY, true);
@ -107,7 +107,7 @@ public class CalciteTimeBoundaryQueryTest extends BaseCalciteQueryTest
// Currently, if both min(__time) and max(__time) are present, we don't convert it
// to a timeBoundary query. (ref : https://github.com/apache/druid/issues/12479)
@Test
public void testMinMaxTimeQuery() throws Exception
public void testMinMaxTimeQuery()
{
HashMap<String, Object> context = new HashMap<>(QUERY_CONTEXT_DEFAULT);
context.put(QueryContexts.TIME_BOUNDARY_PLANNING_KEY, true);

View File

@ -285,7 +285,10 @@ public class DruidPlannerResourceAnalyzeTest extends BaseCalciteQueryTest
authConfig,
sql,
context,
CalciteTests.REGULAR_USER_AUTH_RESULT
// Use superuser because, in tests, only the superuser has
// permission on system tables, and we must do authorization to
// obtain resources.
CalciteTests.SUPER_USER_AUTH_RESULT
);
final Set<ResourceAction> expectedResources = new HashSet<>();
if (name != null) {

View File

@ -40,7 +40,6 @@ import org.apache.druid.segment.generator.SegmentGenerator;
import org.apache.druid.server.QueryStackTests;
import org.apache.druid.server.security.AuthTestUtils;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
@ -111,7 +110,6 @@ public class SqlVectorizedExpressionSanityTest extends InitializedNullHandlingTe
@BeforeClass
public static void setupClass()
{
Calcites.setSystemProperties();
ExpressionProcessing.initializeForStrictBooleansTests(true);
CLOSER = Closer.create();

View File

@ -37,7 +37,6 @@ import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.easymock.EasyMock;
import org.joda.time.Period;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
@ -52,14 +51,7 @@ import java.util.concurrent.CountDownLatch;
public abstract class SegmentMetadataCacheCommon extends CalciteTestBase
{
static final PlannerConfig PLANNER_CONFIG_DEFAULT = new PlannerConfig()
{
@Override
public Period getMetadataRefreshPeriod()
{
return new Period("PT1S");
}
};
static final PlannerConfig PLANNER_CONFIG_DEFAULT = PlannerConfig.builder().metadataRefreshPeriod("PT1S").build();
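// The builder introduced by this refactor replaces the anonymous-subclass
// override removed above; settings chain before build(). A sketch using only
// setters that appear elsewhere in this diff:
//   PlannerConfig custom = PlannerConfig.builder()
//       .metadataRefreshPeriod("PT1S")
//       .serializeComplexValues(false)
//       .build();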
static final List<InputRow> ROWS1 = ImmutableList.of(
CalciteTests.createRow(ImmutableMap.of("t", "2000-01-01", "m1", "1.0", "dim1", "")),

View File

@ -25,7 +25,6 @@ import org.apache.druid.math.expr.ExpressionProcessing;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.SimpleExtraction;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.http.SqlParameter;
import org.junit.BeforeClass;
@ -39,7 +38,6 @@ public abstract class CalciteTestBase
@BeforeClass
public static void setupCalciteProperties()
{
Calcites.setSystemProperties();
NullHandling.initializeForTests();
ExpressionProcessing.initializeForTests(null);
}

View File

@ -112,7 +112,8 @@ import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.server.security.Escalator;
import org.apache.druid.server.security.NoopEscalator;
import org.apache.druid.server.security.ResourceType;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.SqlLifecycleManager;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
@ -789,23 +790,24 @@ public class CalciteTests
);
}
public static SqlLifecycleFactory createSqlLifecycleFactory(final PlannerFactory plannerFactory)
public static SqlStatementFactory createSqlLifecycleFactory(final PlannerFactory plannerFactory)
{
return createSqlLifecycleFactory(plannerFactory, new AuthConfig());
}
public static SqlLifecycleFactory createSqlLifecycleFactory(
public static SqlStatementFactory createSqlLifecycleFactory(
final PlannerFactory plannerFactory,
final AuthConfig authConfig
)
{
return new SqlLifecycleFactory(
return new SqlStatementFactory(
plannerFactory,
new ServiceEmitter("dummy", "dummy", new NoopEmitter()),
new NoopRequestLogger(),
QueryStackTests.DEFAULT_NOOP_SCHEDULER,
authConfig,
Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of()))
Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of())),
new SqlLifecycleManager()
);
}
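// Note the extra constructor argument: SqlStatementFactory now receives the
// SqlLifecycleManager, so the statements it produces can be tracked for
// cancellation (SqlResourceTest overrides SqlLifecycleManager.add() below to
// observe exactly that).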
@ -868,6 +870,7 @@ public class CalciteTests
);
}
@SuppressWarnings("resource")
public static SpecificSegmentsQuerySegmentWalker createMockWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final File tmpDir,

View File

@ -80,7 +80,7 @@ public class QueryLogHook implements TestRule
final Consumer<Object> function = query -> {
try {
recordedQueries.add((Query) query);
recordedQueries.add((Query<?>) query);
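// (The Query<?> wildcard above replaces the raw Query cast: it silences the
// raw-type warning without claiming a specific result type.)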
log.info(
"Issued query: %s",
objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(query)

View File

@ -29,7 +29,7 @@ import org.apache.druid.guice.LifecycleModule;
import org.apache.druid.guice.annotations.JSR311Resource;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.sql.SqlStatementFactory;
import org.easymock.EasyMockRunner;
import org.easymock.Mock;
import org.junit.Assert;
@ -46,7 +46,7 @@ public class SqlHttpModuleTest
@Mock
private ObjectMapper jsonMapper;
@Mock
private SqlLifecycleFactory sqlLifecycleFactory;
private SqlStatementFactory sqlLifecycleFactory;
private SqlHttpModule target;
private Injector injector;
@ -60,7 +60,7 @@ public class SqlHttpModuleTest
new DruidGuiceExtensions(),
binder -> {
binder.bind(ObjectMapper.class).annotatedWith(Json.class).toInstance(jsonMapper);
binder.bind(SqlLifecycleFactory.class).toInstance(sqlLifecycleFactory);
binder.bind(SqlStatementFactory.class).toInstance(sqlLifecycleFactory);
binder.bind(AuthorizerMapper.class).toInstance(new AuthorizerMapper(Collections.emptyMap()));
},
target

View File

@ -31,7 +31,6 @@ import com.google.common.collect.Maps;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import org.apache.calcite.avatica.SqlType;
import org.apache.calcite.tools.RelConversionException;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.common.exception.AllowedRegexErrorResponseTransformStrategy;
import org.apache.druid.common.exception.ErrorResponseTransformStrategy;
@ -63,23 +62,28 @@ import org.apache.druid.query.groupby.GroupByQueryConfig;
import org.apache.druid.server.QueryScheduler;
import org.apache.druid.server.QueryStackTests;
import org.apache.druid.server.initialization.ServerConfig;
import org.apache.druid.server.log.RequestLogger;
import org.apache.druid.server.log.TestRequestLogger;
import org.apache.druid.server.metrics.NoopServiceEmitter;
import org.apache.druid.server.scheduling.HiLoQueryLaningStrategy;
import org.apache.druid.server.scheduling.ManualQueryPrioritizationStrategy;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.sql.SqlLifecycle;
import org.apache.druid.sql.SqlLifecycleFactory;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.sql.DirectStatement;
import org.apache.druid.sql.HttpStatement;
import org.apache.druid.sql.SqlLifecycleManager;
import org.apache.druid.sql.SqlPlanningException.PlanningError;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.SqlToolbox;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.planner.PlannerResult;
import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException;
import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
@ -101,6 +105,7 @@ import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.StreamingOutput;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@ -111,10 +116,12 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
@ -122,6 +129,8 @@ public class SqlResourceTest extends CalciteTestBase
{
private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
private static final String DUMMY_SQL_QUERY_ID = "dummy";
private static final int WAIT_TIMEOUT_SECS = 3;
private static final Consumer<DirectStatement> NULL_ACTION = s -> {};
private static final List<String> EXPECTED_COLUMNS_FOR_RESULT_FORMAT_TESTS =
Arrays.asList("__time", "dim1", "dim2", "dim3", "cnt", "m1", "m2", "unique_dim1", "EXPR$8");
@ -138,21 +147,22 @@ public class SqlResourceTest extends CalciteTestBase
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Rule
public QueryLogHook queryLogHook = QueryLogHook.create();
private SpecificSegmentsQuerySegmentWalker walker = null;
private SpecificSegmentsQuerySegmentWalker walker;
private TestRequestLogger testRequestLogger;
private SqlResource resource;
private HttpServletRequest req;
private ListeningExecutorService executorService;
private SqlLifecycleManager lifecycleManager;
private SqlLifecycleFactory sqlLifecycleFactory;
private SqlStatementFactory sqlLifecycleFactory;
private CountDownLatch lifecycleAddLatch;
private final SettableSupplier<NonnullPair<CountDownLatch, Boolean>> validateAndAuthorizeLatchSupplier = new SettableSupplier<>();
private final SettableSupplier<NonnullPair<CountDownLatch, Boolean>> planLatchSupplier = new SettableSupplier<>();
private final SettableSupplier<NonnullPair<CountDownLatch, Boolean>> executeLatchSupplier = new SettableSupplier<>();
private final SettableSupplier<Function<Sequence<Object[]>, Sequence<Object[]>>> sequenceMapFnSupplier = new SettableSupplier<>();
private Consumer<DirectStatement> onExecute = NULL_ACTION;
private boolean sleep = false;
private boolean sleep;
@BeforeClass
public static void setUpClass()
@ -200,14 +210,7 @@ public class SqlResourceTest extends CalciteTestBase
executorService = MoreExecutors.listeningDecorator(Execs.multiThreaded(8, "test_sql_resource_%s"));
walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder(), scheduler);
final PlannerConfig plannerConfig = new PlannerConfig()
{
@Override
public boolean shouldSerializeComplexValues()
{
return false;
}
};
final PlannerConfig plannerConfig = PlannerConfig.builder().serializeComplexValues(false).build();
final DruidSchemaCatalog rootSchema = CalciteTests.createMockRootSchema(
conglomerate,
walker,
@ -216,24 +219,7 @@ public class SqlResourceTest extends CalciteTestBase
);
final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
final ExprMacroTable macroTable = CalciteTests.createExprMacroTable();
req = EasyMock.createStrictMock(HttpServletRequest.class);
EasyMock.expect(req.getRemoteAddr()).andReturn(null).once();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT)
.anyTimes();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED))
.andReturn(null)
.anyTimes();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT)
.anyTimes();
req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
EasyMock.expectLastCall().anyTimes();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT)
.anyTimes();
EasyMock.replay(req);
req = request(true);
testRequestLogger = new TestRequestLogger();
@ -252,7 +238,7 @@ public class SqlResourceTest extends CalciteTestBase
lifecycleManager = new SqlLifecycleManager()
{
@Override
public void add(String sqlQueryId, SqlLifecycle lifecycle)
public void add(String sqlQueryId, Cancelable lifecycle)
{
super.add(sqlQueryId, lifecycle);
if (lifecycleAddLatch != null) {
@ -263,31 +249,34 @@ public class SqlResourceTest extends CalciteTestBase
final ServiceEmitter emitter = new NoopServiceEmitter();
final AuthConfig authConfig = new AuthConfig();
final DefaultQueryConfig defaultQueryConfig = new DefaultQueryConfig(ImmutableMap.of());
sqlLifecycleFactory = new SqlLifecycleFactory(
sqlLifecycleFactory = new SqlStatementFactory(
plannerFactory,
emitter,
testRequestLogger,
scheduler,
authConfig,
Suppliers.ofInstance(defaultQueryConfig)
Suppliers.ofInstance(defaultQueryConfig),
lifecycleManager
)
{
@Override
public SqlLifecycle factorize()
public HttpStatement httpStatement(
final SqlQuery sqlQuery,
final HttpServletRequest req
)
{
return new TestSqlLifecycle(
plannerFactory,
emitter,
testRequestLogger,
scheduler,
authConfig,
System.currentTimeMillis(),
System.nanoTime(),
TestHttpStatement stmt = new TestHttpStatement(
lifecycleToolbox,
sqlQuery,
req,
validateAndAuthorizeLatchSupplier,
planLatchSupplier,
executeLatchSupplier,
sequenceMapFnSupplier
sequenceMapFnSupplier,
onExecute
);
onExecute = NULL_ACTION;
return stmt;
}
};
resource = new SqlResource(
@ -299,6 +288,11 @@ public class SqlResourceTest extends CalciteTestBase
);
}
HttpServletRequest request(boolean ok)
{
return makeExpectedReq(CalciteTests.REGULAR_USER_AUTH_RESULT, ok);
}
@After
public void tearDown() throws Exception
{
@ -311,21 +305,7 @@ public class SqlResourceTest extends CalciteTestBase
@Test
public void testUnauthorized() throws Exception
{
HttpServletRequest testRequest = EasyMock.createStrictMock(HttpServletRequest.class);
EasyMock.expect(testRequest.getRemoteAddr()).andReturn(null).once();
EasyMock.expect(testRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT)
.anyTimes();
EasyMock.expect(testRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
EasyMock.expect(testRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED))
.andReturn(null)
.anyTimes();
EasyMock.expect(testRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT)
.anyTimes();
testRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, false);
EasyMock.expectLastCall().once();
EasyMock.replay(testRequest);
HttpServletRequest testRequest = request(false);
try {
resource.doPost(
@ -358,7 +338,6 @@ public class SqlResourceTest extends CalciteTestBase
Assert.assertTrue(lifecycleManager.getAll("id").isEmpty());
}
@Test
public void testCountStarExtendedCharacters() throws Exception
{
@ -1284,16 +1263,29 @@ public class SqlResourceTest extends CalciteTestBase
Assert.assertTrue(lifecycleManager.getAll("id").isEmpty());
}
private void failOnExecute(String errorMessage)
{
onExecute = s -> {
throw new QueryUnsupportedException(errorMessage);
};
}
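// The hook is one-shot: httpStatement() above hands the current onExecute to
// the TestHttpStatement it builds and then resets the field to NULL_ACTION,
// so each armed failure fires for at most one statement.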
@Test
public void testUnsupportedQueryThrowsException() throws Exception
{
String errorMessage = "This will be support in Druid 9999";
SqlQuery badQuery = EasyMock.createMock(SqlQuery.class);
EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE");
EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of(BaseQuery.SQL_QUERY_ID, "id"));
EasyMock.expect(badQuery.getParameterList()).andThrow(new QueryUnsupportedException(errorMessage));
EasyMock.replay(badQuery);
final QueryException exception = doPost(badQuery).lhs;
String errorMessage = "This will be supported in Druid 9999";
failOnExecute(errorMessage);
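// The three booleans below are, in order, the header / typesHeader /
// sqlTypesHeader flags (names assumed from SqlQuery's signature at this
// revision; the diff itself only shows the positional values).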
final QueryException exception = doPost(
new SqlQuery(
"SELECT ANSWER TO LIFE",
ResultFormat.OBJECT,
false,
false,
false,
ImmutableMap.of(BaseQuery.SQL_QUERY_ID, "id"),
null
)
).lhs;
Assert.assertNotNull(exception);
Assert.assertEquals(QueryUnsupportedException.ERROR_CODE, exception.getErrorCode());
@ -1305,13 +1297,19 @@ public class SqlResourceTest extends CalciteTestBase
public void testErrorResponseReturnSameQueryIdWhenSetInContext() throws Exception
{
String queryId = "id123";
String errorMessage = "This will be support in Druid 9999";
SqlQuery badQuery = EasyMock.createMock(SqlQuery.class);
EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE");
EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of("sqlQueryId", queryId));
EasyMock.expect(badQuery.getParameterList()).andThrow(new QueryUnsupportedException(errorMessage));
EasyMock.replay(badQuery);
final Response response = resource.doPost(badQuery, req);
String errorMessage = "This will be supported in Druid 9999";
failOnExecute(errorMessage);
final Response response = resource.doPost(
new SqlQuery(
"SELECT ANSWER TO LIFE",
ResultFormat.OBJECT,
false,
false,
false,
ImmutableMap.of("sqlQueryId", queryId),
null
),
req);
Assert.assertNotEquals(200, response.getStatus());
final MultivaluedMap<String, Object> headers = response.getMetadata();
Assert.assertTrue(headers.containsKey(SqlResource.SQL_QUERY_ID_RESPONSE_HEADER));
@ -1322,13 +1320,19 @@ public class SqlResourceTest extends CalciteTestBase
@Test
public void testErrorResponseReturnNewQueryIdWhenNotSetInContext() throws Exception
{
String errorMessage = "This will be support in Druid 9999";
SqlQuery badQuery = EasyMock.createMock(SqlQuery.class);
EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE");
EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of());
EasyMock.expect(badQuery.getParameterList()).andThrow(new QueryUnsupportedException(errorMessage));
EasyMock.replay(badQuery);
final Response response = resource.doPost(badQuery, req);
String errorMessage = "This will be supported in Druid 9999";
failOnExecute(errorMessage);
final Response response = resource.doPost(
new SqlQuery(
"SELECT ANSWER TO LIFE",
ResultFormat.OBJECT,
false,
false,
false,
ImmutableMap.of(),
null
),
req);
Assert.assertNotEquals(200, response.getStatus());
final MultivaluedMap<String, Object> headers = response.getMetadata();
Assert.assertTrue(headers.containsKey(SqlResource.SQL_QUERY_ID_RESPONSE_HEADER));
@ -1344,8 +1348,7 @@ public class SqlResourceTest extends CalciteTestBase
CalciteTests.TEST_AUTHORIZER_MAPPER,
sqlLifecycleFactory,
lifecycleManager,
new ServerConfig()
{
new ServerConfig() {
@Override
public boolean isShowDetailedJettyErrors()
{
@ -1360,13 +1363,19 @@ public class SqlResourceTest extends CalciteTestBase
}
);
String errorMessage = "This will be support in Druid 9999";
SqlQuery badQuery = EasyMock.createMock(SqlQuery.class);
EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE");
EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of("sqlQueryId", "id"));
EasyMock.expect(badQuery.getParameterList()).andThrow(new QueryUnsupportedException(errorMessage));
EasyMock.replay(badQuery);
final QueryException exception = doPost(badQuery).lhs;
String errorMessage = "This will be supported in Druid 9999";
failOnExecute(errorMessage);
final QueryException exception = doPost(
new SqlQuery(
"SELECT ANSWER TO LIFE",
ResultFormat.OBJECT,
false,
false,
false,
ImmutableMap.of("sqlQueryId", "id"),
null
)
).lhs;
Assert.assertNotNull(exception);
Assert.assertNull(exception.getMessage());
@ -1401,17 +1410,26 @@ public class SqlResourceTest extends CalciteTestBase
);
String errorMessage = "could not assert";
SqlQuery badQuery = EasyMock.createMock(SqlQuery.class);
EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE");
EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of("sqlQueryId", "id"));
EasyMock.expect(badQuery.getParameterList()).andThrow(new Error(errorMessage));
EasyMock.replay(badQuery);
final QueryException exception = doPost(badQuery).lhs;
onExecute = s -> {
throw new Error(errorMessage);
};
final QueryException exception = doPost(
new SqlQuery(
"SELECT ANSWER TO LIFE",
ResultFormat.OBJECT,
false,
false,
false,
ImmutableMap.of("sqlQueryId", "id"),
null
)
).lhs;
Assert.assertNotNull(exception);
Assert.assertNull(exception.getMessage());
Assert.assertNull(exception.getHost());
Assert.assertEquals(exception.getErrorCode(), QueryInterruptedException.UNKNOWN_EXCEPTION);
Assert.assertEquals(QueryInterruptedException.UNKNOWN_EXCEPTION, exception.getErrorCode());
Assert.assertNull(exception.getErrorClass());
Assert.assertTrue(lifecycleManager.getAll("id").isEmpty());
}
@ -1446,7 +1464,6 @@ public class SqlResourceTest extends CalciteTestBase
}));
}
int success = 0;
int limited = 0;
for (int i = 0; i < numQueries; i++) {
@ -1491,7 +1508,6 @@ public class SqlResourceTest extends CalciteTestBase
Assert.assertEquals(timeoutException.getErrorCode(), QueryTimeoutException.ERROR_CODE);
Assert.assertEquals(timeoutException.getErrorClass(), QueryTimeoutException.class.getName());
Assert.assertTrue(lifecycleManager.getAll(sqlQueryId).isEmpty());
}
@Test
@ -1509,8 +1525,8 @@ public class SqlResourceTest extends CalciteTestBase
makeRegularUserReq()
)
);
Assert.assertTrue(validateAndAuthorizeLatch.await(1, TimeUnit.SECONDS));
Assert.assertTrue(lifecycleAddLatch.await(1, TimeUnit.SECONDS));
Assert.assertTrue(validateAndAuthorizeLatch.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS));
Assert.assertTrue(lifecycleAddLatch.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS));
Response response = resource.cancelQuery(sqlQueryId, mockRequestForCancel());
planLatch.countDown();
Assert.assertEquals(Status.ACCEPTED.getStatusCode(), response.getStatus());
@ -1521,7 +1537,7 @@ public class SqlResourceTest extends CalciteTestBase
Assert.assertEquals(Status.INTERNAL_SERVER_ERROR.getStatusCode(), response.getStatus());
QueryException exception = JSON_MAPPER.readValue((byte[]) response.getEntity(), QueryException.class);
Assert.assertEquals(
QueryInterruptedException.QUERY_CANCELLED,
QueryInterruptedException.QUERY_CANCELED,
exception.getErrorCode()
);
}
@ -1540,7 +1556,7 @@ public class SqlResourceTest extends CalciteTestBase
makeRegularUserReq()
)
);
Assert.assertTrue(planLatch.await(1, TimeUnit.SECONDS));
Assert.assertTrue(planLatch.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS));
Response response = resource.cancelQuery(sqlQueryId, mockRequestForCancel());
execLatch.countDown();
Assert.assertEquals(Status.ACCEPTED.getStatusCode(), response.getStatus());
@ -1551,7 +1567,7 @@ public class SqlResourceTest extends CalciteTestBase
Assert.assertEquals(Status.INTERNAL_SERVER_ERROR.getStatusCode(), response.getStatus());
QueryException exception = JSON_MAPPER.readValue((byte[]) response.getEntity(), QueryException.class);
Assert.assertEquals(
QueryInterruptedException.QUERY_CANCELLED,
QueryInterruptedException.QUERY_CANCELED,
exception.getErrorCode()
);
}
@ -1570,7 +1586,7 @@ public class SqlResourceTest extends CalciteTestBase
makeRegularUserReq()
)
);
Assert.assertTrue(planLatch.await(1, TimeUnit.SECONDS));
Assert.assertTrue(planLatch.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS));
Response response = resource.cancelQuery("invalidQuery", mockRequestForCancel());
Assert.assertEquals(Status.NOT_FOUND.getStatusCode(), response.getStatus());
@ -1595,7 +1611,7 @@ public class SqlResourceTest extends CalciteTestBase
makeSuperUserReq()
)
);
Assert.assertTrue(planLatch.await(1, TimeUnit.SECONDS));
Assert.assertTrue(planLatch.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS));
Response response = resource.cancelQuery(sqlQueryId, mockRequestForCancel());
Assert.assertEquals(Status.FORBIDDEN.getStatusCode(), response.getStatus());
@ -1683,6 +1699,7 @@ public class SqlResourceTest extends CalciteTestBase
}
// Returns either an error or a result, assuming the result is a JSON object.
@SuppressWarnings("unchecked")
private <T> Pair<QueryException, T> doPost(
final SqlQuery query,
final HttpServletRequest req,
@ -1736,12 +1753,17 @@ public class SqlResourceTest extends CalciteTestBase
}
private HttpServletRequest makeExpectedReq(AuthenticationResult authenticationResult)
{
return makeExpectedReq(authenticationResult, true);
}
private HttpServletRequest makeExpectedReq(AuthenticationResult authenticationResult, boolean ok)
{
HttpServletRequest req = EasyMock.createStrictMock(HttpServletRequest.class);
EasyMock.expect(req.getRemoteAddr()).andReturn(null).once();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(authenticationResult)
.anyTimes();
EasyMock.expect(req.getRemoteAddr()).andReturn(null).once();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED))
.andReturn(null)
@ -1749,7 +1771,7 @@ public class SqlResourceTest extends CalciteTestBase
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(authenticationResult)
.anyTimes();
req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, ok);
EasyMock.expectLastCall().anyTimes();
EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
.andReturn(authenticationResult)
@ -1788,104 +1810,113 @@ public class SqlResourceTest extends CalciteTestBase
};
}
private static class TestSqlLifecycle extends SqlLifecycle
private static class TestHttpStatement extends HttpStatement
{
private final SettableSupplier<NonnullPair<CountDownLatch, Boolean>> validateAndAuthorizeLatchSupplier;
private final SettableSupplier<NonnullPair<CountDownLatch, Boolean>> planLatchSupplier;
private final SettableSupplier<NonnullPair<CountDownLatch, Boolean>> executeLatchSupplier;
private final SettableSupplier<Function<Sequence<Object[]>, Sequence<Object[]>>> sequenceMapFnSupplier;
private final Consumer<DirectStatement> onExecute;
private TestSqlLifecycle(
PlannerFactory plannerFactory,
ServiceEmitter emitter,
RequestLogger requestLogger,
QueryScheduler queryScheduler,
AuthConfig authConfig,
long startMs,
long startNs,
private TestHttpStatement(
final SqlToolbox lifecycleContext,
final SqlQuery sqlQuery,
final HttpServletRequest req,
SettableSupplier<NonnullPair<CountDownLatch, Boolean>> validateAndAuthorizeLatchSupplier,
SettableSupplier<NonnullPair<CountDownLatch, Boolean>> planLatchSupplier,
SettableSupplier<NonnullPair<CountDownLatch, Boolean>> executeLatchSupplier,
SettableSupplier<Function<Sequence<Object[]>, Sequence<Object[]>>> sequenceMapFnSupplier
SettableSupplier<Function<Sequence<Object[]>, Sequence<Object[]>>> sequenceMapFnSupplier,
final Consumer<DirectStatement> onAuthorize
)
{
super(plannerFactory, emitter, requestLogger, queryScheduler, authConfig, new DefaultQueryConfig(ImmutableMap.of()), startMs, startNs);
super(lifecycleContext, sqlQuery, req);
this.validateAndAuthorizeLatchSupplier = validateAndAuthorizeLatchSupplier;
this.planLatchSupplier = planLatchSupplier;
this.executeLatchSupplier = executeLatchSupplier;
this.sequenceMapFnSupplier = sequenceMapFnSupplier;
this.onExecute = onAuthorize;
}
@Override
public void validateAndAuthorize(HttpServletRequest req)
protected void authorize(
DruidPlanner planner,
Function<Set<ResourceAction>, Access> authorizer)
{
if (validateAndAuthorizeLatchSupplier.get() != null) {
if (validateAndAuthorizeLatchSupplier.get().rhs) {
super.validateAndAuthorize(req);
super.authorize(planner, authorizer);
validateAndAuthorizeLatchSupplier.get().lhs.countDown();
} else {
try {
if (!validateAndAuthorizeLatchSupplier.get().lhs.await(1, TimeUnit.SECONDS)) {
if (!validateAndAuthorizeLatchSupplier.get().lhs.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)) {
throw new RuntimeException("Latch timed out");
}
}
catch (InterruptedException e) {
throw new RuntimeException(e);
}
super.validateAndAuthorize(req);
super.authorize(planner, authorizer);
}
} else {
super.validateAndAuthorize(req);
super.authorize(planner, authorizer);
}
}
@Override
public void plan() throws RelConversionException
public PlannerResult plan(DruidPlanner planner)
{
if (planLatchSupplier.get() != null) {
if (planLatchSupplier.get().rhs) {
super.plan();
PlannerResult result = super.plan(planner);
planLatchSupplier.get().lhs.countDown();
return result;
} else {
try {
if (!planLatchSupplier.get().lhs.await(1, TimeUnit.SECONDS)) {
if (!planLatchSupplier.get().lhs.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)) {
throw new RuntimeException("Latch timed out");
}
}
catch (InterruptedException e) {
throw new RuntimeException(e);
}
super.plan();
return super.plan(planner);
}
} else {
super.plan();
return super.plan(planner);
}
}
@Override
public Sequence<Object[]> execute()
{
onExecute.accept(this);
return super.execute();
}
@Override
public Sequence<Object[]> doExecute()
{
final Function<Sequence<Object[]>, Sequence<Object[]>> sequenceMapFn =
Optional.ofNullable(sequenceMapFnSupplier.get()).orElse(Function.identity());
if (executeLatchSupplier.get() != null) {
if (executeLatchSupplier.get().rhs) {
Sequence<Object[]> sequence = sequenceMapFn.apply(super.execute());
Sequence<Object[]> sequence = sequenceMapFn.apply(super.doExecute());
executeLatchSupplier.get().lhs.countDown();
return sequence;
} else {
try {
if (!executeLatchSupplier.get().lhs.await(1, TimeUnit.SECONDS)) {
if (!executeLatchSupplier.get().lhs.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)) {
throw new RuntimeException("Latch timed out");
}
}
catch (InterruptedException e) {
throw new RuntimeException(e);
}
return sequenceMapFn.apply(super.execute());
return sequenceMapFn.apply(super.doExecute());
}
} else {
return sequenceMapFn.apply(super.execute());
return sequenceMapFn.apply(super.doExecute());
}
}
}
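// The three overrides above repeat one gating pattern. A condensed sketch of
// it (this helper is illustrative only, not part of this change): each
// (latch, isSignaller) pair either runs the step and signals completion, or
// blocks until another thread signals, which is how the cancellation tests
// order the authorize/plan/execute steps across threads.
private static <T> T gated(
NonnullPair<CountDownLatch, Boolean> gate,
java.util.function.Supplier<T> step
)
{
if (gate == null) {
return step.get();
}
if (gate.rhs) {
T result = step.get();
gate.lhs.countDown();    // run first, then signal the waiting side
return result;
}
try {
if (!gate.lhs.await(WAIT_TIMEOUT_SECS, TimeUnit.SECONDS)) {
throw new RuntimeException("Latch timed out");
}
}
catch (InterruptedException e) {
throw new RuntimeException(e);
}
return step.get();         // wait for the signal, then run the step
}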