diff --git a/processing/src/main/java/org/apache/druid/query/QueryContexts.java b/processing/src/main/java/org/apache/druid/query/QueryContexts.java
index f6528a6d0c6..4979d7ce597 100644
--- a/processing/src/main/java/org/apache/druid/query/QueryContexts.java
+++ b/processing/src/main/java/org/apache/druid/query/QueryContexts.java
@@ -65,6 +65,7 @@ public class QueryContexts
   public static final String RETURN_PARTIAL_RESULTS_KEY = "returnPartialResults";
   public static final String USE_CACHE_KEY = "useCache";
   public static final String SECONDARY_PARTITION_PRUNING_KEY = "secondaryPartitionPruning";
+  public static final String BY_SEGMENT_KEY = "bySegment";

   public static final boolean DEFAULT_BY_SEGMENT = false;
   public static final boolean DEFAULT_POPULATE_CACHE = true;
@@ -139,7 +140,7 @@ public class QueryContexts

   public static boolean isBySegment(Query query, boolean defaultValue)
   {
-    return parseBoolean(query, "bySegment", defaultValue);
+    return parseBoolean(query, BY_SEGMENT_KEY, defaultValue);
   }

   public static boolean isPopulateCache(Query query)
diff --git a/processing/src/test/java/org/apache/druid/query/groupby/DefaultGroupByQueryMetricsTest.java b/processing/src/test/java/org/apache/druid/query/groupby/DefaultGroupByQueryMetricsTest.java
index 9e8d4aef9bd..bc2109e0c27 100644
--- a/processing/src/test/java/org/apache/druid/query/groupby/DefaultGroupByQueryMetricsTest.java
+++ b/processing/src/test/java/org/apache/druid/query/groupby/DefaultGroupByQueryMetricsTest.java
@@ -26,6 +26,7 @@ import org.apache.druid.java.util.emitter.service.ServiceEmitter;
 import org.apache.druid.query.CachingEmitter;
 import org.apache.druid.query.DefaultQueryMetricsTest;
 import org.apache.druid.query.DruidMetrics;
+import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.QueryRunnerTestHelper;
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
 import org.apache.druid.query.dimension.ExtractionDimensionSpec;
@@ -69,7 +70,7 @@ public class DefaultGroupByQueryMetricsTest
         )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
           .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
           .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
-          .setContext(ImmutableMap.of("bySegment", true));
+          .setContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
     GroupByQuery query = builder.build();
     queryMetrics.query(query);
@@ -87,7 +88,7 @@ public class DefaultGroupByQueryMetricsTest
     Assert.assertEquals("true", actualEvent.get("hasFilters"));
     Assert.assertEquals(expectedInterval.toDuration().toString(), actualEvent.get("duration"));
     Assert.assertEquals("", actualEvent.get(DruidMetrics.ID));
-    Assert.assertEquals(ImmutableMap.of("bySegment", true), actualEvent.get("context"));
+    Assert.assertEquals(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true), actualEvent.get("context"));
     // GroupBy-specific dimensions
     Assert.assertEquals("1", actualEvent.get("numDimensions"));
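Note: the hunks above only swap the "bySegment" string literal for the new QueryContexts.BY_SEGMENT_KEY constant; isBySegment still just parses a boolean flag out of the query context. The helper below is a hypothetical stand-in (not Druid's actual parseBoolean) showing what reading such a flag typically amounts to:

    import java.util.Map;

    // Hypothetical sketch only; Druid's real QueryContexts.parseBoolean may differ.
    class ContextFlags
    {
      static boolean readBooleanFlag(Map<String, Object> context, String key, boolean defaultValue)
      {
        final Object value = context.get(key);
        if (value == null) {
          return defaultValue;
        }
        // context values may arrive either as booleans or as strings like "true"
        return value instanceof Boolean ? (Boolean) value : Boolean.parseBoolean(String.valueOf(value));
      }
    }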
diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java
index b20fe0059e5..bfd40ce5935 100644
--- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java
+++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java
@@ -7916,7 +7916,7 @@ public class GroupByQueryRunnerTest extends InitializedNullHandlingTest
         .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
         .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
         .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
-        .setContext(ImmutableMap.of("bySegment", true));
+        .setContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
     final GroupByQuery fullQuery = builder.build();
     int segmentCount = 32;
@@ -7984,7 +7984,7 @@ public class GroupByQueryRunnerTest extends InitializedNullHandlingTest
         )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
           .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
           .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
-          .setContext(ImmutableMap.of("bySegment", true));
+          .setContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
     final GroupByQuery fullQuery = builder.build();
     int segmentCount = 32;
@@ -8051,7 +8051,7 @@ public class GroupByQueryRunnerTest extends InitializedNullHandlingTest
         )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
           .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
           .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
-          .overrideContext(ImmutableMap.of("bySegment", true));
+          .overrideContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
     final GroupByQuery fullQuery = builder.build();
     int segmentCount = 32;
@@ -8581,7 +8581,7 @@ public class GroupByQueryRunnerTest extends InitializedNullHandlingTest
         .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
         .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
         .setDimFilter(superFilter)
-        .overrideContext(ImmutableMap.of("bySegment", true));
+        .overrideContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
     final GroupByQuery fullQuery = builder.build();
     int segmentCount = 32;
diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java
index 00846ebb5db..7d226fb5a9f 100644
--- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java
+++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java
@@ -35,6 +35,7 @@ import org.apache.druid.query.BySegmentResultValueClass;
 import org.apache.druid.query.Druids;
 import org.apache.druid.query.FinalizeResultsQueryRunner;
 import org.apache.druid.query.Query;
+import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.QueryPlus;
 import org.apache.druid.query.QueryRunner;
 import org.apache.druid.query.QueryRunnerFactory;
@@ -915,7 +916,7 @@ public class SegmentMetadataQueryTest
     TestHelper.assertExpectedObjects(
         ImmutableList.of(bySegmentResult, bySegmentResult),
-        myRunner.run(QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of("bySegment", true)))),
+        myRunner.run(QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true)))),
         "failed SegmentMetadata bySegment query"
     );
     exec.shutdownNow();
diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java
index eb8709d4822..0f1c95c4b75 100644
--- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java
+++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java
@@ -43,6 +43,7 @@ import org.apache.druid.math.expr.ExprMacroTable;
 import org.apache.druid.query.BySegmentResultValue;
 import org.apache.druid.query.BySegmentResultValueClass;
 import org.apache.druid.query.FinalizeResultsQueryRunner;
+import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.QueryPlus;
 import org.apache.druid.query.QueryRunner;
 import org.apache.druid.query.QueryRunnerTestHelper;
@@ -1148,7 +1149,7 @@ public class TopNQueryRunnerTest extends InitializedNullHandlingTest
   {
     final HashMap specialContext = new HashMap();
-    specialContext.put("bySegment", "true");
+    specialContext.put(QueryContexts.BY_SEGMENT_KEY, "true");
     TopNQuery query = new TopNQueryBuilder()
         .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
         .granularity(QueryRunnerTestHelper.ALL_GRAN)
@@ -3639,7 +3640,7 @@ public class TopNQueryRunnerTest extends InitializedNullHandlingTest
             QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
             QueryRunnerTestHelper.DEPENDENT_POST_AGG
         )
-        .context(ImmutableMap.of("finalize", true, "bySegment", true))
+        .context(ImmutableMap.of(QueryContexts.FINALIZE_KEY, true, QueryContexts.BY_SEGMENT_KEY, true))
         .build();
     TopNResultValue topNResult = new TopNResultValue(
         Arrays.>asList(
diff --git a/server/src/main/java/org/apache/druid/client/CachingClusteredClient.java b/server/src/main/java/org/apache/druid/client/CachingClusteredClient.java
index 369ba2aa20b..44818495890 100644
--- a/server/src/main/java/org/apache/druid/client/CachingClusteredClient.java
+++ b/server/src/main/java/org/apache/druid/client/CachingClusteredClient.java
@@ -316,7 +316,7 @@ public class CachingClusteredClient implements QuerySegmentWalker
     if (populateCache) {
       // prevent down-stream nodes from caching results as well if we are populating the cache
       contextBuilder.put(CacheConfig.POPULATE_CACHE, false);
-      contextBuilder.put("bySegment", true);
+      contextBuilder.put(QueryContexts.BY_SEGMENT_KEY, true);
     }
     return contextBuilder.build();
   }
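Note: the CachingClusteredClient hunk keeps the existing broker behaviour, forcing bySegment results from data servers whenever the broker itself populates the cache, and merely routes it through the new constant. A minimal sketch of that downstream-context construction, with plain string keys standing in for CacheConfig.POPULATE_CACHE and QueryContexts.BY_SEGMENT_KEY:

    import java.util.HashMap;
    import java.util.Map;

    // Sketch only: mirrors the intent of the hunk above, not the actual Druid code.
    class DownstreamContextSketch
    {
      static Map<String, Object> downstreamContext(Map<String, Object> base, boolean populateCache)
      {
        final Map<String, Object> context = new HashMap<>(base);
        if (populateCache) {
          // the broker caches per-segment results itself, so data servers must not cache
          // them again and must return results keyed by segment
          context.put("populateCache", false);
          context.put("bySegment", true);
        }
        return context;
      }
    }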
diff --git a/server/src/test/java/org/apache/druid/client/CachingClusteredClientCacheKeyManagerTest.java b/server/src/test/java/org/apache/druid/client/CachingClusteredClientCacheKeyManagerTest.java
index 7970c8c893c..c49b02c3a08 100644
--- a/server/src/test/java/org/apache/druid/client/CachingClusteredClientCacheKeyManagerTest.java
+++ b/server/src/test/java/org/apache/druid/client/CachingClusteredClientCacheKeyManagerTest.java
@@ -25,6 +25,7 @@ import org.apache.druid.client.selector.QueryableDruidServer;
 import org.apache.druid.client.selector.ServerSelector;
 import org.apache.druid.query.CacheStrategy;
 import org.apache.druid.query.Query;
+import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.planning.DataSourceAnalysis;
 import org.apache.druid.segment.join.JoinableFactoryWrapper;
 import org.apache.druid.timeline.DataSegment;
@@ -65,7 +66,7 @@ public class CachingClusteredClientCacheKeyManagerTest extends EasyMockSupport
   public void setup()
   {
     expect(strategy.computeCacheKey(query)).andReturn(QUERY_CACHE_KEY).anyTimes();
-    expect(query.getContextValue("bySegment")).andReturn(false).anyTimes();
+    expect(query.getContextValue(QueryContexts.BY_SEGMENT_KEY)).andReturn(false).anyTimes();
   }

   @After
@@ -201,7 +202,7 @@ public class CachingClusteredClientCacheKeyManagerTest extends EasyMockSupport
   {
     expect(dataSourceAnalysis.isJoin()).andReturn(false);
     reset(query);
-    expect(query.getContextValue("bySegment")).andReturn(true).anyTimes();
+    expect(query.getContextValue(QueryContexts.BY_SEGMENT_KEY)).andReturn(true).anyTimes();
     replayAll();
     CachingClusteredClient.CacheKeyManager keyManager = makeKeyManager();
     Set selectors = ImmutableSet.of(
@@ -270,7 +271,7 @@ public class CachingClusteredClientCacheKeyManagerTest extends EasyMockSupport
   public void testSegmentQueryCacheKey_noCachingIfBySegment()
   {
     reset(query);
-    expect(query.getContextValue("bySegment")).andReturn(true).anyTimes();
+    expect(query.getContextValue(QueryContexts.BY_SEGMENT_KEY)).andReturn(true).anyTimes();
     replayAll();
     byte[] cacheKey = makeKeyManager().computeSegmentLevelQueryCacheKey();
     Assert.assertNull(cacheKey);
diff --git a/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java b/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java
index 5b96d7ad873..cda0170d5f0 100644
--- a/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java
+++ b/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java
@@ -2299,11 +2299,11 @@ public class CachingClusteredClientTest
     QueryPlus capturedQueryPlus = (QueryPlus) queryCapture.getValue();
     Query capturedQuery = capturedQueryPlus.getQuery();
     if (expectBySegment) {
-      Assert.assertEquals(true, capturedQuery.getContextValue("bySegment"));
+      Assert.assertEquals(true, capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY));
     } else {
       Assert.assertTrue(
-          capturedQuery.getContextValue("bySegment") == null ||
-          capturedQuery.getContextValue("bySegment").equals(false)
+          capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY) == null ||
+          capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY).equals(false)
       );
     }
   }
diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java
index c09de8d427a..c30aba0fa23 100644
--- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java
+++ b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java
@@ -37,6 +37,7 @@ import org.apache.druid.java.util.common.guava.Sequences;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.java.util.emitter.service.ServiceEmitter;
 import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
+import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.QueryInterruptedException;
 import org.apache.druid.query.QueryTimeoutException;
 import org.apache.druid.server.QueryStats;
@@ -151,6 +152,11 @@ public class SqlLifecycle
     if (queryContext != null) {
       newContext.putAll(queryContext);
     }
+    // "bySegment" results are never valid to use with SQL because the result format is incompatible
+    // so, overwrite any user specified context to avoid exceptions down the line
+    if (newContext.remove(QueryContexts.BY_SEGMENT_KEY) != null) {
+      log.warn("'bySegment' results are not supported for SQL queries, ignoring query context parameter");
+    }
     newContext.computeIfAbsent(PlannerContext.CTX_SQL_QUERY_ID, k -> UUID.randomUUID().toString());
     return newContext;
   }
@@ -459,6 +465,22 @@ public class SqlLifecycle
     }
   }

+  @VisibleForTesting
+  public State getState()
+  {
+    synchronized (lock) {
+      return state;
+    }
+  }
+
+  @VisibleForTesting
+  public Map getQueryContext()
+  {
+    synchronized (lock) {
+      return queryContext;
+    }
+  }
+
   @GuardedBy("lock")
   private void transition(final State from, final State to)
   {
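Note: the SqlLifecycle hunk above is the behavioural core of this patch: SQL queries can never return bySegment-shaped results, so the key is dropped from any user-supplied context (with a warning) before the sqlQueryId is filled in. Below is a condensed sketch of that contextualization step using plain JDK types; the literal keys and System.err stand in for PlannerContext.CTX_SQL_QUERY_ID and Druid's logger, and it is not the actual method body.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.UUID;

    // Condensed sketch of the contextualize step shown in the hunk above.
    class SqlContextSketch
    {
      static Map<String, Object> contextualize(Map<String, Object> userContext)
      {
        final Map<String, Object> newContext = new HashMap<>();
        if (userContext != null) {
          newContext.putAll(userContext);
        }
        // "bySegment" results are never valid for SQL, so drop the key and warn
        if (newContext.remove("bySegment") != null) {
          System.err.println("'bySegment' is not supported for SQL queries, ignoring it");
        }
        newContext.computeIfAbsent("sqlQueryId", k -> UUID.randomUUID().toString());
        return newContext;
      }
    }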
diff --git a/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java b/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java
new file mode 100644
index 00000000000..6fd383bb247
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java
@@ -0,0 +1,279 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import org.apache.calcite.avatica.SqlType;
+import org.apache.calcite.avatica.remote.TypedValue;
+import org.apache.calcite.sql.parser.SqlParseException;
+import org.apache.calcite.tools.RelConversionException;
+import org.apache.calcite.tools.ValidationException;
+import org.apache.druid.java.util.common.guava.Sequences;
+import org.apache.druid.java.util.emitter.service.ServiceEmitter;
+import org.apache.druid.java.util.emitter.service.ServiceEventBuilder;
+import org.apache.druid.query.QueryContexts;
+import org.apache.druid.server.log.RequestLogger;
+import org.apache.druid.server.security.Access;
+import org.apache.druid.server.security.AuthConfig;
+import org.apache.druid.sql.calcite.planner.DruidPlanner;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.planner.PlannerFactory;
+import org.apache.druid.sql.calcite.planner.PlannerResult;
+import org.apache.druid.sql.calcite.planner.PrepareResult;
+import org.apache.druid.sql.calcite.planner.ValidationResult;
+import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.http.SqlParameter;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+public class SqlLifecycleTest
+{
+  private PlannerFactory plannerFactory;
+  private ServiceEmitter serviceEmitter;
+  private RequestLogger requestLogger;
+  private SqlLifecycleFactory sqlLifecycleFactory;
+
+  @Before
+  public void setup()
+  {
+    this.plannerFactory = EasyMock.createMock(PlannerFactory.class);
+    this.serviceEmitter = EasyMock.createMock(ServiceEmitter.class);
+    this.requestLogger = EasyMock.createMock(RequestLogger.class);
+    this.sqlLifecycleFactory = new SqlLifecycleFactory(plannerFactory, serviceEmitter, requestLogger);
+  }
+
+  @Test
+  public void testIgnoredQueryContextParametersAreIgnored()
+  {
+    SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
+    final String sql = "select 1 + ?";
+    final Map queryContext = ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true");
+    lifecycle.initialize(sql, queryContext);
+    Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
+    Assert.assertEquals(1, lifecycle.getQueryContext().size());
+    // should contain only query id, not bySegment since it is not valid for SQL
+    Assert.assertTrue(lifecycle.getQueryContext().containsKey(PlannerContext.CTX_SQL_QUERY_ID));
+  }
+
+  @Test
+  public void testStateTransition()
+      throws ValidationException, SqlParseException, RelConversionException, IOException
+  {
+    SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
+    final String sql = "select 1 + ?";
+    final Map queryContext = Collections.emptyMap();
+    Assert.assertEquals(SqlLifecycle.State.NEW, lifecycle.getState());
+
+    // test initialize
+    lifecycle.initialize(sql, queryContext);
+    Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
+    List parameters = ImmutableList.of(new SqlParameter(SqlType.BIGINT, 1L).getTypedValue());
+    lifecycle.setParameters(parameters);
+    // setting parameters should not change the state
+    Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
+
+    // test authorization
+    DruidPlanner mockPlanner = EasyMock.createMock(DruidPlanner.class);
+    PlannerContext mockPlannerContext = EasyMock.createMock(PlannerContext.class);
+    ValidationResult validationResult = new ValidationResult(Collections.emptySet());
+    EasyMock.expect(plannerFactory.createPlanner(EasyMock.anyObject())).andReturn(mockPlanner).once();
+    EasyMock.expect(mockPlanner.getPlannerContext()).andReturn(mockPlannerContext).once();
+    mockPlannerContext.setAuthenticationResult(CalciteTests.REGULAR_USER_AUTH_RESULT);
+    EasyMock.expectLastCall();
+    mockPlannerContext.setParameters(parameters);
+    EasyMock.expectLastCall();
+    EasyMock.expect(plannerFactory.getAuthorizerMapper()).andReturn(CalciteTests.TEST_AUTHORIZER_MAPPER).once();
+    mockPlannerContext.setAuthorizationResult(Access.OK);
+    EasyMock.expectLastCall();
+    EasyMock.expect(mockPlanner.validate(sql)).andReturn(validationResult).once();
+    mockPlanner.close();
+    EasyMock.expectLastCall();
+
+    EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext);
+
+    lifecycle.validateAndAuthorize(CalciteTests.REGULAR_USER_AUTH_RESULT);
+    Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
+    EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext);
+    EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext);
+
+    // test prepare
+    PrepareResult mockPrepareResult = EasyMock.createMock(PrepareResult.class);
+    EasyMock.expect(plannerFactory.createPlannerWithContext(EasyMock.eq(mockPlannerContext))).andReturn(mockPlanner).once();
+    EasyMock.expect(mockPlanner.prepare(sql)).andReturn(mockPrepareResult).once();
+    mockPlanner.close();
+    EasyMock.expectLastCall();
+    EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
+    lifecycle.prepare();
+    // prepare doesn't change the lifecycle state
+    Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
+    EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
+    EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
+
+    // test plan
+    PlannerResult mockPlanResult = EasyMock.createMock(PlannerResult.class);
+    EasyMock.expect(plannerFactory.createPlannerWithContext(EasyMock.eq(mockPlannerContext))).andReturn(mockPlanner).once();
+    EasyMock.expect(mockPlanner.plan(sql)).andReturn(mockPlanResult).once();
+    mockPlanner.close();
+    EasyMock.expectLastCall();
+    EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+    PlannerContext context = lifecycle.plan();
+    Assert.assertEquals(mockPlannerContext, context);
+    Assert.assertEquals(SqlLifecycle.State.PLANNED, lifecycle.getState());
+    EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+    EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+
+    // test execute
+    EasyMock.expect(mockPlanResult.run()).andReturn(Sequences.simple(ImmutableList.of(new Object[]{2L}))).once();
+    EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+    lifecycle.execute();
+    Assert.assertEquals(SqlLifecycle.State.EXECUTING, lifecycle.getState());
+    EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+    EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+
+    // test emit
+    EasyMock.expect(mockPlannerContext.getSqlQueryId()).andReturn("id").once();
+    EasyMock.expect(mockPlannerContext.getNativeQueryIds()).andReturn(ImmutableList.of("id")).times(2);
+    EasyMock.expect(mockPlannerContext.getAuthenticationResult()).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).once();
+
+    serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
+    EasyMock.expectLastCall();
+    serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
+    EasyMock.expectLastCall();
+    requestLogger.logSqlQuery(EasyMock.anyObject());
+    EasyMock.expectLastCall();
+    EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+
+    lifecycle.emitLogsAndMetrics(null, null, 10);
+    Assert.assertEquals(SqlLifecycle.State.DONE, lifecycle.getState());
+    EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+    EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+  }
+
+  @Test
+  public void testStateTransitionHttpRequest()
+      throws ValidationException, SqlParseException, RelConversionException, IOException
+  {
+    // this test duplicates testStateTransition, with a slight variation in how validate and authorize
+    // is run
+    SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
+    final String sql = "select 1 + ?";
+    final Map queryContext = Collections.emptyMap();
+    Assert.assertEquals(SqlLifecycle.State.NEW, lifecycle.getState());
+
+    // test initialize
+    lifecycle.initialize(sql, queryContext);
+    Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
+    List parameters = ImmutableList.of(new SqlParameter(SqlType.BIGINT, 1L).getTypedValue());
+    lifecycle.setParameters(parameters);
+    // setting parameters should not change the state
+    Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
+
+    // test authorization
+    DruidPlanner mockPlanner = EasyMock.createMock(DruidPlanner.class);
+    PlannerContext mockPlannerContext = EasyMock.createMock(PlannerContext.class);
+    ValidationResult validationResult = new ValidationResult(Collections.emptySet());
+    EasyMock.expect(plannerFactory.createPlanner(EasyMock.anyObject())).andReturn(mockPlanner).once();
+    EasyMock.expect(mockPlanner.getPlannerContext()).andReturn(mockPlannerContext).once();
+    mockPlannerContext.setAuthenticationResult(CalciteTests.REGULAR_USER_AUTH_RESULT);
+    EasyMock.expectLastCall();
+    mockPlannerContext.setParameters(parameters);
+    EasyMock.expectLastCall();
+    EasyMock.expect(plannerFactory.getAuthorizerMapper()).andReturn(CalciteTests.TEST_AUTHORIZER_MAPPER).once();
+    mockPlannerContext.setAuthorizationResult(Access.OK);
+    EasyMock.expectLastCall();
+    EasyMock.expect(mockPlanner.validate(sql)).andReturn(validationResult).once();
+    mockPlanner.close();
+    EasyMock.expectLastCall();
+
+    HttpServletRequest request = EasyMock.createMock(HttpServletRequest.class);
+    EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).times(2);
+    EasyMock.expect(request.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).once();
+    EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).once();
+    request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
+    EasyMock.expectLastCall();
+    EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request);
+
+    lifecycle.validateAndAuthorize(request);
+    Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
+    EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request);
+    EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request);
+
+    // test prepare
+    PrepareResult mockPrepareResult = EasyMock.createMock(PrepareResult.class);
+    EasyMock.expect(plannerFactory.createPlannerWithContext(EasyMock.eq(mockPlannerContext))).andReturn(mockPlanner).once();
+    EasyMock.expect(mockPlanner.prepare(sql)).andReturn(mockPrepareResult).once();
+    mockPlanner.close();
+    EasyMock.expectLastCall();
+    EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
+    lifecycle.prepare();
+    // prepare doesn't change the lifecycle state
+    Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
+    EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
+    EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
+
+    // test plan
+    PlannerResult mockPlanResult = EasyMock.createMock(PlannerResult.class);
+    EasyMock.expect(plannerFactory.createPlannerWithContext(EasyMock.eq(mockPlannerContext))).andReturn(mockPlanner).once();
+    EasyMock.expect(mockPlanner.plan(sql)).andReturn(mockPlanResult).once();
+    mockPlanner.close();
+    EasyMock.expectLastCall();
+    EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+    PlannerContext context = lifecycle.plan();
+    Assert.assertEquals(mockPlannerContext, context);
+    Assert.assertEquals(SqlLifecycle.State.PLANNED, lifecycle.getState());
+    EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+    EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+
+    // test execute
+    EasyMock.expect(mockPlanResult.run()).andReturn(Sequences.simple(ImmutableList.of(new Object[]{2L}))).once();
+    EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+    lifecycle.execute();
+    Assert.assertEquals(SqlLifecycle.State.EXECUTING, lifecycle.getState());
+    EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+    EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+
+    // test emit
+    EasyMock.expect(mockPlannerContext.getSqlQueryId()).andReturn("id").once();
+    EasyMock.expect(mockPlannerContext.getNativeQueryIds()).andReturn(ImmutableList.of("id")).times(2);
+    EasyMock.expect(mockPlannerContext.getAuthenticationResult()).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).once();
+
+    serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
+    EasyMock.expectLastCall();
+    serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
+    EasyMock.expectLastCall();
+    requestLogger.logSqlQuery(EasyMock.anyObject());
+    EasyMock.expectLastCall();
+    EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+
+    lifecycle.emitLogsAndMetrics(null, null, 10);
+    Assert.assertEquals(SqlLifecycle.State.DONE, lifecycle.getState());
+    EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+    EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
+  }
+}
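Note: taken together, the two tests above pin down the lifecycle ordering that callers are expected to follow. The comment sketch below summarizes the happy-path call order and the state observed after each step, as inferred from the assertions in SqlLifecycleTest; it is an illustration, not an API contract.

    // Happy-path call order inferred from SqlLifecycleTest (illustrative only):
    //
    //   sqlLifecycleFactory.factorize()              -> State.NEW
    //   lifecycle.initialize(sql, queryContext)      -> State.INITIALIZED ("bySegment" stripped, sqlQueryId added)
    //   lifecycle.setParameters(parameters)          -> State.INITIALIZED (unchanged)
    //   lifecycle.validateAndAuthorize(authResult)   -> State.AUTHORIZED  (or validateAndAuthorize(request))
    //   lifecycle.prepare()                          -> State.AUTHORIZED  (unchanged)
    //   lifecycle.plan()                             -> State.PLANNED
    //   lifecycle.execute()                          -> State.EXECUTING
    //   lifecycle.emitLogsAndMetrics(null, null, 10) -> State.DONE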