ignore bySegment query context for SQL queries (#11352)

* ignore bySegment query context for SQL queries

* revert unintended change
This commit is contained in:
Clint Wylie 2021-06-11 13:49:03 -07:00 committed by GitHub
parent a1ed3a407d
commit 50327b8f63
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 323 additions and 17 deletions

View File

@@ -65,6 +65,7 @@ public class QueryContexts
public static final String RETURN_PARTIAL_RESULTS_KEY = "returnPartialResults"; public static final String RETURN_PARTIAL_RESULTS_KEY = "returnPartialResults";
public static final String USE_CACHE_KEY = "useCache"; public static final String USE_CACHE_KEY = "useCache";
public static final String SECONDARY_PARTITION_PRUNING_KEY = "secondaryPartitionPruning"; public static final String SECONDARY_PARTITION_PRUNING_KEY = "secondaryPartitionPruning";
public static final String BY_SEGMENT_KEY = "bySegment";
public static final boolean DEFAULT_BY_SEGMENT = false; public static final boolean DEFAULT_BY_SEGMENT = false;
public static final boolean DEFAULT_POPULATE_CACHE = true; public static final boolean DEFAULT_POPULATE_CACHE = true;
@@ -139,7 +140,7 @@ public class QueryContexts
public static <T> boolean isBySegment(Query<T> query, boolean defaultValue) public static <T> boolean isBySegment(Query<T> query, boolean defaultValue)
{ {
return parseBoolean(query, "bySegment", defaultValue); return parseBoolean(query, BY_SEGMENT_KEY, defaultValue);
} }
public static <T> boolean isPopulateCache(Query<T> query) public static <T> boolean isPopulateCache(Query<T> query)

View File

@@ -26,6 +26,7 @@ import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.CachingEmitter; import org.apache.druid.query.CachingEmitter;
import org.apache.druid.query.DefaultQueryMetricsTest; import org.apache.druid.query.DefaultQueryMetricsTest;
import org.apache.druid.query.DruidMetrics; import org.apache.druid.query.DruidMetrics;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.dimension.ExtractionDimensionSpec; import org.apache.druid.query.dimension.ExtractionDimensionSpec;
@@ -69,7 +70,7 @@ public class DefaultGroupByQueryMetricsTest
)).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
.setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
.setContext(ImmutableMap.of("bySegment", true)); .setContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
GroupByQuery query = builder.build(); GroupByQuery query = builder.build();
queryMetrics.query(query); queryMetrics.query(query);
@@ -87,7 +88,7 @@ public class DefaultGroupByQueryMetricsTest
Assert.assertEquals("true", actualEvent.get("hasFilters")); Assert.assertEquals("true", actualEvent.get("hasFilters"));
Assert.assertEquals(expectedInterval.toDuration().toString(), actualEvent.get("duration")); Assert.assertEquals(expectedInterval.toDuration().toString(), actualEvent.get("duration"));
Assert.assertEquals("", actualEvent.get(DruidMetrics.ID)); Assert.assertEquals("", actualEvent.get(DruidMetrics.ID));
Assert.assertEquals(ImmutableMap.of("bySegment", true), actualEvent.get("context")); Assert.assertEquals(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true), actualEvent.get("context"));
// GroupBy-specific dimensions // GroupBy-specific dimensions
Assert.assertEquals("1", actualEvent.get("numDimensions")); Assert.assertEquals("1", actualEvent.get("numDimensions"));

View File

@@ -7916,7 +7916,7 @@ public class GroupByQueryRunnerTest extends InitializedNullHandlingTest
.setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
.setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
.setContext(ImmutableMap.of("bySegment", true)); .setContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
final GroupByQuery fullQuery = builder.build(); final GroupByQuery fullQuery = builder.build();
int segmentCount = 32; int segmentCount = 32;
@@ -7984,7 +7984,7 @@ public class GroupByQueryRunnerTest extends InitializedNullHandlingTest
)).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
.setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
.setContext(ImmutableMap.of("bySegment", true)); .setContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
final GroupByQuery fullQuery = builder.build(); final GroupByQuery fullQuery = builder.build();
int segmentCount = 32; int segmentCount = 32;
@@ -8051,7 +8051,7 @@ public class GroupByQueryRunnerTest extends InitializedNullHandlingTest
)).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
.setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
.overrideContext(ImmutableMap.of("bySegment", true)); .overrideContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
final GroupByQuery fullQuery = builder.build(); final GroupByQuery fullQuery = builder.build();
int segmentCount = 32; int segmentCount = 32;
@@ -8581,7 +8581,7 @@ public class GroupByQueryRunnerTest extends InitializedNullHandlingTest
.setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
.setDimFilter(superFilter) .setDimFilter(superFilter)
.overrideContext(ImmutableMap.of("bySegment", true)); .overrideContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
final GroupByQuery fullQuery = builder.build(); final GroupByQuery fullQuery = builder.build();
int segmentCount = 32; int segmentCount = 32;

View File

@@ -35,6 +35,7 @@ import org.apache.druid.query.BySegmentResultValueClass;
import org.apache.druid.query.Druids; import org.apache.druid.query.Druids;
import org.apache.druid.query.FinalizeResultsQueryRunner; import org.apache.druid.query.FinalizeResultsQueryRunner;
import org.apache.druid.query.Query; import org.apache.druid.query.Query;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerFactory; import org.apache.druid.query.QueryRunnerFactory;
@@ -915,7 +916,7 @@ public class SegmentMetadataQueryTest
TestHelper.assertExpectedObjects( TestHelper.assertExpectedObjects(
ImmutableList.of(bySegmentResult, bySegmentResult), ImmutableList.of(bySegmentResult, bySegmentResult),
myRunner.run(QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of("bySegment", true)))), myRunner.run(QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true)))),
"failed SegmentMetadata bySegment query" "failed SegmentMetadata bySegment query"
); );
exec.shutdownNow(); exec.shutdownNow();

View File

@@ -43,6 +43,7 @@ import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.BySegmentResultValue; import org.apache.druid.query.BySegmentResultValue;
import org.apache.druid.query.BySegmentResultValueClass; import org.apache.druid.query.BySegmentResultValueClass;
import org.apache.druid.query.FinalizeResultsQueryRunner; import org.apache.druid.query.FinalizeResultsQueryRunner;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.QueryRunnerTestHelper;
@@ -1148,7 +1149,7 @@ public class TopNQueryRunnerTest extends InitializedNullHandlingTest
{ {
final HashMap<String, Object> specialContext = new HashMap<String, Object>(); final HashMap<String, Object> specialContext = new HashMap<String, Object>();
specialContext.put("bySegment", "true"); specialContext.put(QueryContexts.BY_SEGMENT_KEY, "true");
TopNQuery query = new TopNQueryBuilder() TopNQuery query = new TopNQueryBuilder()
.dataSource(QueryRunnerTestHelper.DATA_SOURCE) .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
.granularity(QueryRunnerTestHelper.ALL_GRAN) .granularity(QueryRunnerTestHelper.ALL_GRAN)
@@ -3639,7 +3640,7 @@ public class TopNQueryRunnerTest extends InitializedNullHandlingTest
QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT, QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
QueryRunnerTestHelper.DEPENDENT_POST_AGG QueryRunnerTestHelper.DEPENDENT_POST_AGG
) )
.context(ImmutableMap.of("finalize", true, "bySegment", true)) .context(ImmutableMap.of(QueryContexts.FINALIZE_KEY, true, QueryContexts.BY_SEGMENT_KEY, true))
.build(); .build();
TopNResultValue topNResult = new TopNResultValue( TopNResultValue topNResult = new TopNResultValue(
Arrays.<Map<String, Object>>asList( Arrays.<Map<String, Object>>asList(

View File

@@ -316,7 +316,7 @@ public class CachingClusteredClient implements QuerySegmentWalker
if (populateCache) { if (populateCache) {
// prevent down-stream nodes from caching results as well if we are populating the cache // prevent down-stream nodes from caching results as well if we are populating the cache
contextBuilder.put(CacheConfig.POPULATE_CACHE, false); contextBuilder.put(CacheConfig.POPULATE_CACHE, false);
contextBuilder.put("bySegment", true); contextBuilder.put(QueryContexts.BY_SEGMENT_KEY, true);
} }
return contextBuilder.build(); return contextBuilder.build();
} }

View File

@@ -25,6 +25,7 @@ import org.apache.druid.client.selector.QueryableDruidServer;
import org.apache.druid.client.selector.ServerSelector; import org.apache.druid.client.selector.ServerSelector;
import org.apache.druid.query.CacheStrategy; import org.apache.druid.query.CacheStrategy;
import org.apache.druid.query.Query; import org.apache.druid.query.Query;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.planning.DataSourceAnalysis; import org.apache.druid.query.planning.DataSourceAnalysis;
import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.DataSegment;
@@ -65,7 +66,7 @@ public class CachingClusteredClientCacheKeyManagerTest extends EasyMockSupport
public void setup() public void setup()
{ {
expect(strategy.computeCacheKey(query)).andReturn(QUERY_CACHE_KEY).anyTimes(); expect(strategy.computeCacheKey(query)).andReturn(QUERY_CACHE_KEY).anyTimes();
expect(query.getContextValue("bySegment")).andReturn(false).anyTimes(); expect(query.getContextValue(QueryContexts.BY_SEGMENT_KEY)).andReturn(false).anyTimes();
} }
@After @After
@@ -201,7 +202,7 @@ public class CachingClusteredClientCacheKeyManagerTest extends EasyMockSupport
{ {
expect(dataSourceAnalysis.isJoin()).andReturn(false); expect(dataSourceAnalysis.isJoin()).andReturn(false);
reset(query); reset(query);
expect(query.getContextValue("bySegment")).andReturn(true).anyTimes(); expect(query.getContextValue(QueryContexts.BY_SEGMENT_KEY)).andReturn(true).anyTimes();
replayAll(); replayAll();
CachingClusteredClient.CacheKeyManager<Object> keyManager = makeKeyManager(); CachingClusteredClient.CacheKeyManager<Object> keyManager = makeKeyManager();
Set<SegmentServerSelector> selectors = ImmutableSet.of( Set<SegmentServerSelector> selectors = ImmutableSet.of(
@@ -270,7 +271,7 @@ public class CachingClusteredClientCacheKeyManagerTest extends EasyMockSupport
public void testSegmentQueryCacheKey_noCachingIfBySegment() public void testSegmentQueryCacheKey_noCachingIfBySegment()
{ {
reset(query); reset(query);
expect(query.getContextValue("bySegment")).andReturn(true).anyTimes(); expect(query.getContextValue(QueryContexts.BY_SEGMENT_KEY)).andReturn(true).anyTimes();
replayAll(); replayAll();
byte[] cacheKey = makeKeyManager().computeSegmentLevelQueryCacheKey(); byte[] cacheKey = makeKeyManager().computeSegmentLevelQueryCacheKey();
Assert.assertNull(cacheKey); Assert.assertNull(cacheKey);

View File

@@ -2299,11 +2299,11 @@ public class CachingClusteredClientTest
QueryPlus capturedQueryPlus = (QueryPlus) queryCapture.getValue(); QueryPlus capturedQueryPlus = (QueryPlus) queryCapture.getValue();
Query capturedQuery = capturedQueryPlus.getQuery(); Query capturedQuery = capturedQueryPlus.getQuery();
if (expectBySegment) { if (expectBySegment) {
Assert.assertEquals(true, capturedQuery.getContextValue("bySegment")); Assert.assertEquals(true, capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY));
} else { } else {
Assert.assertTrue( Assert.assertTrue(
capturedQuery.getContextValue("bySegment") == null || capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY) == null ||
capturedQuery.getContextValue("bySegment").equals(false) capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY).equals(false)
); );
} }
} }

View File

@@ -37,6 +37,7 @@ import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryInterruptedException; import org.apache.druid.query.QueryInterruptedException;
import org.apache.druid.query.QueryTimeoutException; import org.apache.druid.query.QueryTimeoutException;
import org.apache.druid.server.QueryStats; import org.apache.druid.server.QueryStats;
@@ -151,6 +152,11 @@ public class SqlLifecycle
if (queryContext != null) { if (queryContext != null) {
newContext.putAll(queryContext); newContext.putAll(queryContext);
} }
// "bySegment" results are never valid to use with SQL because the result format is incompatible
// so, overwrite any user specified context to avoid exceptions down the line
if (newContext.remove(QueryContexts.BY_SEGMENT_KEY) != null) {
log.warn("'bySegment' results are not supported for SQL queries, ignoring query context parameter");
}
newContext.computeIfAbsent(PlannerContext.CTX_SQL_QUERY_ID, k -> UUID.randomUUID().toString()); newContext.computeIfAbsent(PlannerContext.CTX_SQL_QUERY_ID, k -> UUID.randomUUID().toString());
return newContext; return newContext;
} }
@@ -459,6 +465,22 @@ public class SqlLifecycle
} }
} }
@VisibleForTesting
public State getState()
{
synchronized (lock) {
return state;
}
}
@VisibleForTesting
public Map<String, Object> getQueryContext()
{
synchronized (lock) {
return queryContext;
}
}
@GuardedBy("lock") @GuardedBy("lock")
private void transition(final State from, final State to) private void transition(final State from, final State to)
{ {

View File

@@ -0,0 +1,279 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.calcite.avatica.SqlType;
import org.apache.calcite.avatica.remote.TypedValue;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.tools.RelConversionException;
import org.apache.calcite.tools.ValidationException;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceEventBuilder;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.server.log.RequestLogger;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.planner.PlannerResult;
import org.apache.druid.sql.calcite.planner.PrepareResult;
import org.apache.druid.sql.calcite.planner.ValidationResult;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.http.SqlParameter;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class SqlLifecycleTest
{
private PlannerFactory plannerFactory;
private ServiceEmitter serviceEmitter;
private RequestLogger requestLogger;
private SqlLifecycleFactory sqlLifecycleFactory;
@Before
public void setup()
{
this.plannerFactory = EasyMock.createMock(PlannerFactory.class);
this.serviceEmitter = EasyMock.createMock(ServiceEmitter.class);
this.requestLogger = EasyMock.createMock(RequestLogger.class);
this.sqlLifecycleFactory = new SqlLifecycleFactory(plannerFactory, serviceEmitter, requestLogger);
}
@Test
public void testIgnoredQueryContextParametersAreIgnored()
{
SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
final String sql = "select 1 + ?";
final Map<String, Object> queryContext = ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true");
lifecycle.initialize(sql, queryContext);
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
Assert.assertEquals(1, lifecycle.getQueryContext().size());
// should contain only query id, not bySegment since it is not valid for SQL
Assert.assertTrue(lifecycle.getQueryContext().containsKey(PlannerContext.CTX_SQL_QUERY_ID));
}
@Test
public void testStateTransition()
throws ValidationException, SqlParseException, RelConversionException, IOException
{
SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
final String sql = "select 1 + ?";
final Map<String, Object> queryContext = Collections.emptyMap();
Assert.assertEquals(SqlLifecycle.State.NEW, lifecycle.getState());
// test initialize
lifecycle.initialize(sql, queryContext);
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
List<TypedValue> parameters = ImmutableList.of(new SqlParameter(SqlType.BIGINT, 1L).getTypedValue());
lifecycle.setParameters(parameters);
// setting parameters should not change the state
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
// test authorization
DruidPlanner mockPlanner = EasyMock.createMock(DruidPlanner.class);
PlannerContext mockPlannerContext = EasyMock.createMock(PlannerContext.class);
ValidationResult validationResult = new ValidationResult(Collections.emptySet());
EasyMock.expect(plannerFactory.createPlanner(EasyMock.anyObject())).andReturn(mockPlanner).once();
EasyMock.expect(mockPlanner.getPlannerContext()).andReturn(mockPlannerContext).once();
mockPlannerContext.setAuthenticationResult(CalciteTests.REGULAR_USER_AUTH_RESULT);
EasyMock.expectLastCall();
mockPlannerContext.setParameters(parameters);
EasyMock.expectLastCall();
EasyMock.expect(plannerFactory.getAuthorizerMapper()).andReturn(CalciteTests.TEST_AUTHORIZER_MAPPER).once();
mockPlannerContext.setAuthorizationResult(Access.OK);
EasyMock.expectLastCall();
EasyMock.expect(mockPlanner.validate(sql)).andReturn(validationResult).once();
mockPlanner.close();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext);
lifecycle.validateAndAuthorize(CalciteTests.REGULAR_USER_AUTH_RESULT);
Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext);
// test prepare
PrepareResult mockPrepareResult = EasyMock.createMock(PrepareResult.class);
EasyMock.expect(plannerFactory.createPlannerWithContext(EasyMock.eq(mockPlannerContext))).andReturn(mockPlanner).once();
EasyMock.expect(mockPlanner.prepare(sql)).andReturn(mockPrepareResult).once();
mockPlanner.close();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
lifecycle.prepare();
// prepare doesn't change lifecycle state // prepare doesn't change lifecycle state
Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
// test plan
PlannerResult mockPlanResult = EasyMock.createMock(PlannerResult.class);
EasyMock.expect(plannerFactory.createPlannerWithContext(EasyMock.eq(mockPlannerContext))).andReturn(mockPlanner).once();
EasyMock.expect(mockPlanner.plan(sql)).andReturn(mockPlanResult).once();
mockPlanner.close();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
PlannerContext context = lifecycle.plan();
Assert.assertEquals(mockPlannerContext, context);
Assert.assertEquals(SqlLifecycle.State.PLANNED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
// test execute
EasyMock.expect(mockPlanResult.run()).andReturn(Sequences.simple(ImmutableList.of(new Object[]{2L}))).once();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
lifecycle.execute();
Assert.assertEquals(SqlLifecycle.State.EXECUTING, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
// test emit
EasyMock.expect(mockPlannerContext.getSqlQueryId()).andReturn("id").once();
EasyMock.expect(mockPlannerContext.getNativeQueryIds()).andReturn(ImmutableList.of("id")).times(2);
EasyMock.expect(mockPlannerContext.getAuthenticationResult()).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).once();
serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
EasyMock.expectLastCall();
serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
EasyMock.expectLastCall();
requestLogger.logSqlQuery(EasyMock.anyObject());
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
lifecycle.emitLogsAndMetrics(null, null, 10);
Assert.assertEquals(SqlLifecycle.State.DONE, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
}
@Test
public void testStateTransitionHttpRequest()
throws ValidationException, SqlParseException, RelConversionException, IOException
{
// this test is a duplicate of testStateTransition except with a slight variation of how validate and authorize
// is run
SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
final String sql = "select 1 + ?";
final Map<String, Object> queryContext = Collections.emptyMap();
Assert.assertEquals(SqlLifecycle.State.NEW, lifecycle.getState());
// test initialize
lifecycle.initialize(sql, queryContext);
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
List<TypedValue> parameters = ImmutableList.of(new SqlParameter(SqlType.BIGINT, 1L).getTypedValue());
lifecycle.setParameters(parameters);
// setting parameters should not change the state
Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState());
// test authorization
DruidPlanner mockPlanner = EasyMock.createMock(DruidPlanner.class);
PlannerContext mockPlannerContext = EasyMock.createMock(PlannerContext.class);
ValidationResult validationResult = new ValidationResult(Collections.emptySet());
EasyMock.expect(plannerFactory.createPlanner(EasyMock.anyObject())).andReturn(mockPlanner).once();
EasyMock.expect(mockPlanner.getPlannerContext()).andReturn(mockPlannerContext).once();
mockPlannerContext.setAuthenticationResult(CalciteTests.REGULAR_USER_AUTH_RESULT);
EasyMock.expectLastCall();
mockPlannerContext.setParameters(parameters);
EasyMock.expectLastCall();
EasyMock.expect(plannerFactory.getAuthorizerMapper()).andReturn(CalciteTests.TEST_AUTHORIZER_MAPPER).once();
mockPlannerContext.setAuthorizationResult(Access.OK);
EasyMock.expectLastCall();
EasyMock.expect(mockPlanner.validate(sql)).andReturn(validationResult).once();
mockPlanner.close();
EasyMock.expectLastCall();
HttpServletRequest request = EasyMock.createMock(HttpServletRequest.class);
EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).times(2);
EasyMock.expect(request.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).once();
EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).once();
request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request);
lifecycle.validateAndAuthorize(request);
Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, request);
// test prepare
PrepareResult mockPrepareResult = EasyMock.createMock(PrepareResult.class);
EasyMock.expect(plannerFactory.createPlannerWithContext(EasyMock.eq(mockPlannerContext))).andReturn(mockPlanner).once();
EasyMock.expect(mockPlanner.prepare(sql)).andReturn(mockPrepareResult).once();
mockPlanner.close();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
lifecycle.prepare();
// prepare doesn't change lifecycle state // prepare doesn't change lifecycle state
Assert.assertEquals(SqlLifecycle.State.AUTHORIZED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult);
// test plan
PlannerResult mockPlanResult = EasyMock.createMock(PlannerResult.class);
EasyMock.expect(plannerFactory.createPlannerWithContext(EasyMock.eq(mockPlannerContext))).andReturn(mockPlanner).once();
EasyMock.expect(mockPlanner.plan(sql)).andReturn(mockPlanResult).once();
mockPlanner.close();
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
PlannerContext context = lifecycle.plan();
Assert.assertEquals(mockPlannerContext, context);
Assert.assertEquals(SqlLifecycle.State.PLANNED, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
// test execute
EasyMock.expect(mockPlanResult.run()).andReturn(Sequences.simple(ImmutableList.of(new Object[]{2L}))).once();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
lifecycle.execute();
Assert.assertEquals(SqlLifecycle.State.EXECUTING, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
// test emit
EasyMock.expect(mockPlannerContext.getSqlQueryId()).andReturn("id").once();
EasyMock.expect(mockPlannerContext.getNativeQueryIds()).andReturn(ImmutableList.of("id")).times(2);
EasyMock.expect(mockPlannerContext.getAuthenticationResult()).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).once();
serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
EasyMock.expectLastCall();
serviceEmitter.emit(EasyMock.anyObject(ServiceEventBuilder.class));
EasyMock.expectLastCall();
requestLogger.logSqlQuery(EasyMock.anyObject());
EasyMock.expectLastCall();
EasyMock.replay(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
lifecycle.emitLogsAndMetrics(null, null, 10);
Assert.assertEquals(SqlLifecycle.State.DONE, lifecycle.getState());
EasyMock.verify(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
EasyMock.reset(plannerFactory, serviceEmitter, requestLogger, mockPlanner, mockPlannerContext, mockPrepareResult, mockPlanResult);
}
}