SQL: Remove unused escalator, authConfig from various classes. (#5483)

DruidPlanner.plan is responsible for checking authorization, so these objects
weren't needed in as many places as they were injected.
This commit is contained in:
Gian Merlino 2018-03-14 13:28:51 -07:00 committed by Jonathan Wei
parent 40cc2c8740
commit fdd55538e1
15 changed files with 135 additions and 68 deletions

View File

@ -37,7 +37,6 @@ import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.groupby.GroupByQuery;
import io.druid.segment.QueryableIndex;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthTestUtils;
import io.druid.server.security.NoopEscalator;
import io.druid.sql.calcite.planner.DruidPlanner;
@ -118,9 +117,7 @@ public class SqlBenchmark
CalciteTests.createOperatorTable(),
CalciteTests.createExprMacroTable(),
plannerConfig,
new AuthConfig(),
AuthTestUtils.TEST_AUTHORIZER_MAPPER,
new NoopEscalator(),
CalciteTests.getJsonMapper()
);
groupByQuery = GroupByQuery
@ -182,7 +179,10 @@ public class SqlBenchmark
public void queryPlanner(Blackhole blackhole) throws Exception
{
try (final DruidPlanner planner = plannerFactory.createPlanner(null)) {
final PlannerResult plannerResult = planner.plan(sqlQuery);
final PlannerResult plannerResult = planner.plan(
sqlQuery,
NoopEscalator.getInstance().createEscalatedAuthenticationResult()
);
final List<Object[]> results = plannerResult.run().toList();
blackhole.consume(results);
}

View File

@ -47,7 +47,6 @@ import io.druid.segment.column.ValueType;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.virtual.ExpressionVirtualColumn;
import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthTestUtils;
import io.druid.server.security.NoopEscalator;
import io.druid.sql.calcite.filtration.Filtration;
@ -137,9 +136,7 @@ public class QuantileSqlAggregatorTest extends CalciteTestBase
operatorTable,
CalciteTests.createExprMacroTable(),
plannerConfig,
new AuthConfig(),
AuthTestUtils.TEST_AUTHORIZER_MAPPER,
new NoopEscalator(),
CalciteTests.getJsonMapper()
);
}
@ -167,7 +164,10 @@ public class QuantileSqlAggregatorTest extends CalciteTestBase
+ "APPROX_QUANTILE(cnt, 0.5)\n"
+ "FROM foo";
final PlannerResult plannerResult = planner.plan(sql);
final PlannerResult plannerResult = planner.plan(
sql,
NoopEscalator.getInstance().createEscalatedAuthenticationResult()
);
// Verify results
final List<Object[]> results = plannerResult.run().toList();
@ -249,7 +249,10 @@ public class QuantileSqlAggregatorTest extends CalciteTestBase
+ "APPROX_QUANTILE(hist_m1, 0.999) FILTER(WHERE dim1 = 'abc')\n"
+ "FROM foo";
final PlannerResult plannerResult = planner.plan(sql);
final PlannerResult plannerResult = planner.plan(
sql,
NoopEscalator.getInstance().createEscalatedAuthenticationResult()
);
// Verify results
final List<Object[]> results = plannerResult.run().toList();
@ -302,7 +305,10 @@ public class QuantileSqlAggregatorTest extends CalciteTestBase
final String sql = "SELECT AVG(x), APPROX_QUANTILE(x, 0.98)\n"
+ "FROM (SELECT dim2, SUM(m1) AS x FROM foo GROUP BY dim2)";
final PlannerResult plannerResult = planner.plan(sql);
final PlannerResult plannerResult = planner.plan(
sql,
NoopEscalator.getInstance().createEscalatedAuthenticationResult()
);
// Verify results
final List<Object[]> results = plannerResult.run().toList();

View File

@ -19,10 +19,19 @@
package io.druid.server.security;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.druid.java.util.http.client.HttpClient;
public class NoopEscalator implements Escalator
{
private static final NoopEscalator INSTANCE = new NoopEscalator();
// Factory for the shared singleton. Annotated with @JsonCreator so Jackson deserialization
// of a NoopEscalator always yields this same instance rather than constructing a new one.
@JsonCreator
public static NoopEscalator getInstance()
{
return INSTANCE;
}
@Override
public HttpClient createEscalatedClient(HttpClient baseClient)
{
@ -34,4 +43,23 @@ public class NoopEscalator implements Escalator
{
return AllowAllAuthenticator.ALLOW_ALL_RESULT;
}
@Override
public boolean equals(final Object obj)
{
  // The Object.equals contract requires equals(null) to return false; guard before
  // dereferencing obj, otherwise this would throw NullPointerException.
  if (obj == null) {
    return false;
  }
  // All NoopEscalator instances are interchangeable, so exact-class equality suffices.
  //noinspection ObjectEquality
  return obj.getClass() == getClass();
}
@Override
public int hashCode()
{
// Every NoopEscalator is equal to every other, so a constant hash code
// satisfies the equals/hashCode contract.
return 0;
}
@Override
public String toString()
{
// Stable, brace-style representation consistent with other Druid value objects.
return "NoopEscalator{}";
}
}

View File

@ -0,0 +1,41 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.server.security;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.segment.TestHelper;
import org.junit.Assert;
import org.junit.Test;
public class EscalatorTest
{
  /**
   * Verifies that a NoopEscalator survives a JSON round trip through the
   * polymorphic {@code Escalator} type and comes back equal to the original
   * (i.e. the singleton, given the {@code @JsonCreator} factory).
   */
  @Test
  public void testSerde() throws Exception
  {
    final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
    final NoopEscalator original = NoopEscalator.getInstance();
    final String serialized = objectMapper.writeValueAsString(original);
    final Escalator deserialized = objectMapper.readValue(serialized, Escalator.class);
    Assert.assertEquals(original, deserialized);
  }
}

View File

@ -34,7 +34,6 @@ import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.logger.Logger;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthenticationResult;
import io.druid.server.security.Authenticator;
import io.druid.server.security.AuthenticatorMapper;
@ -67,7 +66,6 @@ public class DruidMeta extends MetaImpl
private final PlannerFactory plannerFactory;
private final ScheduledExecutorService exec;
private final AvaticaServerConfig config;
private final AuthConfig authConfig;
private final List<Authenticator> authenticators;
// Used to track logical connections.
@ -81,14 +79,12 @@ public class DruidMeta extends MetaImpl
public DruidMeta(
final PlannerFactory plannerFactory,
final AvaticaServerConfig config,
final AuthConfig authConfig,
final Injector injector
)
{
super(null);
this.plannerFactory = Preconditions.checkNotNull(plannerFactory, "plannerFactory");
this.config = config;
this.authConfig = authConfig;
this.exec = Executors.newSingleThreadScheduledExecutor(
new ThreadFactoryBuilder()
.setNameFormat(StringUtils.format("DruidMeta@%s-ScheduledExecutor", Integer.toHexString(hashCode())))
@ -183,7 +179,8 @@ public class DruidMeta extends MetaImpl
if (authenticationResult == null) {
throw new ForbiddenException("Authentication failed.");
}
final Signature signature = druidStatement.prepare(plannerFactory, sql, maxRowCount, authenticationResult).getSignature();
final Signature signature = druidStatement.prepare(plannerFactory, sql, maxRowCount, authenticationResult)
.getSignature();
final Frame firstFrame = druidStatement.execute()
.nextFrame(
DruidStatement.START_OFFSET,

View File

@ -162,7 +162,7 @@ public class DruidStatement implements Closeable
try (final DruidPlanner planner = plannerFactory.createPlanner(queryContext)) {
synchronized (lock) {
ensure(State.NEW);
this.plannerResult = planner.plan(query, null, authenticationResult);
this.plannerResult = planner.plan(query, authenticationResult);
this.maxRowCount = maxRowCount;
this.query = query;
this.signature = Meta.Signature.create(

View File

@ -20,11 +20,13 @@
package io.druid.sql.calcite.planner;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.server.security.Access;
@ -32,7 +34,6 @@ import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthenticationResult;
import io.druid.server.security.AuthorizationUtils;
import io.druid.server.security.AuthorizerMapper;
import io.druid.server.security.Escalator;
import io.druid.server.security.ForbiddenException;
import io.druid.sql.calcite.rel.DruidConvention;
import io.druid.sql.calcite.rel.DruidRel;
@ -61,6 +62,7 @@ import org.apache.calcite.tools.RelConversionException;
import org.apache.calcite.tools.ValidationException;
import org.apache.calcite.util.Pair;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import java.io.Closeable;
import java.util.ArrayList;
@ -72,33 +74,44 @@ public class DruidPlanner implements Closeable
private final Planner planner;
private final PlannerContext plannerContext;
private final AuthorizerMapper authorizerMapper;
private final Escalator escalator;
public DruidPlanner(
DruidPlanner(
final Planner planner,
final PlannerContext plannerContext,
final AuthorizerMapper authorizerMapper,
final Escalator escalator
final AuthorizerMapper authorizerMapper
)
{
this.planner = planner;
this.plannerContext = plannerContext;
this.authorizerMapper = authorizerMapper;
this.escalator = escalator;
}
public PlannerResult plan(final String sql) throws SqlParseException, ValidationException, RelConversionException
{
AuthenticationResult authenticationResult = escalator.createEscalatedAuthenticationResult();
return plan(sql, null, authenticationResult);
}
public PlannerResult plan(
final String sql,
final HttpServletRequest request,
final HttpServletRequest request
) throws SqlParseException, ValidationException, RelConversionException, ForbiddenException
{
return plan(sql, Preconditions.checkNotNull(request, "request"), null);
}
public PlannerResult plan(
final String sql,
final AuthenticationResult authenticationResult
) throws SqlParseException, ValidationException, RelConversionException, ForbiddenException
{
return plan(sql, null, Preconditions.checkNotNull(authenticationResult, "authenticationResult"));
}
private PlannerResult plan(
final String sql,
@Nullable final HttpServletRequest request,
@Nullable final AuthenticationResult authenticationResult
) throws SqlParseException, ValidationException, RelConversionException, ForbiddenException
{
if (authenticationResult != null && request != null) {
throw new ISE("Cannot specify both 'request' and 'authenticationResult'");
}
SqlExplain explain = null;
SqlNode parsed = planner.parse(sql);
if (parsed.getKind() == SqlKind.EXPLAIN) {
@ -137,8 +150,8 @@ public class DruidPlanner implements Closeable
private PlannerResult planWithDruidConvention(
final SqlExplain explain,
final RelRoot root,
final HttpServletRequest request,
final AuthenticationResult authenticationResult
@Nullable final HttpServletRequest request,
@Nullable final AuthenticationResult authenticationResult
) throws RelConversionException, ForbiddenException
{
final DruidRel<?> druidRel = (DruidRel<?>) planner.transform(

View File

@ -24,9 +24,7 @@ import com.google.inject.Inject;
import io.druid.guice.annotations.Json;
import io.druid.math.expr.ExprMacroTable;
import io.druid.server.QueryLifecycleFactory;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthorizerMapper;
import io.druid.server.security.Escalator;
import io.druid.sql.calcite.rel.QueryMaker;
import io.druid.sql.calcite.schema.DruidSchema;
import org.apache.calcite.avatica.util.Casing;
@ -64,10 +62,7 @@ public class PlannerFactory
private final ExprMacroTable macroTable;
private final PlannerConfig plannerConfig;
private final ObjectMapper jsonMapper;
private final AuthConfig authConfig;
private final AuthorizerMapper authorizerMapper;
private final Escalator escalator;
@Inject
public PlannerFactory(
@ -76,9 +71,7 @@ public class PlannerFactory
final DruidOperatorTable operatorTable,
final ExprMacroTable macroTable,
final PlannerConfig plannerConfig,
final AuthConfig authConfig,
final AuthorizerMapper authorizerMapper,
final Escalator escalator,
final @Json ObjectMapper jsonMapper
)
{
@ -87,9 +80,7 @@ public class PlannerFactory
this.operatorTable = operatorTable;
this.macroTable = macroTable;
this.plannerConfig = plannerConfig;
this.authConfig = authConfig;
this.authorizerMapper = authorizerMapper;
this.escalator = escalator;
this.jsonMapper = jsonMapper;
}
@ -151,8 +142,7 @@ public class PlannerFactory
return new DruidPlanner(
Frameworks.getPlanner(frameworkConfig),
plannerContext,
authorizerMapper,
escalator
authorizerMapper
);
}
}

View File

@ -21,6 +21,7 @@ package io.druid.sql.calcite.view;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import io.druid.server.security.Escalator;
import io.druid.sql.calcite.planner.DruidPlanner;
import io.druid.sql.calcite.planner.PlannerFactory;
import io.druid.sql.calcite.schema.DruidSchema;
@ -36,11 +37,13 @@ import java.util.List;
public class DruidViewMacro implements TableMacro
{
private final PlannerFactory plannerFactory;
private final Escalator escalator;
private final String viewSql;
public DruidViewMacro(final PlannerFactory plannerFactory, final String viewSql)
public DruidViewMacro(final PlannerFactory plannerFactory, final Escalator escalator, final String viewSql)
{
this.plannerFactory = plannerFactory;
this.escalator = escalator;
this.viewSql = viewSql;
}
@ -49,7 +52,9 @@ public class DruidViewMacro implements TableMacro
{
final RelDataType rowType;
try (final DruidPlanner planner = plannerFactory.createPlanner(null)) {
rowType = planner.plan(viewSql).rowType();
// Using an escalator here is a hack, but it's currently needed to get the row type. Ideally, some
// later refactoring would make this unnecessary, since there is no actual query going out here.
rowType = planner.plan(viewSql, escalator.createEscalatedAuthenticationResult()).rowType();
}
catch (Exception e) {
throw Throwables.propagate(e);

View File

@ -21,6 +21,7 @@ package io.druid.sql.calcite.view;
import com.google.inject.Inject;
import io.druid.java.util.common.ISE;
import io.druid.server.security.Escalator;
import io.druid.sql.calcite.planner.PlannerFactory;
import org.apache.calcite.schema.TableMacro;
@ -35,17 +36,21 @@ import java.util.concurrent.ConcurrentMap;
public class InProcessViewManager implements ViewManager
{
private final ConcurrentMap<String, DruidViewMacro> views;
private final Escalator escalator;
@Inject
public InProcessViewManager()
public InProcessViewManager(
final Escalator escalator
)
{
this.views = new ConcurrentHashMap<>();
this.escalator = escalator;
}
@Override
public void createView(final PlannerFactory plannerFactory, final String viewName, final String viewSql)
{
final TableMacro oldValue = views.putIfAbsent(viewName, new DruidViewMacro(plannerFactory, viewSql));
final TableMacro oldValue = views.putIfAbsent(viewName, new DruidViewMacro(plannerFactory, escalator, viewSql));
if (oldValue != null) {
throw new ISE("View[%s] already exists", viewName);
}
@ -54,7 +59,7 @@ public class InProcessViewManager implements ViewManager
@Override
public void alterView(final PlannerFactory plannerFactory, final String viewName, final String viewSql)
{
final TableMacro oldValue = views.replace(viewName, new DruidViewMacro(plannerFactory, viewSql));
final TableMacro oldValue = views.replace(viewName, new DruidViewMacro(plannerFactory, escalator, viewSql));
if (oldValue != null) {
throw new ISE("View[%s] does not exist", viewName);
}

View File

@ -85,7 +85,7 @@ public class SqlResource
final DateTimeZone timeZone;
try (final DruidPlanner planner = plannerFactory.createPlanner(sqlQuery.getContext())) {
plannerResult = planner.plan(sqlQuery.getQuery(), req, null);
plannerResult = planner.plan(sqlQuery.getQuery(), req);
timeZone = planner.getPlannerContext().getTimeZone();
// Remember which columns are time-typed, so we can emit ISO8601 instead of millis values.

View File

@ -41,12 +41,10 @@ import io.druid.java.util.common.Pair;
import io.druid.java.util.common.StringUtils;
import io.druid.math.expr.ExprMacroTable;
import io.druid.server.DruidNode;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthTestUtils;
import io.druid.server.security.AuthenticatorMapper;
import io.druid.server.security.AuthorizerMapper;
import io.druid.server.security.Escalator;
import io.druid.server.security.NoopEscalator;
import io.druid.sql.calcite.planner.Calcites;
import io.druid.sql.calcite.planner.DruidOperatorTable;
import io.druid.sql.calcite.planner.PlannerConfig;
@ -160,13 +158,10 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
operatorTable,
macroTable,
plannerConfig,
new AuthConfig(),
CalciteTests.TEST_AUTHORIZER_MAPPER,
CalciteTests.TEST_AUTHENTICATOR_ESCALATOR,
CalciteTests.getJsonMapper()
),
AVATICA_CONFIG,
new AuthConfig(),
injector
);
final DruidAvaticaHandler handler = new DruidAvaticaHandler(
@ -744,13 +739,10 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
operatorTable,
macroTable,
plannerConfig,
new AuthConfig(),
AuthTestUtils.TEST_AUTHORIZER_MAPPER,
new NoopEscalator(),
CalciteTests.getJsonMapper()
),
smallFrameConfig,
new AuthConfig(),
injector
)
{

View File

@ -24,9 +24,7 @@ import com.google.common.collect.Lists;
import io.druid.java.util.common.DateTimes;
import io.druid.math.expr.ExprMacroTable;
import io.druid.server.security.AllowAllAuthenticator;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthTestUtils;
import io.druid.server.security.NoopEscalator;
import io.druid.sql.calcite.planner.DruidOperatorTable;
import io.druid.sql.calcite.planner.PlannerConfig;
import io.druid.sql.calcite.planner.PlannerFactory;
@ -74,9 +72,7 @@ public class DruidStatementTest extends CalciteTestBase
operatorTable,
macroTable,
plannerConfig,
new AuthConfig(),
AuthTestUtils.TEST_AUTHORIZER_MAPPER,
new NoopEscalator(),
CalciteTests.getJsonMapper()
);
}

View File

@ -84,7 +84,6 @@ import io.druid.query.topn.TopNQueryBuilder;
import io.druid.segment.column.Column;
import io.druid.segment.column.ValueType;
import io.druid.segment.virtual.ExpressionVirtualColumn;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthenticationResult;
import io.druid.server.security.ForbiddenException;
import io.druid.sql.calcite.filtration.Filtration;
@ -6484,7 +6483,7 @@ public class CalciteQueryTest extends CalciteTestBase
final AuthenticationResult authenticationResult
) throws Exception
{
final InProcessViewManager viewManager = new InProcessViewManager();
final InProcessViewManager viewManager = new InProcessViewManager(CalciteTests.TEST_AUTHENTICATOR_ESCALATOR);
final DruidSchema druidSchema = CalciteTests.createMockSchema(walker, plannerConfig, viewManager);
final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
final ExprMacroTable macroTable = CalciteTests.createExprMacroTable();
@ -6495,9 +6494,7 @@ public class CalciteQueryTest extends CalciteTestBase
operatorTable,
macroTable,
plannerConfig,
new AuthConfig(),
CalciteTests.TEST_AUTHORIZER_MAPPER,
CalciteTests.TEST_AUTHENTICATOR_ESCALATOR,
CalciteTests.getJsonMapper()
);
@ -6515,7 +6512,7 @@ public class CalciteQueryTest extends CalciteTestBase
);
try (DruidPlanner planner = plannerFactory.createPlanner(queryContext)) {
final PlannerResult plan = planner.plan(sql, null, authenticationResult);
final PlannerResult plan = planner.plan(sql, authenticationResult);
return plan.run().toList();
}
}

View File

@ -32,7 +32,6 @@ import io.druid.query.ResourceLimitExceededException;
import io.druid.server.security.AllowAllAuthenticator;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthTestUtils;
import io.druid.server.security.NoopEscalator;
import io.druid.sql.calcite.planner.DruidOperatorTable;
import io.druid.sql.calcite.planner.PlannerConfig;
import io.druid.sql.calcite.planner.PlannerContext;
@ -108,9 +107,7 @@ public class SqlResourceTest extends CalciteTestBase
operatorTable,
macroTable,
plannerConfig,
new AuthConfig(),
AuthTestUtils.TEST_AUTHORIZER_MAPPER,
new NoopEscalator(),
CalciteTests.getJsonMapper()
)
);