Filter unauthorized datasources in INFORMATION_SCHEMA queries (#4998)

* Filter unauthorized datasources in INFORMATION_SCHEMA queries

* PR comments
Jonathan Wei 2017-10-26 12:36:47 -07:00 committed by Gian Merlino
parent 125a912067
commit 3e0a6fc374
8 changed files with 501 additions and 29 deletions

View File

@ -206,6 +206,48 @@ public class AuthorizationUtils
final AuthenticationResult authenticationResult = authenticationResultFromRequest(request);
final Iterable<ResType> filteredResources = filterAuthorizedResources(
authenticationResult,
resources,
resourceActionGenerator,
authorizerMapper
);
// We're filtering, so having access to none of the objects isn't an authorization failure (in terms of whether
// to send an error response or not.)
request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
return filteredResources;
}
/**
* Filter a collection of resources by applying the resourceActionGenerator to each resource, returning an
* iterable containing the filtered resources.
*
* The resourceActionGenerator returns an Iterable<ResourceAction> for each resource.
*
* If every resource-action in the iterable is authorized, the resource will be added to the filtered resources.
*
* If there is an authorization failure for one of the resource-actions, the resource will not be
* added to the returned filtered resources.
*
* If the resourceActionGenerator returns null for a resource, that resource will not be added to the filtered
* resources.
*
* @param authenticationResult Authentication result representing identity of requester
* @param resources resources to be processed into resource-actions
* @param resourceActionGenerator Function that creates an iterable of resource-actions from a resource
* @param authorizerMapper authorizer mapper
*
* @return Iterable containing resources that were authorized
*/
public static <ResType> Iterable<ResType> filterAuthorizedResources(
final AuthenticationResult authenticationResult,
final Iterable<ResType> resources,
final Function<? super ResType, Iterable<ResourceAction>> resourceActionGenerator,
final AuthorizerMapper authorizerMapper
)
{
final Authorizer authorizer = authorizerMapper.getAuthorizer(authenticationResult.getAuthorizerName());
if (authorizer == null) {
throw new ISE("No authorizer found with name: [%s].", authenticationResult.getAuthorizerName());
@ -236,10 +278,6 @@ public class AuthorizationUtils
}
);
// We're filtering, so having access to none of the objects isn't an authorization failure (in terms of whether
// to send an error response or not.)
request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
return filteredResources;
}
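A minimal usage sketch of the generic overload above, assuming an AuthenticationResult, an AuthorizerMapper, and an Iterable&lt;String&gt; of candidate datasource names are already in scope; the variable names are illustrative and not part of this commit (requires java.util.Collections and the same Function type the overload accepts):

// Map each datasource name to a single READ resource-action, mirroring how
// InformationSchema's DRUID_TABLE_RA_GENERATOR does it further down in this change.
final Function<String, Iterable<ResourceAction>> raGenerator = datasourceName ->
    Collections.singletonList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(datasourceName));

// Unauthorized names are silently dropped; ending up with none of them is not treated as an error.
final Iterable<String> readableDatasources = AuthorizationUtils.filterAuthorizedResources(
    authenticationResult,   // identity of the requester (assumed in scope)
    datasourceNames,        // Iterable<String> of candidate names (assumed in scope)
    raGenerator,
    authorizerMapper        // assumed injected AuthorizerMapper
);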

View File

@ -28,6 +28,7 @@ import io.druid.java.util.common.StringUtils;
import io.druid.query.ordering.StringComparator;
import io.druid.query.ordering.StringComparators;
import io.druid.segment.column.ValueType;
import io.druid.server.security.AuthorizerMapper;
import io.druid.sql.calcite.schema.DruidSchema;
import io.druid.sql.calcite.schema.InformationSchema;
import org.apache.calcite.jdbc.CalciteSchema;
@ -93,11 +94,11 @@ public class Calcites
return DEFAULT_CHARSET;
}
public static SchemaPlus createRootSchema(final Schema druidSchema)
public static SchemaPlus createRootSchema(final Schema druidSchema, final AuthorizerMapper authorizerMapper)
{
final SchemaPlus rootSchema = CalciteSchema.createRootSchema(false, false).plus();
rootSchema.add(DruidSchema.NAME, druidSchema);
rootSchema.add(InformationSchema.NAME, new InformationSchema(rootSchema));
rootSchema.add(InformationSchema.NAME, new InformationSchema(rootSchema, authorizerMapper));
return rootSchema;
}

View File

@ -41,9 +41,12 @@ import java.util.Map;
*/
public class PlannerContext
{
// query context keys
public static final String CTX_SQL_CURRENT_TIMESTAMP = "sqlCurrentTimestamp";
public static final String CTX_SQL_TIME_ZONE = "sqlTimeZone";
public static final String CTX_AUTHENTICATION_RESULT = "authenticationResult";
// DataContext keys
public static final String DATA_CTX_AUTHENTICATION_RESULT = "authenticationResult";
private final DruidOperatorTable operatorTable;
private final ExprMacroTable macroTable;
@ -170,7 +173,8 @@ public class PlannerContext
new DateTime("1970-01-01T00:00:00.000", localNow.getZone()),
localNow
).toDurationMillis(),
DataContext.Variable.TIME_ZONE.camelName, localNow.getZone().toTimeZone()
DataContext.Variable.TIME_ZONE.camelName, localNow.getZone().toTimeZone().clone(),
DATA_CTX_AUTHENTICATION_RESULT, authenticationResult
);
@Override

View File

@ -95,7 +95,7 @@ public class PlannerFactory
public DruidPlanner createPlanner(final Map<String, Object> queryContext)
{
final SchemaPlus rootSchema = Calcites.createRootSchema(druidSchema);
final SchemaPlus rootSchema = Calcites.createRootSchema(druidSchema, authorizerMapper);
final PlannerContext plannerContext = PlannerContext.create(
operatorTable,
macroTable,

View File

@ -25,9 +25,16 @@ import com.google.common.base.Predicates;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import io.druid.segment.column.ValueType;
import io.druid.server.security.AuthenticationResult;
import io.druid.server.security.AuthorizationUtils;
import io.druid.server.security.AuthorizerMapper;
import io.druid.server.security.ResourceAction;
import io.druid.sql.calcite.planner.PlannerContext;
import io.druid.sql.calcite.table.RowSignature;
import org.apache.calcite.DataContext;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
@ -50,6 +57,7 @@ import org.apache.calcite.sql.type.SqlTypeName;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
public class InformationSchema extends AbstractSchema
{
@ -97,12 +105,19 @@ public class InformationSchema extends AbstractSchema
.add("JDBC_TYPE", ValueType.LONG)
.build();
private static final RelDataTypeSystem TYPE_SYSTEM = RelDataTypeSystem.DEFAULT;
private static final Function<String, Iterable<ResourceAction>> DRUID_TABLE_RA_GENERATOR = datasourceName -> {
return Lists.newArrayList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(datasourceName));
};
private final SchemaPlus rootSchema;
private final Map<String, Table> tableMap;
private final AuthorizerMapper authorizerMapper;
@Inject
public InformationSchema(final SchemaPlus rootSchema)
public InformationSchema(
final SchemaPlus rootSchema,
final AuthorizerMapper authorizerMapper
)
{
this.rootSchema = Preconditions.checkNotNull(rootSchema, "rootSchema");
this.tableMap = ImmutableMap.<String, Table>of(
@ -110,6 +125,7 @@ public class InformationSchema extends AbstractSchema
TABLES_TABLE, new TablesTable(),
COLUMNS_TABLE, new ColumnsTable()
);
this.authorizerMapper = authorizerMapper;
}
@Override
@ -181,9 +197,23 @@ public class InformationSchema extends AbstractSchema
public Iterable<Object[]> apply(final String schemaName)
{
final SchemaPlus subSchema = rootSchema.getSubSchema(schemaName);
final AuthenticationResult authenticationResult =
(AuthenticationResult) root.get(PlannerContext.DATA_CTX_AUTHENTICATION_RESULT);
final Set<String> authorizedTableNames = getAuthorizedTableNamesFromSubSchema(
subSchema,
authenticationResult
);
final Set<String> authorizedFunctionNames = getAuthorizedFunctionNamesFromSubSchema(
subSchema,
authenticationResult
);
return Iterables.filter(
Iterables.concat(
FluentIterable.from(subSchema.getTableNames()).transform(
FluentIterable.from(authorizedTableNames).transform(
new Function<String, Object[]>()
{
@Override
@ -198,7 +228,7 @@ public class InformationSchema extends AbstractSchema
}
}
),
FluentIterable.from(subSchema.getFunctionNames()).transform(
FluentIterable.from(authorizedFunctionNames).transform(
new Function<String, Object[]>()
{
@Override
@ -262,10 +292,23 @@ public class InformationSchema extends AbstractSchema
final SchemaPlus subSchema = rootSchema.getSubSchema(schemaName);
final JavaTypeFactoryImpl typeFactory = new JavaTypeFactoryImpl(TYPE_SYSTEM);
final AuthenticationResult authenticationResult =
(AuthenticationResult) root.get(PlannerContext.DATA_CTX_AUTHENTICATION_RESULT);
final Set<String> authorizedTableNames = getAuthorizedTableNamesFromSubSchema(
subSchema,
authenticationResult
);
final Set<String> authorizedFunctionNames = getAuthorizedFunctionNamesFromSubSchema(
subSchema,
authenticationResult
);
return Iterables.concat(
Iterables.filter(
Iterables.concat(
FluentIterable.from(subSchema.getTableNames()).transform(
FluentIterable.from(authorizedTableNames).transform(
new Function<String, Iterable<Object[]>>()
{
@Override
@ -280,7 +323,7 @@ public class InformationSchema extends AbstractSchema
}
}
),
FluentIterable.from(subSchema.getFunctionNames()).transform(
FluentIterable.from(authorizedFunctionNames).transform(
new Function<String, Iterable<Object[]>>()
{
@Override
@ -402,4 +445,47 @@ public class InformationSchema extends AbstractSchema
return null;
}
private Set<String> getAuthorizedTableNamesFromSubSchema(
final SchemaPlus subSchema,
final AuthenticationResult authenticationResult
)
{
if (DruidSchema.NAME.equals(subSchema.getName())) {
// The "druid" schema's tables represent Druid datasources which require authorization
return ImmutableSet.copyOf(
AuthorizationUtils.filterAuthorizedResources(
authenticationResult,
subSchema.getTableNames(),
DRUID_TABLE_RA_GENERATOR,
authorizerMapper
)
);
} else {
// for non "druid" schema, we don't filter anything
return subSchema.getTableNames();
}
}
private Set<String> getAuthorizedFunctionNamesFromSubSchema(
final SchemaPlus subSchema,
final AuthenticationResult authenticationResult
)
{
if (DruidSchema.NAME.equals(subSchema.getName())) {
// The "druid" schema's functions represent views on Druid datasources, authorize them as if they were
// datasources for now
return ImmutableSet.copyOf(
AuthorizationUtils.filterAuthorizedResources(
authenticationResult,
subSchema.getFunctionNames(),
DRUID_TABLE_RA_GENERATOR,
authorizerMapper
)
);
} else {
// for non "druid" schema, we don't filter anything
return subSchema.getFunctionNames();
}
}
}
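For reference, a hedged sketch of the check performed for each entry of the "druid" schema: DRUID_TABLE_RA_GENERATOR maps a table or view name to a single READ resource-action, which the mapped Authorizer then grants or denies. The datasource name and the authorizer/authenticationResult variables below are illustrative, not taken from this commit:

// Roughly the resource-action generated for one datasource name:
ResourceAction readAction = new ResourceAction(
    new Resource("forbiddenDatasource", ResourceType.DATASOURCE),
    Action.READ
);
// The schema keeps the name only if the authorizer allows the action:
Access access = authorizer.authorize(authenticationResult, readAction.getResource(), readAction.getAction());
boolean visibleInInformationSchema = access.isAllowed();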

View File

@ -43,6 +43,8 @@ import io.druid.math.expr.ExprMacroTable;
import io.druid.server.DruidNode;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthTestUtils;
import io.druid.server.security.AuthenticatorMapper;
import io.druid.server.security.AuthorizerMapper;
import io.druid.sql.calcite.planner.Calcites;
import io.druid.sql.calcite.planner.DruidOperatorTable;
import io.druid.sql.calcite.planner.PlannerConfig;
@ -114,6 +116,7 @@ public class DruidAvaticaHandlerTest
private SpecificSegmentsQuerySegmentWalker walker;
private Server server;
private Connection client;
private Connection superuserClient;
private Connection clientLosAngeles;
private DruidMeta druidMeta;
private String url;
@ -140,6 +143,8 @@ public class DruidAvaticaHandlerTest
binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test");
binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
binder.bind(AuthenticatorMapper.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_MAPPER);
binder.bind(AuthorizerMapper.class).toInstance(CalciteTests.TEST_AUTHORIZER_MAPPER);
}
}
)
@ -152,8 +157,8 @@ public class DruidAvaticaHandlerTest
macroTable,
plannerConfig,
new AuthConfig(),
AuthTestUtils.TEST_AUTHENTICATOR_MAPPER,
AuthTestUtils.TEST_AUTHORIZER_MAPPER,
CalciteTests.TEST_AUTHENTICATOR_MAPPER,
CalciteTests.TEST_AUTHORIZER_MAPPER,
CalciteTests.getJsonMapper()
),
AVATICA_CONFIG,
@ -174,10 +179,12 @@ public class DruidAvaticaHandlerTest
port,
DruidAvaticaHandler.AVATICA_PATH
);
client = DriverManager.getConnection(url, "admin", "druid");
client = DriverManager.getConnection(url, "regularUser", "druid");
superuserClient = DriverManager.getConnection(url, CalciteTests.TEST_SUPERUSER_NAME, "druid");
final Properties propertiesLosAngeles = new Properties();
propertiesLosAngeles.setProperty("sqlTimeZone", "America/Los_Angeles");
propertiesLosAngeles.setProperty("user", "regularUserLA");
clientLosAngeles = DriverManager.getConnection(url, propertiesLosAngeles);
}
@ -344,6 +351,38 @@ public class DruidAvaticaHandlerTest
);
}
@Test
public void testDatabaseMetaDataTablesAsSuperuser() throws Exception
{
final DatabaseMetaData metaData = superuserClient.getMetaData();
Assert.assertEquals(
ImmutableList.of(
ROW(
Pair.of("TABLE_CAT", ""),
Pair.of("TABLE_NAME", CalciteTests.DATASOURCE1),
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_TYPE", "TABLE")
),
ROW(
Pair.of("TABLE_CAT", ""),
Pair.of("TABLE_NAME", CalciteTests.DATASOURCE2),
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_TYPE", "TABLE")
),
ROW(
Pair.of("TABLE_CAT", ""),
Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_TYPE", "TABLE")
)
),
getRows(
metaData.getTables(null, "druid", "%", null),
ImmutableSet.of("TABLE_CAT", "TABLE_NAME", "TABLE_SCHEM", "TABLE_TYPE")
)
);
}
@Test
public void testDatabaseMetaDataColumns() throws Exception
{
@ -414,6 +453,90 @@ public class DruidAvaticaHandlerTest
);
}
@Test
public void testDatabaseMetaDataColumnsOnForbiddenDatasource() throws Exception
{
final DatabaseMetaData metaData = client.getMetaData();
Assert.assertEquals(
ImmutableList.of(),
getRows(
metaData.getColumns(null, "dr_id", CalciteTests.FORBIDDEN_DATASOURCE, null),
ImmutableSet.of("IS_NULLABLE", "TABLE_NAME", "TABLE_SCHEM", "COLUMN_NAME", "DATA_TYPE", "TYPE_NAME")
)
);
}
@Test
public void testDatabaseMetaDataColumnsWithSuperuser() throws Exception
{
final DatabaseMetaData metaData = superuserClient.getMetaData();
Assert.assertEquals(
ImmutableList.of(
ROW(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
Pair.of("COLUMN_NAME", "__time"),
Pair.of("DATA_TYPE", Types.TIMESTAMP),
Pair.of("TYPE_NAME", "TIMESTAMP"),
Pair.of("IS_NULLABLE", "NO")
),
ROW(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
Pair.of("COLUMN_NAME", "cnt"),
Pair.of("DATA_TYPE", Types.BIGINT),
Pair.of("TYPE_NAME", "BIGINT"),
Pair.of("IS_NULLABLE", "NO")
),
ROW(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
Pair.of("COLUMN_NAME", "dim1"),
Pair.of("DATA_TYPE", Types.VARCHAR),
Pair.of("TYPE_NAME", "VARCHAR"),
Pair.of("IS_NULLABLE", "YES")
),
ROW(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
Pair.of("COLUMN_NAME", "dim2"),
Pair.of("DATA_TYPE", Types.VARCHAR),
Pair.of("TYPE_NAME", "VARCHAR"),
Pair.of("IS_NULLABLE", "YES")
),
ROW(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
Pair.of("COLUMN_NAME", "m1"),
Pair.of("DATA_TYPE", Types.FLOAT),
Pair.of("TYPE_NAME", "FLOAT"),
Pair.of("IS_NULLABLE", "NO")
),
ROW(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
Pair.of("COLUMN_NAME", "m2"),
Pair.of("DATA_TYPE", Types.DOUBLE),
Pair.of("TYPE_NAME", "DOUBLE"),
Pair.of("IS_NULLABLE", "NO")
),
ROW(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
Pair.of("COLUMN_NAME", "unique_dim1"),
Pair.of("DATA_TYPE", Types.OTHER),
Pair.of("TYPE_NAME", "OTHER"),
Pair.of("IS_NULLABLE", "NO")
)
),
getRows(
metaData.getColumns(null, "dr_id", CalciteTests.FORBIDDEN_DATASOURCE, null),
ImmutableSet.of("IS_NULLABLE", "TABLE_NAME", "TABLE_SCHEM", "COLUMN_NAME", "DATA_TYPE", "TYPE_NAME")
)
);
}
@Test(timeout = 90000)
public void testConcurrentQueries() throws Exception
{
@ -633,7 +756,7 @@ public class DruidAvaticaHandlerTest
port,
DruidAvaticaHandler.AVATICA_PATH
);
Connection smallFrameClient = DriverManager.getConnection(smallFrameUrl);
Connection smallFrameClient = DriverManager.getConnection(smallFrameUrl, "regularUser", "druid");
final ResultSet resultSet = smallFrameClient.createStatement().executeQuery(
"SELECT dim1 FROM druid.foo"

View File

@ -89,7 +89,8 @@ import io.druid.segment.column.Column;
import io.druid.segment.column.ValueType;
import io.druid.segment.virtual.ExpressionVirtualColumn;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthTestUtils;
import io.druid.server.security.AuthenticationResult;
import io.druid.server.security.ForbiddenException;
import io.druid.sql.calcite.filtration.Filtration;
import io.druid.sql.calcite.planner.Calcites;
import io.druid.sql.calcite.planner.DruidOperatorTable;
@ -367,6 +368,25 @@ public class CalciteQueryTest
new Object[]{"INFORMATION_SCHEMA", "TABLES", "SYSTEM_TABLE"}
)
);
testQuery(
PLANNER_CONFIG_DEFAULT,
"SELECT TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE\n"
+ "FROM INFORMATION_SCHEMA.TABLES\n"
+ "WHERE TABLE_TYPE IN ('SYSTEM_TABLE', 'TABLE', 'VIEW')",
CalciteTests.SUPER_USER_AUTH_RESULT,
ImmutableList.of(),
ImmutableList.of(
new Object[]{"druid", CalciteTests.DATASOURCE1, "TABLE"},
new Object[]{"druid", CalciteTests.DATASOURCE2, "TABLE"},
new Object[]{"druid", CalciteTests.FORBIDDEN_DATASOURCE, "TABLE"},
new Object[]{"druid", "aview", "VIEW"},
new Object[]{"druid", "bview", "VIEW"},
new Object[]{"INFORMATION_SCHEMA", "COLUMNS", "SYSTEM_TABLE"},
new Object[]{"INFORMATION_SCHEMA", "SCHEMATA", "SYSTEM_TABLE"},
new Object[]{"INFORMATION_SCHEMA", "TABLES", "SYSTEM_TABLE"}
)
);
}
@Test
@ -389,6 +409,37 @@ public class CalciteQueryTest
);
}
@Test
public void testInformationSchemaColumnsOnForbiddenTable() throws Exception
{
testQuery(
"SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE\n"
+ "FROM INFORMATION_SCHEMA.COLUMNS\n"
+ "WHERE TABLE_SCHEMA = 'druid' AND TABLE_NAME = 'forbiddenDatasource'",
ImmutableList.of(),
ImmutableList.of()
);
testQuery(
PLANNER_CONFIG_DEFAULT,
"SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE\n"
+ "FROM INFORMATION_SCHEMA.COLUMNS\n"
+ "WHERE TABLE_SCHEMA = 'druid' AND TABLE_NAME = 'forbiddenDatasource'",
CalciteTests.SUPER_USER_AUTH_RESULT,
ImmutableList.of(),
ImmutableList.of(
new Object[]{"__time", "TIMESTAMP", "NO"},
new Object[]{"cnt", "BIGINT", "NO"},
new Object[]{"dim1", "VARCHAR", "YES"},
new Object[]{"dim2", "VARCHAR", "YES"},
new Object[]{"m1", "FLOAT", "NO"},
new Object[]{"m2", "DOUBLE", "NO"},
new Object[]{"unique_dim1", "OTHER", "NO"}
)
);
}
@Test
public void testInformationSchemaColumnsOnView() throws Exception
{
@ -448,6 +499,33 @@ public class CalciteQueryTest
);
}
@Test
public void testSelectStarOnForbiddenTable() throws Exception
{
assertQueryIsForbidden(
"SELECT * FROM druid.forbiddenDatasource",
CalciteTests.REGULAR_USER_AUTH_RESULT
);
testQuery(
PLANNER_CONFIG_DEFAULT,
"SELECT * FROM druid.forbiddenDatasource",
CalciteTests.SUPER_USER_AUTH_RESULT,
ImmutableList.<Query>of(
newScanQueryBuilder()
.dataSource(CalciteTests.FORBIDDEN_DATASOURCE)
.intervals(QSS(Filtration.eternity()))
.columns("__time", "cnt", "dim1", "dim2", "m1", "m2", "unique_dim1")
.resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST)
.context(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
new Object[]{T("2000-01-01"), 1L, "forbidden", "abcd", 9999.0f, 0.0, HLLCV1.class.getName()}
)
);
}
@Test
public void testUnqualifiedTableName() throws Exception
{
@ -650,6 +728,7 @@ public class CalciteQueryTest
testQuery(
PLANNER_CONFIG_DEFAULT,
"SELECT dim1 FROM druid.foo GROUP BY dim1 ORDER BY dim1 DESC",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
new GroupByQuery.Builder()
.setDataSource(CalciteTests.DATASOURCE1)
@ -692,6 +771,7 @@ public class CalciteQueryTest
+ " druid.foo x INNER JOIN druid.foo y ON x.dim1 = y.dim2\n"
+ "WHERE\n"
+ " x.dim1 <> ''",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
newScanQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
@ -731,6 +811,7 @@ public class CalciteQueryTest
+ " druid.foo x INNER JOIN druid.foo y ON x.dim1 = y.dim2\n"
+ "WHERE\n"
+ " x.dim1 <> ''",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(),
ImmutableList.of(
new Object[]{explanation}
@ -1011,6 +1092,7 @@ public class CalciteQueryTest
testQuery(
PLANNER_CONFIG_FALLBACK,
"SELECT dim1, CAST(SUM(m1) AS FLOAT) AS m1_sum FROM druid.foo GROUP BY dim1 HAVING CAST(SUM(m1) AS FLOAT) > 1",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(CalciteTests.DATASOURCE1)
@ -1450,7 +1532,7 @@ public class CalciteQueryTest
{
Exception e = null;
try {
testQuery(plannerConfig, sql, ImmutableList.of(), ImmutableList.of());
testQuery(plannerConfig, sql, CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of(), ImmutableList.of());
}
catch (Exception e1) {
e = e1;
@ -1462,6 +1544,34 @@ public class CalciteQueryTest
}
}
/**
* Provided for tests that wish to check multiple queries instead of relying on ExpectedException.
*/
private void assertQueryIsForbidden(final String sql, final AuthenticationResult authenticationResult)
{
assertQueryIsForbidden(PLANNER_CONFIG_DEFAULT, sql, authenticationResult);
}
private void assertQueryIsForbidden(
final PlannerConfig plannerConfig,
final String sql,
final AuthenticationResult authenticationResult
)
{
Exception e = null;
try {
testQuery(plannerConfig, sql, authenticationResult, ImmutableList.of(), ImmutableList.of());
}
catch (Exception e1) {
e = e1;
}
if (!(e instanceof ForbiddenException)) {
log.error(e, "Expected ForbiddenException for query: %s with authResult: %s", sql, authenticationResult);
Assert.fail(sql);
}
}
@Test
public void testSelectStarWithDimFilter() throws Exception
{
@ -1995,6 +2105,7 @@ public class CalciteQueryTest
testQuery(
PLANNER_CONFIG_NO_TOPN,
"SELECT dim1, MIN(m1) + MAX(m1) AS x FROM druid.foo GROUP BY dim1 ORDER BY x LIMIT 3",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(CalciteTests.DATASOURCE1)
@ -2040,6 +2151,7 @@ public class CalciteQueryTest
PLANNER_CONFIG_DEFAULT,
QUERY_CONTEXT_NO_TOPN,
"SELECT dim1, MIN(m1) + MAX(m1) AS x FROM druid.foo GROUP BY dim1 ORDER BY x LIMIT 3",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(CalciteTests.DATASOURCE1)
@ -3202,6 +3314,7 @@ public class CalciteQueryTest
testQuery(
PLANNER_CONFIG_NO_HLL,
"SELECT COUNT(distinct dim2) FROM druid.foo",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(
@ -3240,6 +3353,7 @@ public class CalciteQueryTest
testQuery(
PLANNER_CONFIG_NO_HLL,
"SELECT APPROX_COUNT_DISTINCT(dim2) FROM druid.foo",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
Druids.newTimeseriesQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
@ -3273,6 +3387,7 @@ public class CalciteQueryTest
testQuery(
PLANNER_CONFIG_NO_HLL,
"SELECT dim2, SUM(cnt), COUNT(distinct dim1) FROM druid.foo GROUP BY dim2",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(
@ -3532,6 +3647,7 @@ public class CalciteQueryTest
+ " SUM(cnt),\n"
+ " COUNT(*)\n"
+ "FROM (SELECT dim2, SUM(cnt) AS cnt FROM druid.foo GROUP BY dim2)",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(
@ -4366,6 +4482,7 @@ public class CalciteQueryTest
PLANNER_CONFIG_DEFAULT,
QUERY_CONTEXT_LOS_ANGELES,
"SELECT CURRENT_TIMESTAMP, CURRENT_DATE, CURRENT_DATE + INTERVAL '1' DAY",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(),
ImmutableList.of(
new Object[]{T("2000-01-01T00Z", LOS_ANGELES), D("1999-12-31"), D("2000-01-01")}
@ -4381,6 +4498,7 @@ public class CalciteQueryTest
QUERY_CONTEXT_LOS_ANGELES,
"SELECT COUNT(*) FROM druid.foo\n"
+ "WHERE __time >= CURRENT_TIMESTAMP + INTERVAL '1' DAY AND __time < TIMESTAMP '2002-01-01 00:00:00'",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
Druids.newTimeseriesQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
@ -4426,6 +4544,7 @@ public class CalciteQueryTest
PLANNER_CONFIG_DEFAULT,
QUERY_CONTEXT_LOS_ANGELES,
"SELECT * FROM bview",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
Druids.newTimeseriesQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
@ -4581,6 +4700,7 @@ public class CalciteQueryTest
testQuery(
PLANNER_CONFIG_NO_SUBQUERIES, // Sanity check; this simple query should work with subqueries disabled.
"SELECT floor(CAST(dim1 AS float)), COUNT(*) FROM druid.foo GROUP BY floor(CAST(dim1 AS float))",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(CalciteTests.DATASOURCE1)
@ -4924,6 +5044,7 @@ public class CalciteQueryTest
+ ") AS x\n"
+ "GROUP BY gran\n"
+ "ORDER BY gran",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
Druids.newTimeseriesQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
@ -5090,6 +5211,7 @@ public class CalciteQueryTest
+ ") AS x\n"
+ "GROUP BY gran\n"
+ "ORDER BY gran",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
Druids.newTimeseriesQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
@ -5122,6 +5244,7 @@ public class CalciteQueryTest
+ ") AS x\n"
+ "GROUP BY gran\n"
+ "ORDER BY gran",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
Druids.newTimeseriesQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
@ -5375,6 +5498,7 @@ public class CalciteQueryTest
+ "EXTRACT(YEAR FROM FLOOR(__time TO YEAR)) AS \"year\", SUM(cnt)\n"
+ "FROM druid.foo\n"
+ "GROUP BY EXTRACT(YEAR FROM FLOOR(__time TO YEAR))",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(CalciteTests.DATASOURCE1)
@ -5421,6 +5545,7 @@ public class CalciteQueryTest
+ "GROUP BY gran\n"
+ "ORDER BY gran\n"
+ "LIMIT 1",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(CalciteTests.DATASOURCE1)
@ -5591,6 +5716,7 @@ public class CalciteQueryTest
+ "WHERE dim2 IN (SELECT dim1 FROM druid.foo WHERE dim1 <> '')\n"
+ "AND dim1 <> 'xxx'\n"
+ "group by dim1, dim2 ORDER BY dim2",
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(CalciteTests.DATASOURCE1)
@ -5657,6 +5783,7 @@ public class CalciteQueryTest
testQuery(
PLANNER_CONFIG_FALLBACK,
"EXPLAIN PLAN FOR " + theQuery,
CalciteTests.REGULAR_USER_AUTH_RESULT,
ImmutableList.of(),
ImmutableList.of(new Object[]{explanation})
);
@ -5855,6 +5982,7 @@ public class CalciteQueryTest
PLANNER_CONFIG_DEFAULT,
QUERY_CONTEXT_DEFAULT,
sql,
CalciteTests.REGULAR_USER_AUTH_RESULT,
expectedQueries,
expectedResults
);
@ -5863,31 +5991,34 @@ public class CalciteQueryTest
private void testQuery(
final PlannerConfig plannerConfig,
final String sql,
final AuthenticationResult authenticationResult,
final List<Query> expectedQueries,
final List<Object[]> expectedResults
) throws Exception
{
testQuery(plannerConfig, QUERY_CONTEXT_DEFAULT, sql, expectedQueries, expectedResults);
testQuery(plannerConfig, QUERY_CONTEXT_DEFAULT, sql, authenticationResult, expectedQueries, expectedResults);
}
private void testQuery(
final PlannerConfig plannerConfig,
final Map<String, Object> queryContext,
final String sql,
final AuthenticationResult authenticationResult,
final List<Query> expectedQueries,
final List<Object[]> expectedResults
) throws Exception
{
log.info("SQL: %s", sql);
queryLogHook.clearRecordedQueries();
final List<Object[]> plannerResults = getResults(plannerConfig, queryContext, sql);
final List<Object[]> plannerResults = getResults(plannerConfig, queryContext, sql, authenticationResult);
verifyResults(sql, expectedQueries, expectedResults, plannerResults);
}
private List<Object[]> getResults(
final PlannerConfig plannerConfig,
final Map<String, Object> queryContext,
final String sql
final String sql,
final AuthenticationResult authenticationResult
) throws Exception
{
final InProcessViewManager viewManager = new InProcessViewManager();
@ -5902,8 +6033,8 @@ public class CalciteQueryTest
macroTable,
plannerConfig,
new AuthConfig(),
AuthTestUtils.TEST_AUTHENTICATOR_MAPPER,
AuthTestUtils.TEST_AUTHORIZER_MAPPER,
CalciteTests.TEST_AUTHENTICATOR_MAPPER,
CalciteTests.TEST_AUTHORIZER_MAPPER,
CalciteTests.getJsonMapper()
);
@ -5921,7 +6052,7 @@ public class CalciteQueryTest
);
try (DruidPlanner planner = plannerFactory.createPlanner(queryContext)) {
final PlannerResult plan = planner.plan(sql);
final PlannerResult plan = planner.plan(sql, null, authenticationResult);
return Sequences.toList(plan.run(), Lists.newArrayList());
}
}

View File

@ -26,6 +26,7 @@ import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
@ -93,8 +94,17 @@ import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.server.QueryLifecycleFactory;
import io.druid.server.initialization.ServerConfig;
import io.druid.server.log.NoopRequestLogger;
import io.druid.server.security.Access;
import io.druid.server.security.Action;
import io.druid.server.security.AllowAllAuthenticator;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.AuthTestUtils;
import io.druid.server.security.AuthenticationResult;
import io.druid.server.security.Authenticator;
import io.druid.server.security.AuthenticatorMapper;
import io.druid.server.security.Authorizer;
import io.druid.server.security.AuthorizerMapper;
import io.druid.server.security.Resource;
import io.druid.server.security.ResourceType;
import io.druid.sql.calcite.expression.SqlOperatorConversion;
import io.druid.sql.calcite.expression.builtin.LookupOperatorConversion;
import io.druid.sql.calcite.planner.DruidOperatorTable;
@ -122,6 +132,66 @@ public class CalciteTests
{
public static final String DATASOURCE1 = "foo";
public static final String DATASOURCE2 = "foo2";
public static final String FORBIDDEN_DATASOURCE = "forbiddenDatasource";
public static final String TEST_SUPERUSER_NAME = "testSuperuser";
public static final AuthorizerMapper TEST_AUTHORIZER_MAPPER = new AuthorizerMapper(null) {
@Override
public Authorizer getAuthorizer(String name)
{
return new Authorizer()
{
@Override
public Access authorize(
AuthenticationResult authenticationResult, Resource resource, Action action
)
{
if (authenticationResult.getIdentity().equals(TEST_SUPERUSER_NAME)) {
return Access.OK;
}
if (resource.getType() == ResourceType.DATASOURCE && resource.getName().equals(FORBIDDEN_DATASOURCE)) {
return new Access(false);
} else {
return Access.OK;
}
}
};
}
};
public static final AuthenticatorMapper TEST_AUTHENTICATOR_MAPPER;
static {
final Map<String, Authenticator> defaultMap = Maps.newHashMap();
defaultMap.put(
"allowAll",
new AllowAllAuthenticator() {
@Override
public AuthenticationResult authenticateJDBCContext(Map<String, Object> context)
{
return new AuthenticationResult((String) context.get("user"), "allowAll", null);
}
@Override
public AuthenticationResult createEscalatedAuthenticationResult()
{
return new AuthenticationResult(TEST_SUPERUSER_NAME, "allowAll", null);
}
}
);
TEST_AUTHENTICATOR_MAPPER = new AuthenticatorMapper(defaultMap, "allowAll");
}
public static final AuthenticationResult REGULAR_USER_AUTH_RESULT = new AuthenticationResult(
"allowAll",
"allowAll",
null
);
public static final AuthenticationResult SUPER_USER_AUTH_RESULT = new AuthenticationResult(
TEST_SUPERUSER_NAME,
"allowAll",
null
);
private static final String TIMESTAMP_COLUMN = "t";
private static final Supplier<SelectQueryConfig> SELECT_CONFIG_SUPPLIER = Suppliers.ofInstance(
@ -303,6 +373,10 @@ public class CalciteTests
createRow("2000-01-01", "друид", "ru", 1.0)
);
public static final List<InputRow> FORBIDDEN_ROWS = ImmutableList.of(
createRow("2000-01-01", "forbidden", "abcd", 9999.0)
);
private CalciteTests()
{
// No instantiation.
@ -330,7 +404,7 @@ public class CalciteTests
new NoopRequestLogger(),
new ServerConfig(),
new AuthConfig(),
AuthTestUtils.TEST_AUTHORIZER_MAPPER
TEST_AUTHORIZER_MAPPER
);
}
@ -355,6 +429,13 @@ public class CalciteTests
.rows(ROWS2)
.buildMMappedIndex();
final QueryableIndex forbiddenIndex = IndexBuilder.create()
.tmpDir(new File(tmpDir, "forbidden"))
.indexMerger(TestHelper.getTestIndexMergerV9())
.schema(INDEX_SCHEMA)
.rows(FORBIDDEN_ROWS)
.buildMMappedIndex();
return new SpecificSegmentsQuerySegmentWalker(queryRunnerFactoryConglomerate()).add(
DataSegment.builder()
.dataSource(DATASOURCE1)
@ -371,6 +452,14 @@ public class CalciteTests
.shardSpec(new LinearShardSpec(0))
.build(),
index2
).add(
DataSegment.builder()
.dataSource(FORBIDDEN_DATASOURCE)
.interval(forbiddenIndex.getDataInterval())
.version("1")
.shardSpec(new LinearShardSpec(0))
.build(),
forbiddenIndex
);
}
@ -415,7 +504,7 @@ public class CalciteTests
new TestServerInventoryView(walker.getSegments()),
plannerConfig,
viewManager,
AuthTestUtils.TEST_AUTHENTICATOR_MAPPER
TEST_AUTHENTICATOR_MAPPER
);
schema.start();