Revert permission changes to Supervisor and Task APIs (#11819)

* Revert "Require Datasource WRITE authorization for Supervisor and Task access (#11718)"

This reverts commit f2d6100124dbe7cbc92ad91d28bd12a1800a1f2a.

* Revert "Require DATASOURCE WRITE access in SupervisorResourceFilter and TaskResourceFilter (#11680)"

This reverts commit 6779c4652d531b4d2c7056a69660f4e318f4aef6.

* Fix docs for the reverted commits

* Fix and restore deleted tests

* Fix and restore SystemSchemaTest
Kashif Faraz 2021-10-25 14:50:38 +05:30 committed by GitHub
parent 10c5fa93f1
commit abac9e39ed
25 changed files with 237 additions and 428 deletions

View File

@@ -51,7 +51,7 @@ The following recommendations apply to the network where Druid runs:
 * When possible, use firewall and other network layer filtering to only expose Druid services and ports specifically required for your use case. For example, only expose Broker ports to downstream applications that execute queries. You can limit access to a specific IP address or IP range to further tighten and enhance security.
 The following recommendation applies to Druid's authorization and authentication model:
-* Only grant `WRITE` permissions to any `DATASOURCE` to trusted users. Druid's trust model assumes those users have the same privileges as the operating system user that runs the Druid Console process. Additionally, users with `WRITE` permissions can make changes to datasources and they have access to both task and supervisor APIs which may return sensitive information.
+* Only grant `WRITE` permissions to any `DATASOURCE` to trusted users. Druid's trust model assumes those users have the same privileges as the operating system user that runs the Druid Console process. Additionally, users with `WRITE` permissions can make changes to datasources and they have access to both task and supervisor update (POST) APIs which may affect ingestion.
 * Only grant `STATE READ`, `STATE WRITE`, `CONFIG WRITE`, and `DATASOURCE WRITE` permissions to highly-trusted users. These permissions allow users to access resources on behalf of the Druid server process regardless of the datasource.
 * If your Druid client application allows less-trusted users to control the input source or firehose of an ingestion task, validate the URLs from the users. It is possible to point unchecked URLs to other locations and resources within your network or local file system.
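For readers mapping this recommendation onto code: a minimal sketch, using only the `ResourceAction`, `Resource`, `ResourceType`, and `Action` classes that appear elsewhere in this diff, of how such a grant is represented. The wrapper class and the datasource name are illustrative, not part of Druid.

```java
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.Resource;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.server.security.ResourceType;

public class GrantExample
{
  public static void main(String[] args)
  {
    // A WRITE grant on one datasource ("wikipedia" is an example name).
    // Per the recommendation above, hand such grants only to trusted users:
    // after this revert they gate the task/supervisor update (POST) APIs.
    ResourceAction writeGrant = new ResourceAction(
        new Resource("wikipedia", ResourceType.DATASOURCE),
        Action.WRITE
    );
    System.out.println(writeGrant);
  }
}
```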

View File

@@ -142,8 +142,8 @@ Queries on the [system schema tables](../querying/sql.md#system-schema) require
 - `segments`: Druid filters segments according to DATASOURCE READ permissions.
 - `servers`: The user requires STATE READ permissions.
 - `server_segments`: The user requires STATE READ permissions. Druid filters segments according to DATASOURCE READ permissions.
-- `tasks`: Druid filters tasks according to DATASOURCE WRITE permissions.
-- `supervisors`: Druid filters supervisors according to DATASOURCE WRITE permissions.
+- `tasks`: Druid filters tasks according to DATASOURCE READ permissions.
+- `supervisors`: Druid filters supervisors according to DATASOURCE READ permissions.
 When the Broker property `druid.sql.planner.authorizeSystemTablesDirectly` is true, users also require `SYSTEM_TABLE` authorization on a system schema table to query it.
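A hedged illustration of the filtering described above: querying `sys.tasks` over Druid's Avatica JDBC endpoint. The broker URL and credentials are placeholders, and the Avatica driver is assumed to be on the classpath.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

public class SysTasksQuery
{
  public static void main(String[] args) throws Exception
  {
    // Placeholder broker URL and credentials.
    String url = "jdbc:avatica:remote:url=http://localhost:8082/druid/v2/sql/avatica/";
    Properties props = new Properties();
    props.setProperty("user", "someUser");
    props.setProperty("password", "somePassword");
    try (Connection conn = DriverManager.getConnection(url, props);
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("SELECT task_id, datasource, status FROM sys.tasks")) {
      // After this revert, rows come back filtered by DATASOURCE READ permission:
      // the caller sees only tasks whose datasource it can read.
      while (rs.next()) {
        System.out.println(rs.getString("task_id") + " -> " + rs.getString("status"));
      }
    }
  }
}
```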

View File

@@ -30,12 +30,9 @@ import org.apache.druid.indexing.common.actions.LockListAction;
 import org.apache.druid.indexing.common.actions.TaskActionClient;
 import org.apache.druid.query.Query;
 import org.apache.druid.query.QueryRunner;
-import org.apache.druid.server.security.AuthorizerMapper;
-import org.apache.druid.server.security.ForbiddenException;
 import org.joda.time.Interval;
 import javax.annotation.Nullable;
-import javax.servlet.http.HttpServletRequest;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
@@ -167,19 +164,6 @@ public abstract class AbstractTask implements Task
     return TaskStatus.success(getId());
   }
-  /**
-   * Authorizes WRITE action on a task's datasource
-   *
-   * @throws ForbiddenException if not authorized
-   */
-  public void authorizeRequestForDatasourceWrite(
-      HttpServletRequest request,
-      AuthorizerMapper authorizerMapper
-  ) throws ForbiddenException
-  {
-    IndexTaskUtils.authorizeRequestForDatasourceWrite(request, dataSource, authorizerMapper);
-  }
   @Override
   public boolean equals(Object o)
   {

View File

@@ -83,6 +83,7 @@ import org.apache.druid.segment.realtime.firehose.ClippedFirehoseFactory;
 import org.apache.druid.segment.realtime.firehose.EventReceiverFirehoseFactory;
 import org.apache.druid.segment.realtime.firehose.TimedShutoffFirehoseFactory;
 import org.apache.druid.segment.realtime.plumber.Committers;
+import org.apache.druid.server.security.Action;
 import org.apache.druid.server.security.AuthorizerMapper;
 import org.apache.druid.timeline.partition.NumberedPartialShardSpec;
 import org.apache.druid.utils.CloseableUtils;
@@ -529,7 +530,7 @@ public class AppenderatorDriverRealtimeIndexTask extends AbstractTask implements
       @Context final HttpServletRequest req
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     Map<String, Object> returnMap = new HashMap<>();
     Map<String, Object> totalsMap = new HashMap<>();
     Map<String, Object> averagesMap = new HashMap<>();
@@ -555,7 +556,7 @@ public class AppenderatorDriverRealtimeIndexTask extends AbstractTask implements
       @Context final HttpServletRequest req
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     List<String> events = IndexTaskUtils.getMessagesFromSavedParseExceptions(
         parseExceptionHandler.getSavedParseExceptions()
     );

View File

@@ -63,6 +63,7 @@ import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;
 import org.apache.druid.segment.indexing.granularity.GranularitySpec;
 import org.apache.druid.segment.realtime.firehose.ChatHandler;
 import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider;
+import org.apache.druid.server.security.Action;
 import org.apache.druid.server.security.AuthorizerMapper;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.hadoop.mapred.JobClient;
@@ -633,7 +634,7 @@ public class HadoopIndexTask extends HadoopTask implements ChatHandler
       @QueryParam("windows") List<Integer> windows
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     Map<String, Object> returnMap = new HashMap<>();
     Map<String, Object> totalsMap = new HashMap<>();

View File

@@ -95,6 +95,7 @@ import org.apache.druid.segment.realtime.appenderator.SegmentsAndCommitMetadata;
 import org.apache.druid.segment.realtime.appenderator.TransactionalSegmentPublisher;
 import org.apache.druid.segment.realtime.firehose.ChatHandler;
 import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
+import org.apache.druid.server.security.Action;
 import org.apache.druid.server.security.AuthorizerMapper;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.partition.HashBasedNumberedShardSpec;
@@ -284,7 +285,7 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler
       @QueryParam("full") String full
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     return Response.ok(doGetUnparseableEvents(full)).build();
   }
@@ -393,7 +394,7 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler
       @QueryParam("full") String full
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     return Response.ok(doGetRowStats(full)).build();
   }
@@ -405,7 +406,7 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler
       @QueryParam("full") String full
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     Map<String, Object> returnMap = new HashMap<>();
     Map<String, Object> ingestionStatsAndErrors = new HashMap<>();
     Map<String, Object> payload = new HashMap<>();

View File

@@ -56,25 +56,28 @@ public class IndexTaskUtils
   }
   /**
-   * Authorizes WRITE action on a task's datasource
+   * Authorizes action to be performed on a task's datasource
    *
-   * @throws ForbiddenException if not authorized
+   * @return authorization result
    */
-  public static void authorizeRequestForDatasourceWrite(
+  public static Access datasourceAuthorizationCheck(
       final HttpServletRequest req,
+      Action action,
       String datasource,
      AuthorizerMapper authorizerMapper
-  ) throws ForbiddenException
+  )
   {
     ResourceAction resourceAction = new ResourceAction(
         new Resource(datasource, ResourceType.DATASOURCE),
-        Action.WRITE
+        action
     );
     Access access = AuthorizationUtils.authorizeResourceAction(req, resourceAction, authorizerMapper);
     if (!access.isAllowed()) {
       throw new ForbiddenException(access.toString());
     }
+    return access;
   }
   public static void setTaskDimensions(final ServiceMetricEvent.Builder metricBuilder, final Task task)
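A minimal sketch (hypothetical handler class and path) of how the call sites later in this diff use the restored helper: read-only GET endpoints pass `Action.READ`, while mutating endpoints pass `Action.WRITE`.

```java
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.druid.indexing.common.task.IndexTaskUtils;
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.AuthorizerMapper;

public class ExampleChatHandler
{
  private final String dataSource;
  private final AuthorizerMapper authorizerMapper;

  public ExampleChatHandler(String dataSource, AuthorizerMapper authorizerMapper)
  {
    this.dataSource = dataSource;
    this.authorizerMapper = authorizerMapper;
  }

  // Read-only endpoints check READ; mutating endpoints (e.g. stop, pause,
  // setEndOffsets) check WRITE, as the call sites below show.
  @GET
  @Path("/example")  // hypothetical path
  @Produces(MediaType.APPLICATION_JSON)
  public Response getExample(@Context final HttpServletRequest req)
  {
    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, dataSource, authorizerMapper);
    return Response.ok("{}").build();
  }
}
```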

View File

@@ -53,6 +53,7 @@ import org.apache.druid.indexing.common.task.CurrentSubTaskHolder;
 import org.apache.druid.indexing.common.task.IndexTask;
 import org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec;
 import org.apache.druid.indexing.common.task.IndexTask.IndexTuningConfig;
+import org.apache.druid.indexing.common.task.IndexTaskUtils;
 import org.apache.druid.indexing.common.task.Task;
 import org.apache.druid.indexing.common.task.TaskResource;
 import org.apache.druid.indexing.common.task.Tasks;
@@ -74,6 +75,8 @@ import org.apache.druid.segment.indexing.granularity.GranularitySpec;
 import org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec;
 import org.apache.druid.segment.realtime.appenderator.TransactionalSegmentPublisher;
 import org.apache.druid.segment.realtime.firehose.ChatHandler;
+import org.apache.druid.segment.realtime.firehose.ChatHandlers;
+import org.apache.druid.server.security.Action;
 import org.apache.druid.server.security.AuthorizerMapper;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.partition.BuildingShardSpec;
@@ -1173,7 +1176,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
       @Context final HttpServletRequest req
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    ChatHandlers.authorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     if (toolbox == null) {
       return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("task is not running yet").build();
@@ -1252,7 +1255,12 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
       @Context final HttpServletRequest req
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    ChatHandlers.authorizationCheck(
+        req,
+        Action.WRITE,
+        getDataSource(),
+        authorizerMapper
+    );
     if (currentSubTaskHolder == null || currentSubTaskHolder.getTask() == null) {
       return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("task is not running yet").build();
     } else {
@@ -1270,7 +1278,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
   @Produces(MediaType.APPLICATION_JSON)
   public Response getMode(@Context final HttpServletRequest req)
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     return Response.ok(isParallelMode() ? "parallel" : "sequential").build();
   }
@@ -1279,7 +1287,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
   @Produces(MediaType.APPLICATION_JSON)
   public Response getPhaseName(@Context final HttpServletRequest req)
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     if (isParallelMode()) {
       final ParallelIndexTaskRunner runner = getCurrentRunner();
       if (runner == null) {
@@ -1297,7 +1305,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
   @Produces(MediaType.APPLICATION_JSON)
   public Response getProgress(@Context final HttpServletRequest req)
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     final ParallelIndexTaskRunner currentRunner = getCurrentRunner();
     if (currentRunner == null) {
       return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("task is not running yet").build();
@@ -1311,7 +1319,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
   @Produces(MediaType.APPLICATION_JSON)
   public Response getRunningTasks(@Context final HttpServletRequest req)
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     final ParallelIndexTaskRunner currentRunner = getCurrentRunner();
     if (currentRunner == null) {
       return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("task is not running yet").build();
@@ -1325,7 +1333,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
   @Produces(MediaType.APPLICATION_JSON)
   public Response getSubTaskSpecs(@Context final HttpServletRequest req)
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     final ParallelIndexTaskRunner currentRunner = getCurrentRunner();
     if (currentRunner == null) {
       return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("task is not running yet").build();
@@ -1339,7 +1347,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
   @Produces(MediaType.APPLICATION_JSON)
   public Response getRunningSubTaskSpecs(@Context final HttpServletRequest req)
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     final ParallelIndexTaskRunner currentRunner = getCurrentRunner();
     if (currentRunner == null) {
       return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("task is not running yet").build();
@@ -1353,7 +1361,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
   @Produces(MediaType.APPLICATION_JSON)
   public Response getCompleteSubTaskSpecs(@Context final HttpServletRequest req)
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     final ParallelIndexTaskRunner currentRunner = getCurrentRunner();
     if (currentRunner == null) {
       return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("task is not running yet").build();
@@ -1367,7 +1375,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
   @Produces(MediaType.APPLICATION_JSON)
   public Response getSubTaskSpec(@PathParam("id") String id, @Context final HttpServletRequest req)
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     final ParallelIndexTaskRunner currentRunner = getCurrentRunner();
     if (currentRunner == null) {
@@ -1387,7 +1395,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
   @Produces(MediaType.APPLICATION_JSON)
   public Response getSubTaskState(@PathParam("id") String id, @Context final HttpServletRequest req)
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     final ParallelIndexTaskRunner currentRunner = getCurrentRunner();
     if (currentRunner == null) {
       return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("task is not running yet").build();
@@ -1409,7 +1417,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
       @Context final HttpServletRequest req
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     final ParallelIndexTaskRunner currentRunner = getCurrentRunner();
     if (currentRunner == null) {
       return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("task is not running yet").build();
@@ -1561,7 +1569,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
       @QueryParam("full") String full
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     return Response.ok(doGetRowStatsAndUnparseableEvents(full, false).lhs).build();
   }
@@ -1606,8 +1614,8 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
       @QueryParam("full") String full
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     return Response.ok(doGetLiveReports(full)).build();
   }
 }

View File

@@ -67,6 +67,7 @@ import org.apache.druid.segment.realtime.appenderator.BaseAppenderatorDriver;
 import org.apache.druid.segment.realtime.appenderator.BatchAppenderatorDriver;
 import org.apache.druid.segment.realtime.appenderator.SegmentsAndCommitMetadata;
 import org.apache.druid.segment.realtime.firehose.ChatHandler;
+import org.apache.druid.server.security.Action;
 import org.apache.druid.server.security.AuthorizerMapper;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.TimelineObjectHolder;
@@ -487,7 +488,7 @@ public class SinglePhaseSubTask extends AbstractBatchSubtask implements ChatHand
       @QueryParam("full") String full
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     Map<String, List<String>> events = new HashMap<>();
     boolean needsBuildSegments = false;
@@ -562,7 +563,7 @@ public class SinglePhaseSubTask extends AbstractBatchSubtask implements ChatHand
       @QueryParam("full") String full
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     return Response.ok(doGetRowStats(full)).build();
   }
@@ -594,7 +595,7 @@ public class SinglePhaseSubTask extends AbstractBatchSubtask implements ChatHand
       @QueryParam("full") String full
   )
   {
-    authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
     return Response.ok(doGetLiveReports(full)).build();
   }

View File

@@ -575,11 +575,11 @@ public class OverlordResource
       }
     }
     // early authorization check if datasource != null
-    // fail fast if user not authorized to write to datasource
+    // fail fast if user not authorized to access datasource
     if (dataSource != null) {
       final ResourceAction resourceAction = new ResourceAction(
           new Resource(dataSource, ResourceType.DATASOURCE),
-          Action.WRITE
+          Action.READ
       );
       final Access authResult = AuthorizationUtils.authorizeResourceAction(
           req,
@@ -987,7 +987,7 @@ public class OverlordResource
         );
       }
       return Collections.singletonList(
-          new ResourceAction(new Resource(taskDatasource, ResourceType.DATASOURCE), Action.WRITE)
+          new ResourceAction(new Resource(taskDatasource, ResourceType.DATASOURCE), Action.READ)
       );
     };
     List<TaskStatusPlus> optionalTypeFilteredList = collectionToFilter;
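A condensed sketch of the filtering path above: each task is mapped to a READ `ResourceAction` on its datasource, and `AuthorizationUtils.filterAuthorizedResources` (not shown in this hunk) drops tasks whose datasource the caller cannot read. The helper class and generator name below are illustrative.

```java
import java.util.Collections;
import java.util.function.Function;
import org.apache.druid.indexer.TaskStatusPlus;
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.Resource;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.server.security.ResourceType;

public class TaskFilteringSketch
{
  // One READ ResourceAction per task's datasource; the Overlord passes a
  // generator like this to AuthorizationUtils.filterAuthorizedResources.
  static final Function<TaskStatusPlus, Iterable<ResourceAction>> TASK_RA_GENERATOR =
      task -> Collections.singletonList(
          new ResourceAction(
              new Resource(task.getDataSource(), ResourceType.DATASOURCE),
              Action.READ
          )
      );
}
```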

View File

@@ -19,6 +19,7 @@
 package org.apache.druid.indexing.overlord.http.security;
+import com.google.common.base.Function;
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
@@ -30,9 +31,11 @@ import org.apache.druid.indexing.overlord.supervisor.SupervisorSpec;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.server.http.security.AbstractResourceFilter;
 import org.apache.druid.server.security.Access;
+import org.apache.druid.server.security.Action;
 import org.apache.druid.server.security.AuthorizationUtils;
 import org.apache.druid.server.security.AuthorizerMapper;
 import org.apache.druid.server.security.ForbiddenException;
+import org.apache.druid.server.security.ResourceAction;
 import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.PathSegment;
@@ -88,11 +91,13 @@ public class SupervisorResourceFilter extends AbstractResourceFilter
         "No dataSources found to perform authorization checks"
     );
-    // Supervisor APIs should always require DATASOURCE WRITE access
-    // as they deal with ingestion related information
+    Function<String, ResourceAction> resourceActionFunction = getAction(request) == Action.READ ?
+        AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR :
+        AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR;
     Access authResult = AuthorizationUtils.authorizeAllResourceActions(
         getReq(),
-        Iterables.transform(spec.getDataSources(), AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR),
+        Iterables.transform(spec.getDataSources(), resourceActionFunction),
         getAuthorizerMapper()
     );
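The filter now derives the required action from the HTTP verb via `getAction(request)`. A sketch of that apparent contract, assuming `AbstractResourceFilter` maps GET to READ and other verbs to WRITE; the helper below is illustrative, not the real implementation.

```java
import org.apache.druid.server.security.Action;

public class MethodActionMapping
{
  // GETs on supervisor and task endpoints now need only DATASOURCE READ;
  // POST/DELETE (submit, terminate, reset, shutdown) still need WRITE.
  static Action actionForMethod(String httpMethod)
  {
    return "GET".equals(httpMethod) ? Action.READ : Action.WRITE;
  }

  public static void main(String[] args)
  {
    System.out.println(actionForMethod("GET"));   // READ
    System.out.println(actionForMethod("POST"));  // WRITE
  }
}
```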

View File

@@ -30,7 +30,6 @@ import org.apache.druid.indexing.overlord.TaskStorageQueryAdapter;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.server.http.security.AbstractResourceFilter;
 import org.apache.druid.server.security.Access;
-import org.apache.druid.server.security.Action;
 import org.apache.druid.server.security.AuthorizationUtils;
 import org.apache.druid.server.security.AuthorizerMapper;
 import org.apache.druid.server.security.ForbiddenException;
@@ -86,11 +85,9 @@ public class TaskResourceFilter extends AbstractResourceFilter
     }
     final String dataSourceName = Preconditions.checkNotNull(taskOptional.get().getDataSource());
-    // Task APIs should always require DATASOURCE WRITE access
-    // as they deal with ingestion related information
     final ResourceAction resourceAction = new ResourceAction(
         new Resource(dataSourceName, ResourceType.DATASOURCE),
-        Action.WRITE
+        getAction(request)
     );
     final Access authResult = AuthorizationUtils.authorizeResourceAction(

View File

@@ -65,7 +65,7 @@ import java.util.stream.Collectors;
 @Path("/druid/indexer/v1/supervisor")
 public class SupervisorResource
 {
-  private static final Function<VersionedSupervisorSpec, Iterable<ResourceAction>> SPEC_DATASOURCE_WRITE_RA_GENERATOR =
+  private static final Function<VersionedSupervisorSpec, Iterable<ResourceAction>> SPEC_DATASOURCE_READ_RA_GENERATOR =
       supervisorSpec -> {
         if (supervisorSpec.getSpec() == null) {
           return null;
@@ -75,7 +75,7 @@ public class SupervisorResource
         }
         return Iterables.transform(
             supervisorSpec.getSpec().getDataSources(),
-            AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR
+            AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR
         );
       };
@@ -376,7 +376,7 @@ public class SupervisorResource
             AuthorizationUtils.filterAuthorizedResources(
                 req,
                 manager.getSupervisorHistory(),
-                SPEC_DATASOURCE_WRITE_RA_GENERATOR,
+                SPEC_DATASOURCE_READ_RA_GENERATOR,
                 authorizerMapper
             )
         ).build()
@@ -400,7 +400,7 @@ public class SupervisorResource
         AuthorizationUtils.filterAuthorizedResources(
             req,
             historyForId,
-            SPEC_DATASOURCE_WRITE_RA_GENERATOR,
+            SPEC_DATASOURCE_READ_RA_GENERATOR,
             authorizerMapper
         )
     );

View File

@@ -81,6 +81,8 @@ import org.apache.druid.segment.realtime.appenderator.AppenderatorDriverAddResul
 import org.apache.druid.segment.realtime.appenderator.SegmentsAndCommitMetadata;
 import org.apache.druid.segment.realtime.appenderator.StreamAppenderatorDriver;
 import org.apache.druid.segment.realtime.firehose.ChatHandler;
+import org.apache.druid.server.security.Access;
+import org.apache.druid.server.security.Action;
 import org.apache.druid.server.security.AuthorizerMapper;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.utils.CollectionUtils;
@@ -1359,10 +1361,12 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
   /**
    * Authorizes action to be performed on this task's datasource
+   *
+   * @return authorization result
    */
-  private void authorizeRequest(final HttpServletRequest req)
+  private Access authorizationCheck(final HttpServletRequest req, Action action)
   {
-    task.authorizeRequestForDatasourceWrite(req, authorizerMapper);
+    return IndexTaskUtils.datasourceAuthorizationCheck(req, action, task.getDataSource(), authorizerMapper);
   }
   public Appenderator getAppenderator()
@@ -1439,7 +1443,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
   @Path("/stop")
   public Response stop(@Context final HttpServletRequest req)
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.WRITE);
     stopGracefully();
     return Response.status(Response.Status.OK).build();
   }
@@ -1449,7 +1453,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
   @Produces(MediaType.APPLICATION_JSON)
   public Status getStatusHTTP(@Context final HttpServletRequest req)
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.READ);
     return status;
   }
@@ -1464,7 +1468,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
   @Produces(MediaType.APPLICATION_JSON)
   public Map<PartitionIdType, SequenceOffsetType> getCurrentOffsets(@Context final HttpServletRequest req)
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.READ);
     return getCurrentOffsets();
   }
@@ -1478,7 +1482,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
   @Produces(MediaType.APPLICATION_JSON)
   public Map<PartitionIdType, SequenceOffsetType> getEndOffsetsHTTP(@Context final HttpServletRequest req)
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.READ);
     return getEndOffsets();
   }
@@ -1498,7 +1502,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
       @Context final HttpServletRequest req
   ) throws InterruptedException
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.WRITE);
     return setEndOffsets(sequences, finish);
   }
@@ -1548,7 +1552,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
       @Context final HttpServletRequest req
   )
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.READ);
     return Response.ok(doGetRowStats()).build();
   }
@@ -1559,7 +1563,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
       @Context final HttpServletRequest req
   )
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.READ);
     return Response.ok(doGetLiveReports()).build();
   }
@@ -1571,7 +1575,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
       @Context final HttpServletRequest req
   )
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.READ);
     List<String> events = IndexTaskUtils.getMessagesFromSavedParseExceptions(
         parseExceptionHandler.getSavedParseExceptions()
     );
@@ -1722,7 +1726,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
       @Context final HttpServletRequest req
   )
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.READ);
     return getCheckpoints();
   }
@@ -1749,7 +1753,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
       @Context final HttpServletRequest req
   ) throws InterruptedException
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.WRITE);
     return pause();
   }
@@ -1804,7 +1808,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
   @Path("/resume")
   public Response resumeHTTP(@Context final HttpServletRequest req) throws InterruptedException
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.WRITE);
     resume();
     return Response.status(Response.Status.OK).build();
   }
@@ -1837,7 +1841,7 @@ public abstract class SeekableStreamIndexTaskRunner<PartitionIdType, SequenceOff
   @Produces(MediaType.APPLICATION_JSON)
   public DateTime getStartTime(@Context final HttpServletRequest req)
   {
-    authorizeRequest(req);
+    authorizationCheck(req, Action.WRITE);
     return startTime;
   }
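A summary, as a hypothetical helper, of the per-endpoint checks restored above: reads need DATASOURCE READ, while control operations (and `getStartTime`, per the diff) need DATASOURCE WRITE. Endpoint paths other than `/stop` and `/resume` do not appear in this hunk and are illustrative.

```java
import java.util.Map;
import org.apache.druid.server.security.Action;

public class SeekableStreamEndpointActions
{
  // Illustrative mapping of the runner's endpoints to the action each one checks.
  static final Map<String, Action> ENDPOINT_ACTIONS = Map.of(
      "/status", Action.READ,
      "/offsets/current", Action.READ,
      "/offsets/end", Action.READ,
      "/checkpoints", Action.READ,
      "/rowStats", Action.READ,
      "/unparseableEvents", Action.READ,
      "/stop", Action.WRITE,
      "/pause", Action.WRITE,
      "/resume", Action.WRITE,
      "/time/start", Action.WRITE
  );
}
```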

View File

@@ -90,14 +90,6 @@ import org.apache.druid.segment.realtime.firehose.WindowedStorageAdapter;
 import org.apache.druid.segment.realtime.plumber.NoopSegmentHandoffNotifierFactory;
 import org.apache.druid.segment.transform.ExpressionTransform;
 import org.apache.druid.segment.transform.TransformSpec;
-import org.apache.druid.server.security.Access;
-import org.apache.druid.server.security.Action;
-import org.apache.druid.server.security.AuthConfig;
-import org.apache.druid.server.security.AuthenticationResult;
-import org.apache.druid.server.security.Authorizer;
-import org.apache.druid.server.security.AuthorizerMapper;
-import org.apache.druid.server.security.ForbiddenException;
-import org.apache.druid.server.security.ResourceType;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.SegmentId;
 import org.apache.druid.timeline.partition.HashBasedNumberedShardSpec;
@@ -119,7 +111,6 @@ import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import javax.annotation.Nullable;
-import javax.servlet.http.HttpServletRequest;
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.IOException;
@@ -2613,81 +2604,6 @@ public class IndexTaskTest extends IngestionTestBase
     }
   }
-  @Test
-  public void testAuthorizeRequestForDatasourceWrite() throws Exception
-  {
-    // Need to run this only once
-    if (lockGranularity == LockGranularity.SEGMENT) {
-      return;
-    }
-    // Create auth mapper which allows datasourceReadUser to read datasource
-    // and datasourceWriteUser to write to datasource
-    final String datasourceWriteUser = "datasourceWriteUser";
-    final String datasourceReadUser = "datasourceReadUser";
-    AuthorizerMapper authorizerMapper = new AuthorizerMapper(null) {
-      @Override
-      public Authorizer getAuthorizer(String name)
-      {
-        return (authenticationResult, resource, action) -> {
-          final String username = authenticationResult.getIdentity();
-          if (!resource.getType().equals(ResourceType.DATASOURCE) || username == null) {
-            return new Access(false);
-          } else if (action == Action.WRITE) {
-            return new Access(username.equals(datasourceWriteUser));
-          } else {
-            return new Access(username.equals(datasourceReadUser));
-          }
-        };
-      }
-    };
-    // Create test target
-    final IndexTask indexTask = new IndexTask(
-        null,
-        null,
-        createDefaultIngestionSpec(
-            jsonMapper,
-            temporaryFolder.newFolder(),
-            null,
-            null,
-            createTuningConfigWithMaxRowsPerSegment(2, true),
-            false,
-            false
-        ),
-        null
-    );
-    // Verify that datasourceWriteUser is successfully authorized
-    HttpServletRequest writeUserRequest = EasyMock.mock(HttpServletRequest.class);
-    expectAuthorizationTokenCheck(datasourceWriteUser, writeUserRequest);
-    EasyMock.replay(writeUserRequest);
-    indexTask.authorizeRequestForDatasourceWrite(writeUserRequest, authorizerMapper);
-    // Verify that datasourceReadUser is not successfully authorized
-    HttpServletRequest readUserRequest = EasyMock.mock(HttpServletRequest.class);
-    expectAuthorizationTokenCheck(datasourceReadUser, readUserRequest);
-    EasyMock.replay(readUserRequest);
-    expectedException.expect(ForbiddenException.class);
-    indexTask.authorizeRequestForDatasourceWrite(readUserRequest, authorizerMapper);
-  }
-  private void expectAuthorizationTokenCheck(String username, HttpServletRequest request)
-  {
-    AuthenticationResult authenticationResult = new AuthenticationResult(username, "druid", null, null);
-    EasyMock.expect(request.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
-    EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).atLeastOnce();
-    EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
-        .andReturn(authenticationResult)
-        .atLeastOnce();
-    request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, false);
-    EasyMock.expectLastCall().anyTimes();
-    request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
-    EasyMock.expectLastCall().anyTimes();
-  }
   @Test
   public void testEqualsAndHashCode()
   {

View File

@@ -122,16 +122,14 @@ public class OverlordResourceTest
         case "allow":
           return new Access(true);
         case Datasources.WIKIPEDIA:
-          // All users can read wikipedia but only writer can write
+          // Only "Wiki Reader" can read "wikipedia"
           return new Access(
-              action == Action.READ
-                  || (action == Action.WRITE && Users.WIKI_WRITER.equals(username))
+              action == Action.READ && Users.WIKI_READER.equals(username)
           );
         case Datasources.BUZZFEED:
-          // All users can read buzzfeed but only writer can write
+          // Only "Buzz Reader" can read "buzzfeed"
           return new Access(
-              action == Action.READ
-                  || (action == Action.WRITE && Users.BUZZ_WRITER.equals(username))
+              action == Action.READ && Users.BUZZ_READER.equals(username)
           );
         default:
           return new Access(false);
@@ -857,11 +855,11 @@ public class OverlordResourceTest
   }
   @Test
-  public void testGetTasksRequiresDatasourceWrite()
+  public void testGetTasksRequiresDatasourceRead()
   {
-    // Setup mocks for a user who has write access to "wikipedia"
-    // and read access to "buzzfeed"
-    expectAuthorizationTokenCheck(Users.WIKI_WRITER);
+    // Setup mocks for a user who has read access to "wikipedia"
+    // and no access to "buzzfeed"
+    expectAuthorizationTokenCheck(Users.WIKI_READER);
     // Setup mocks to return completed, active, known, pending and running tasks
     EasyMock.expect(taskStorageQueryAdapter.getCompletedTaskInfoByCreatedTimeDuration(null, null, null)).andStubReturn(
@@ -907,7 +905,7 @@ public class OverlordResourceTest
         workerTaskRunnerQueryAdapter
     );
-    // Verify that only the tasks of write access datasource are returned
+    // Verify that only the tasks of read access datasource are returned
     List<TaskStatusPlus> responseObjects = (List<TaskStatusPlus>) overlordResource
         .getTasks(null, null, null, null, null, req)
         .getEntity();
@@ -918,11 +916,11 @@ public class OverlordResourceTest
   }
   @Test
-  public void testGetTasksFilterByDatasourceRequiresWrite()
+  public void testGetTasksFilterByDatasourceRequiresReadAccess()
   {
-    // Setup mocks for a user who has write access to "wikipedia"
-    // and read access to "buzzfeed"
-    expectAuthorizationTokenCheck(Users.WIKI_WRITER);
+    // Setup mocks for a user who has read access to "wikipedia"
+    // and no access to "buzzfeed"
+    expectAuthorizationTokenCheck(Users.WIKI_READER);
     // Setup mocks to return completed, active, known, pending and running tasks
     EasyMock.expect(taskStorageQueryAdapter.getCompletedTaskInfoByCreatedTimeDuration(null, null, null)).andStubReturn(
@@ -949,7 +947,7 @@ public class OverlordResourceTest
         workerTaskRunnerQueryAdapter
     );
-    // Verify that only the tasks of write access datasource are returned
+    // Verify that only the tasks of read access datasource are returned
     expectedException.expect(WebApplicationException.class);
     overlordResource.getTasks(null, Datasources.BUZZFEED, null, null, null, req);
   }
@@ -1042,7 +1040,7 @@ public class OverlordResourceTest
   @Test
   public void testTaskPostDeniesDatasourceReadUser()
   {
-    expectAuthorizationTokenCheck(Users.WIKI_WRITER);
+    expectAuthorizationTokenCheck(Users.WIKI_READER);
     EasyMock.replay(
         taskRunner,
@@ -1054,7 +1052,7 @@ public class OverlordResourceTest
     );
     // Verify that taskPost fails for user who has only datasource read access
-    Task task = NoopTask.create(Datasources.BUZZFEED);
+    Task task = NoopTask.create(Datasources.WIKIPEDIA);
     expectedException.expect(ForbiddenException.class);
     overlordResource.taskPost(task, req);
@@ -1516,8 +1514,8 @@ public class OverlordResourceTest
   private static class Users
   {
     private static final String DRUID = "druid";
-    private static final String WIKI_WRITER = "Wiki Writer";
-    private static final String BUZZ_WRITER = "Buzz Writer";
+    private static final String WIKI_READER = "Wiki Reader";
+    private static final String BUZZ_READER = "Buzz Reader";
   }
   /**

View File

@@ -72,18 +72,18 @@ public class SupervisorResourceFilterTest
   }
   @Test
-  public void testGetWhenUserHasWriteAccess()
+  public void testGetWhenUserHasReadAccess()
   {
-    setExpectations("/druid/indexer/v1/supervisor/datasource1", "GET", "datasource1", Action.WRITE, true);
+    setExpectations("/druid/indexer/v1/supervisor/datasource1", "GET", "datasource1", Action.READ, true);
     ContainerRequest filteredRequest = resourceFilter.filter(containerRequest);
     Assert.assertNotNull(filteredRequest);
     verifyMocks();
   }
   @Test
-  public void testGetWhenUserHasNoWriteAccess()
+  public void testGetWhenUserHasNoReadAccess()
   {
-    setExpectations("/druid/indexer/v1/supervisor/datasource1", "GET", "datasource1", Action.WRITE, false);
+    setExpectations("/druid/indexer/v1/supervisor/datasource1", "GET", "datasource1", Action.READ, false);
     ForbiddenException expected = null;
     try {

View File

@@ -30,12 +30,10 @@ import org.apache.druid.indexing.overlord.supervisor.autoscaler.SupervisorTaskAu
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.segment.TestHelper;
 import org.apache.druid.server.security.Access;
-import org.apache.druid.server.security.Action;
 import org.apache.druid.server.security.AuthConfig;
 import org.apache.druid.server.security.AuthenticationResult;
 import org.apache.druid.server.security.Authorizer;
 import org.apache.druid.server.security.AuthorizerMapper;
-import org.apache.druid.server.security.ResourceType;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.easymock.EasyMockRunner;
@@ -94,14 +92,7 @@ public class SupervisorResourceTest extends EasyMockSupport
       @Override
       public Authorizer getAuthorizer(String name)
       {
-        // Create an Authorizer that only allows Datasource WRITE requests
-        // because all SupervisorResource APIs must only check Datasource WRITE access
         return (authenticationResult, resource, action) -> {
-          if (!resource.getType().equals(ResourceType.DATASOURCE)
-              || action != Action.WRITE) {
-            return new Access(false);
-          }
           if (authenticationResult.getIdentity().equals("druid")) {
             return Access.OK;
           } else {

View File

@ -92,11 +92,11 @@ public class SeekableStreamIndexTaskRunnerAuthTest
final String username = authenticationResult.getIdentity(); final String username = authenticationResult.getIdentity();
// Allow access to a Datasource if // Allow access to a Datasource if
// - any user requests Read access // - Datasource Read User requests Read access
// - or, Datasource Write User requests Write access // - or, Datasource Write User requests Write access
if (resource.getType().equals(ResourceType.DATASOURCE)) { if (resource.getType().equals(ResourceType.DATASOURCE)) {
return new Access( return new Access(
action == Action.READ (action == Action.READ && username.equals(Users.DATASOURCE_READ))
|| (action == Action.WRITE && username.equals(Users.DATASOURCE_WRITE)) || (action == Action.WRITE && username.equals(Users.DATASOURCE_WRITE))
); );
} }
@ -118,13 +118,6 @@ public class SeekableStreamIndexTaskRunnerAuthTest
null null
); );
SeekableStreamIndexTaskTuningConfig tuningConfig = mock(SeekableStreamIndexTaskTuningConfig.class); SeekableStreamIndexTaskTuningConfig tuningConfig = mock(SeekableStreamIndexTaskTuningConfig.class);
/*expect(tuningConfig.getIntermediateHandoffPeriod()).andReturn(Period.minutes(10));
expect(tuningConfig.isLogParseExceptions()).andReturn(false);
expect(tuningConfig.getMaxParseExceptions()).andReturn(1000);
expect(tuningConfig.getMaxSavedParseExceptions()).andReturn(100);
replay(tuningConfig);*/
SeekableStreamIndexTaskIOConfig<String, String> ioConfig = new TestSeekableStreamIndexTaskIOConfig(); SeekableStreamIndexTaskIOConfig<String, String> ioConfig = new TestSeekableStreamIndexTaskIOConfig();
// Initiliaze task and task runner // Initiliaze task and task runner
@@ -136,7 +129,7 @@ public class SeekableStreamIndexTaskRunnerAuthTest
   @Test
   public void testGetStatusHttp()
   {
-    verifyOnlyDatasourceWriteUserCanAccess(taskRunner::getStatusHTTP);
+    verifyOnlyDatasourceReadUserCanAccess(taskRunner::getStatusHTTP);
   }
   @Test
@@ -180,7 +173,7 @@ public class SeekableStreamIndexTaskRunnerAuthTest
   @Test
   public void testGetEndOffsets()
   {
-    verifyOnlyDatasourceWriteUserCanAccess(taskRunner::getCurrentOffsets);
+    verifyOnlyDatasourceReadUserCanAccess(taskRunner::getCurrentOffsets);
   }
   @Test
@@ -199,7 +192,7 @@ public class SeekableStreamIndexTaskRunnerAuthTest
   @Test
   public void testGetCheckpointsHttp()
   {
-    verifyOnlyDatasourceWriteUserCanAccess(taskRunner::getCheckpointsHTTP);
+    verifyOnlyDatasourceReadUserCanAccess(taskRunner::getCheckpointsHTTP);
   }
@@ -219,6 +212,22 @@ public class SeekableStreamIndexTaskRunnerAuthTest
     method.accept(blockedRequest);
   }
+  private void verifyOnlyDatasourceReadUserCanAccess(
+      Consumer<HttpServletRequest> method
+  )
+  {
+    // Verify that datasource read user can access
+    HttpServletRequest allowedRequest = createRequest(Users.DATASOURCE_READ);
+    replay(allowedRequest);
+    method.accept(allowedRequest);
+    // Verify that no other user can access
+    HttpServletRequest blockedRequest = createRequest(Users.DATASOURCE_WRITE);
+    replay(blockedRequest);
+    expectedException.expect(ForbiddenException.class);
+    method.accept(blockedRequest);
+  }
   private HttpServletRequest createRequest(String username)
   {
     HttpServletRequest request = mock(HttpServletRequest.class);
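The createRequest helper is cut off at the end of this hunk. A plausible completion, assuming EasyMock (as used elsewhere in this test) and Druid's AuthConfig.DRUID_AUTHENTICATION_RESULT request attribute; the AuthenticationResult constructor arguments below are illustrative:

    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.mock;

    import javax.servlet.http.HttpServletRequest;
    import org.apache.druid.server.security.AuthConfig;
    import org.apache.druid.server.security.AuthenticationResult;

    // Hypothetical completion: the runner reads the AuthenticationResult
    // that the authentication filter would normally stash on the request.
    private HttpServletRequest createRequest(String username)
    {
      HttpServletRequest request = mock(HttpServletRequest.class);
      AuthenticationResult authResult = new AuthenticationResult(username, "druid", null, null);
      expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(authResult).anyTimes();
      return request;
    }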


@@ -53,41 +53,41 @@ description: Admin users
 uniqueMember: uid=admin,ou=Users,dc=example,dc=org
 uniqueMember: uid=druid_system,ou=Users,dc=example,dc=org
-dn: uid=datasourceReadOnlyUser,ou=Users,dc=example,dc=org
-uid: datasourceReadOnlyUser
-cn: datasourceReadOnlyUser
-sn: datasourceReadOnlyUser
+dn: uid=datasourceOnlyUser,ou=Users,dc=example,dc=org
+uid: datasourceOnlyUser
+cn: datasourceOnlyUser
+sn: datasourceOnlyUser
 objectClass: top
 objectClass: posixAccount
 objectClass: inetOrgPerson
-homeDirectory: /home/datasourceReadOnlyUser
+homeDirectory: /home/datasourceOnlyUser
 uidNumber: 3
 gidNumber: 3
 userPassword: helloworld
-dn: cn=datasourceReadOnlyGroup,ou=Groups,dc=example,dc=org
+dn: cn=datasourceOnlyGroup,ou=Groups,dc=example,dc=org
 objectClass: groupOfUniqueNames
-cn: datasourceReadOnlyGroup
-description: datasourceReadOnlyGroup users
-uniqueMember: uid=datasourceReadOnlyUser,ou=Users,dc=example,dc=org
+cn: datasourceOnlyGroup
+description: datasourceOnlyGroup users
+uniqueMember: uid=datasourceOnlyUser,ou=Users,dc=example,dc=org
-dn: uid=datasourceReadWithStateUser,ou=Users,dc=example,dc=org
-uid: datasourceReadWithStateUser
-cn: datasourceReadWithStateUser
-sn: datasourceReadWithStateUser
+dn: uid=datasourceWithStateUser,ou=Users,dc=example,dc=org
+uid: datasourceWithStateUser
+cn: datasourceWithStateUser
+sn: datasourceWithStateUser
 objectClass: top
 objectClass: posixAccount
 objectClass: inetOrgPerson
-homeDirectory: /home/datasourceReadWithStateUser
+homeDirectory: /home/datasourceWithStateUser
 uidNumber: 4
 gidNumber: 4
 userPassword: helloworld
-dn: cn=datasourceReadWithStateGroup,ou=Groups,dc=example,dc=org
+dn: cn=datasourceWithStateGroup,ou=Groups,dc=example,dc=org
 objectClass: groupOfUniqueNames
-cn: datasourceReadWithStateGroup
-description: datasourceReadWithStateGroup users
-uniqueMember: uid=datasourceReadWithStateUser,ou=Users,dc=example,dc=org
+cn: datasourceWithStateGroup
+description: datasourceWithStateGroup users
+uniqueMember: uid=datasourceWithStateUser,ou=Users,dc=example,dc=org
 dn: uid=stateOnlyUser,ou=Users,dc=example,dc=org
 uid: stateOnlyUser
@@ -137,38 +137,20 @@ uidNumber: 7
 gidNumber: 7
 userPassword: helloworld
-dn: uid=datasourceReadAndSysUser,ou=Users,dc=example,dc=org
-uid: datasourceReadAndSysUser
-cn: datasourceReadAndSysUser
-sn: datasourceReadAndSysUser
+dn: uid=datasourceAndSysUser,ou=Users,dc=example,dc=org
+uid: datasourceAndSysUser
+cn: datasourceAndSysUser
+sn: datasourceAndSysUser
 objectClass: top
 objectClass: posixAccount
 objectClass: inetOrgPerson
-homeDirectory: /home/datasourceReadAndSysUser
+homeDirectory: /home/datasourceAndSysUser
 uidNumber: 8
 gidNumber: 8
 userPassword: helloworld
-dn: cn=datasourceReadWithSysGroup,ou=Groups,dc=example,dc=org
+dn: cn=datasourceWithSysGroup,ou=Groups,dc=example,dc=org
 objectClass: groupOfUniqueNames
-cn: datasourceReadWithSysGroup
-description: datasourceReadWithSysGroup users
-uniqueMember: uid=datasourceReadAndSysUser,ou=Users,dc=example,dc=org
+cn: datasourceWithSysGroup
+description: datasourceWithSysGroup users
+uniqueMember: uid=datasourceAndSysUser,ou=Users,dc=example,dc=org
-dn: uid=datasourceWriteAndSysUser,ou=Users,dc=example,dc=org
-uid: datasourceWriteAndSysUser
-cn: datasourceWriteAndSysUser
-sn: datasourceWriteAndSysUser
-objectClass: top
-objectClass: posixAccount
-objectClass: inetOrgPerson
-homeDirectory: /home/datasourceWriteAndSysUser
-uidNumber: 8
-gidNumber: 8
-userPassword: helloworld
-dn: cn=datasourceWriteWithSysGroup,ou=Groups,dc=example,dc=org
-objectClass: groupOfUniqueNames
-cn: datasourceWriteWithSysGroup
-description: datasourceWriteWithSysGroup users
-uniqueMember: uid=datasourceWriteAndSysUser,ou=Users,dc=example,dc=org
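As a quick sanity check of the renamed fixture entries, a plain LDAP bind as the new datasourceOnlyUser should succeed with the password above. A throwaway JNDI sketch; the URL is an assumption, since the integration harness wires up its own directory container:

    import java.util.Hashtable;
    import javax.naming.Context;
    import javax.naming.NamingException;
    import javax.naming.directory.InitialDirContext;

    public class LdapBindCheck
    {
      public static void main(String[] args) throws NamingException
      {
        Hashtable<String, String> env = new Hashtable<>();
        env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
        env.put(Context.PROVIDER_URL, "ldap://localhost:389"); // assumption: local test directory
        env.put(Context.SECURITY_AUTHENTICATION, "simple");
        env.put(Context.SECURITY_PRINCIPAL, "uid=datasourceOnlyUser,ou=Users,dc=example,dc=org");
        env.put(Context.SECURITY_CREDENTIALS, "helloworld");
        // Throws javax.naming.AuthenticationException if the entry or password is wrong.
        new InitialDirContext(env).close();
      }
    }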


@@ -98,7 +98,7 @@ public abstract class AbstractAuthConfigurationTest
    * create a ResourceAction set of permissions that can only read a 'auth_test' datasource, for Authorizer
    * implementations which use ResourceAction pattern matching
    */
-  protected static final List<ResourceAction> DATASOURCE_READ_ONLY_PERMISSIONS = Collections.singletonList(
+  protected static final List<ResourceAction> DATASOURCE_ONLY_PERMISSIONS = Collections.singletonList(
       new ResourceAction(
           new Resource("auth_test", ResourceType.DATASOURCE),
           Action.READ
@@ -109,7 +109,7 @@ public abstract class AbstractAuthConfigurationTest
    * create a ResourceAction set of permissions that can only read 'auth_test' + partial SYSTEM_TABLE, for Authorizer
    * implementations which use ResourceAction pattern matching
    */
-  protected static final List<ResourceAction> DATASOURCE_READ_SYS_PERMISSIONS = ImmutableList.of(
+  protected static final List<ResourceAction> DATASOURCE_SYS_PERMISSIONS = ImmutableList.of(
       new ResourceAction(
           new Resource("auth_test", ResourceType.DATASOURCE),
           Action.READ
@@ -134,39 +134,11 @@ public abstract class AbstractAuthConfigurationTest
       )
   );
-  /**
-   * create a ResourceAction set of permissions that can write datasource 'auth_test'
-   */
-  protected static final List<ResourceAction> DATASOURCE_WRITE_SYS_PERMISSIONS = ImmutableList.of(
-      new ResourceAction(
-          new Resource("auth_test", ResourceType.DATASOURCE),
-          Action.WRITE
-      ),
-      new ResourceAction(
-          new Resource("segments", ResourceType.SYSTEM_TABLE),
-          Action.READ
-      ),
-      // test missing state permission but having servers permission
-      new ResourceAction(
-          new Resource("servers", ResourceType.SYSTEM_TABLE),
-          Action.READ
-      ),
-      // test missing state permission but having server_segments permission
-      new ResourceAction(
-          new Resource("server_segments", ResourceType.SYSTEM_TABLE),
-          Action.READ
-      ),
-      new ResourceAction(
-          new Resource("tasks", ResourceType.SYSTEM_TABLE),
-          Action.READ
-      )
-  );
   /**
    * create a ResourceAction set of permissions that can only read 'auth_test' + STATE + SYSTEM_TABLE read access, for
    * Authorizer implementations which use ResourceAction pattern matching
    */
-  protected static final List<ResourceAction> DATASOURCE_READ_SYS_STATE_PERMISSIONS = ImmutableList.of(
+  protected static final List<ResourceAction> DATASOURCE_SYS_STATE_PERMISSIONS = ImmutableList.of(
       new ResourceAction(
           new Resource("auth_test", ResourceType.DATASOURCE),
           Action.READ
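For readers skimming the permission lists: each grant is a (Resource, Action) pair, and a role is just a list of them. A minimal fragment showing the single read grant that DATASOURCE_ONLY_PERMISSIONS carries, using the classes visible in this hunk:

    import java.util.Collections;
    import java.util.List;
    import org.apache.druid.server.security.Action;
    import org.apache.druid.server.security.Resource;
    import org.apache.druid.server.security.ResourceAction;
    import org.apache.druid.server.security.ResourceType;

    // READ on the 'auth_test' datasource, and nothing else.
    List<ResourceAction> readOnlyAuthTest = Collections.singletonList(
        new ResourceAction(new Resource("auth_test", ResourceType.DATASOURCE), Action.READ)
    );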
@@ -215,18 +187,16 @@ public abstract class AbstractAuthConfigurationTest
   protected CoordinatorResourceTestClient coordinatorClient;
   protected HttpClient adminClient;
-  protected HttpClient datasourceReadOnlyUserClient;
-  protected HttpClient datasourceReadAndSysUserClient;
-  protected HttpClient datasourceWriteAndSysUserClient;
-  protected HttpClient datasourceReadWithStateUserClient;
+  protected HttpClient datasourceOnlyUserClient;
+  protected HttpClient datasourceAndSysUserClient;
+  protected HttpClient datasourceWithStateUserClient;
   protected HttpClient stateOnlyUserClient;
   protected HttpClient internalSystemClient;
-  protected abstract void setupDatasourceReadOnlyUser() throws Exception;
-  protected abstract void setupDatasourceReadAndSysTableUser() throws Exception;
-  protected abstract void setupDatasourceWriteAndSysTableUser() throws Exception;
-  protected abstract void setupDatasourceReadAndSysAndStateUser() throws Exception;
+  protected abstract void setupDatasourceOnlyUser() throws Exception;
+  protected abstract void setupDatasourceAndSysTableUser() throws Exception;
+  protected abstract void setupDatasourceAndSysAndStateUser() throws Exception;
   protected abstract void setupSysTableAndStateOnlyUser() throws Exception;
   protected abstract void setupTestSpecificHttpClients() throws Exception;
   protected abstract String getAuthenticatorName();
@@ -272,44 +242,44 @@ public abstract class AbstractAuthConfigurationTest
   }
   @Test
-  public void test_systemSchemaAccess_datasourceReadOnlyUser() throws Exception
+  public void test_systemSchemaAccess_datasourceOnlyUser() throws Exception
   {
     // check that we can access a datasource-permission restricted resource on the broker
     HttpUtil.makeRequest(
-        datasourceReadOnlyUserClient,
+        datasourceOnlyUserClient,
         HttpMethod.GET,
         config.getBrokerUrl() + "/druid/v2/datasources/auth_test",
         null
     );
     // as user that can only read auth_test
-    LOG.info("Checking sys.segments query as datasourceReadOnlyUser...");
+    LOG.info("Checking sys.segments query as datasourceOnlyUser...");
     verifySystemSchemaQueryFailure(
-        datasourceReadOnlyUserClient,
+        datasourceOnlyUserClient,
         SYS_SCHEMA_SEGMENTS_QUERY,
         HttpResponseStatus.FORBIDDEN,
         "{\"Access-Check-Result\":\"Allowed:false, Message:\"}"
     );
-    LOG.info("Checking sys.servers query as datasourceReadOnlyUser...");
+    LOG.info("Checking sys.servers query as datasourceOnlyUser...");
     verifySystemSchemaQueryFailure(
-        datasourceReadOnlyUserClient,
+        datasourceOnlyUserClient,
         SYS_SCHEMA_SERVERS_QUERY,
         HttpResponseStatus.FORBIDDEN,
         "{\"Access-Check-Result\":\"Allowed:false, Message:\"}"
     );
-    LOG.info("Checking sys.server_segments query as datasourceReadOnlyUser...");
+    LOG.info("Checking sys.server_segments query as datasourceOnlyUser...");
     verifySystemSchemaQueryFailure(
-        datasourceReadOnlyUserClient,
+        datasourceOnlyUserClient,
         SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
         HttpResponseStatus.FORBIDDEN,
         "{\"Access-Check-Result\":\"Allowed:false, Message:\"}"
     );
-    LOG.info("Checking sys.tasks query as datasourceReadOnlyUser...");
+    LOG.info("Checking sys.tasks query as datasourceOnlyUser...");
     verifySystemSchemaQueryFailure(
-        datasourceReadOnlyUserClient,
+        datasourceOnlyUserClient,
         SYS_SCHEMA_TASKS_QUERY,
         HttpResponseStatus.FORBIDDEN,
         "{\"Access-Check-Result\":\"Allowed:false, Message:\"}"
@@ -317,119 +287,83 @@ public abstract class AbstractAuthConfigurationTest
   }
   @Test
-  public void test_systemSchemaAccess_datasourceReadAndSysUser() throws Exception
+  public void test_systemSchemaAccess_datasourceAndSysUser() throws Exception
   {
     // check that we can access a datasource-permission restricted resource on the broker
     HttpUtil.makeRequest(
-        datasourceReadAndSysUserClient,
+        datasourceAndSysUserClient,
         HttpMethod.GET,
         config.getBrokerUrl() + "/druid/v2/datasources/auth_test",
         null
     );
     // as user that can only read auth_test
-    LOG.info("Checking sys.segments query as datasourceReadAndSysUser...");
+    LOG.info("Checking sys.segments query as datasourceAndSysUser...");
     verifySystemSchemaQuery(
-        datasourceReadAndSysUserClient,
+        datasourceAndSysUserClient,
         SYS_SCHEMA_SEGMENTS_QUERY,
         adminSegments.stream()
                      .filter((segmentEntry) -> "auth_test".equals(segmentEntry.get("datasource")))
                      .collect(Collectors.toList())
     );
-    LOG.info("Checking sys.servers query as datasourceReadAndSysUser...");
+    LOG.info("Checking sys.servers query as datasourceAndSysUser...");
     verifySystemSchemaQueryFailure(
-        datasourceReadAndSysUserClient,
+        datasourceAndSysUserClient,
         SYS_SCHEMA_SERVERS_QUERY,
         HttpResponseStatus.FORBIDDEN,
         "{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}"
     );
-    LOG.info("Checking sys.server_segments query as datasourceReadAndSysUser...");
+    LOG.info("Checking sys.server_segments query as datasourceAndSysUser...");
     verifySystemSchemaQueryFailure(
-        datasourceReadAndSysUserClient,
+        datasourceAndSysUserClient,
         SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
         HttpResponseStatus.FORBIDDEN,
         "{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}"
     );
-    // Verify that sys.tasks result is empty as it is filtered by Datasource WRITE access
-    LOG.info("Checking sys.tasks query as datasourceReadAndSysUser...");
+    LOG.info("Checking sys.tasks query as datasourceAndSysUser...");
     verifySystemSchemaQuery(
-        datasourceReadAndSysUserClient,
+        datasourceAndSysUserClient,
-        SYS_SCHEMA_TASKS_QUERY,
-        Collections.emptyList()
-    );
-  }
-  @Test
-  public void test_systemSchemaAccess_datasourceWriteAndSysUser() throws Exception
-  {
-    // Verify that sys.segments result is empty as it is filtered by Datasource READ access
-    LOG.info("Checking sys.segments query as datasourceWriteAndSysUser...");
-    verifySystemSchemaQuery(
-        datasourceWriteAndSysUserClient,
-        SYS_SCHEMA_SEGMENTS_QUERY,
-        Collections.emptyList()
-    );
-    LOG.info("Checking sys.servers query as datasourceWriteAndSysUser...");
-    verifySystemSchemaQueryFailure(
-        datasourceWriteAndSysUserClient,
-        SYS_SCHEMA_SERVERS_QUERY,
-        HttpResponseStatus.FORBIDDEN,
-        "{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}"
-    );
-    LOG.info("Checking sys.server_segments query as datasourceWriteAndSysUser...");
-    verifySystemSchemaQueryFailure(
-        datasourceWriteAndSysUserClient,
-        SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
-        HttpResponseStatus.FORBIDDEN,
-        "{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}"
-    );
-    LOG.info("Checking sys.tasks query as datasourceWriteAndSysUser...");
-    verifySystemSchemaQuery(
-        datasourceWriteAndSysUserClient,
         SYS_SCHEMA_TASKS_QUERY,
         adminTasks.stream()
-                  .filter((segmentEntry) -> "auth_test".equals(segmentEntry.get("datasource")))
+                  .filter((taskEntry) -> "auth_test".equals(taskEntry.get("datasource")))
                   .collect(Collectors.toList())
     );
   }
   @Test
-  public void test_systemSchemaAccess_datasourceReadAndSysWithStateUser() throws Exception
+  public void test_systemSchemaAccess_datasourceAndSysWithStateUser() throws Exception
   {
     // check that we can access a state-permission restricted resource on the broker
     HttpUtil.makeRequest(
-        datasourceReadWithStateUserClient,
+        datasourceWithStateUserClient,
         HttpMethod.GET,
         config.getBrokerUrl() + "/status",
         null
     );
     // as user that can read auth_test and STATE
-    LOG.info("Checking sys.segments query as datasourceReadWithStateUser...");
+    LOG.info("Checking sys.segments query as datasourceWithStateUser...");
     verifySystemSchemaQuery(
-        datasourceReadWithStateUserClient,
+        datasourceWithStateUserClient,
         SYS_SCHEMA_SEGMENTS_QUERY,
         adminSegments.stream()
                      .filter((segmentEntry) -> "auth_test".equals(segmentEntry.get("datasource")))
                      .collect(Collectors.toList())
     );
-    LOG.info("Checking sys.servers query as datasourceReadWithStateUser...");
+    LOG.info("Checking sys.servers query as datasourceWithStateUser...");
     verifySystemSchemaServerQuery(
-        datasourceReadWithStateUserClient,
+        datasourceWithStateUserClient,
         SYS_SCHEMA_SERVERS_QUERY,
         adminServers
     );
-    LOG.info("Checking sys.server_segments query as datasourceReadWithStateUser...");
+    LOG.info("Checking sys.server_segments query as datasourceWithStateUser...");
     verifySystemSchemaQuery(
-        datasourceReadWithStateUserClient,
+        datasourceWithStateUserClient,
         SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
         adminServerSegments.stream()
                            .filter((serverSegmentEntry) -> ((String) serverSegmentEntry.get("segment_id")).contains(
@@ -437,12 +371,13 @@ public abstract class AbstractAuthConfigurationTest
                            .collect(Collectors.toList())
     );
-    // Verify that sys.tasks result is empty as it is filtered by Datasource WRITE access
-    LOG.info("Checking sys.tasks query as datasourceReadWithStateUser...");
+    LOG.info("Checking sys.tasks query as datasourceWithStateUser...");
     verifySystemSchemaQuery(
-        datasourceReadWithStateUserClient,
+        datasourceWithStateUserClient,
         SYS_SCHEMA_TASKS_QUERY,
-        Collections.emptyList()
+        adminTasks.stream()
+                  .filter((taskEntry) -> "auth_test".equals(taskEntry.get("datasource")))
+                  .collect(Collectors.toList())
     );
   }
@@ -565,10 +500,9 @@ public abstract class AbstractAuthConfigurationTest
   protected void setupHttpClientsAndUsers() throws Exception
   {
     setupHttpClients();
-    setupDatasourceReadOnlyUser();
-    setupDatasourceReadAndSysTableUser();
-    setupDatasourceWriteAndSysTableUser();
-    setupDatasourceReadAndSysAndStateUser();
+    setupDatasourceOnlyUser();
+    setupDatasourceAndSysTableUser();
+    setupDatasourceAndSysAndStateUser();
     setupSysTableAndStateOnlyUser();
   }
@@ -832,23 +766,18 @@ public abstract class AbstractAuthConfigurationTest
         httpClient
     );
-    datasourceReadOnlyUserClient = new CredentialedHttpClient(
-        new BasicCredentials("datasourceReadOnlyUser", "helloworld"),
+    datasourceOnlyUserClient = new CredentialedHttpClient(
+        new BasicCredentials("datasourceOnlyUser", "helloworld"),
         httpClient
     );
-    datasourceReadAndSysUserClient = new CredentialedHttpClient(
-        new BasicCredentials("datasourceReadAndSysUser", "helloworld"),
+    datasourceAndSysUserClient = new CredentialedHttpClient(
+        new BasicCredentials("datasourceAndSysUser", "helloworld"),
         httpClient
     );
-    datasourceWriteAndSysUserClient = new CredentialedHttpClient(
-        new BasicCredentials("datasourceWriteAndSysUser", "helloworld"),
-        httpClient
-    );
-    datasourceReadWithStateUserClient = new CredentialedHttpClient(
-        new BasicCredentials("datasourceReadWithStateUser", "helloworld"),
+    datasourceWithStateUserClient = new CredentialedHttpClient(
+        new BasicCredentials("datasourceWithStateUser", "helloworld"),
         httpClient
     );


@@ -70,50 +70,38 @@ public class ITBasicAuthConfigurationTest extends AbstractAuthConfigurationTest
   }
   @Override
-  protected void setupDatasourceReadOnlyUser() throws Exception
+  protected void setupDatasourceOnlyUser() throws Exception
   {
     createUserAndRoleWithPermissions(
         adminClient,
-        "datasourceReadOnlyUser",
+        "datasourceOnlyUser",
         "helloworld",
-        "datasourceReadOnlyRole",
-        DATASOURCE_READ_ONLY_PERMISSIONS
+        "datasourceOnlyRole",
+        DATASOURCE_ONLY_PERMISSIONS
     );
   }
   @Override
-  protected void setupDatasourceReadAndSysTableUser() throws Exception
+  protected void setupDatasourceAndSysTableUser() throws Exception
   {
     createUserAndRoleWithPermissions(
         adminClient,
-        "datasourceReadAndSysUser",
+        "datasourceAndSysUser",
         "helloworld",
-        "datasourceReadAndSysRole",
-        DATASOURCE_READ_SYS_PERMISSIONS
+        "datasourceAndSysRole",
+        DATASOURCE_SYS_PERMISSIONS
     );
   }
   @Override
-  protected void setupDatasourceWriteAndSysTableUser() throws Exception
+  protected void setupDatasourceAndSysAndStateUser() throws Exception
   {
     createUserAndRoleWithPermissions(
         adminClient,
-        "datasourceWriteAndSysUser",
+        "datasourceWithStateUser",
         "helloworld",
-        "datasourceWriteAndSysRole",
-        DATASOURCE_WRITE_SYS_PERMISSIONS
-    );
-  }
-  @Override
-  protected void setupDatasourceReadAndSysAndStateUser() throws Exception
-  {
-    createUserAndRoleWithPermissions(
-        adminClient,
-        "datasourceReadWithStateUser",
-        "helloworld",
-        "datasourceReadWithStateRole",
-        DATASOURCE_READ_SYS_STATE_PERMISSIONS
+        "datasourceWithStateRole",
+        DATASOURCE_SYS_STATE_PERMISSIONS
     );
   }
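createUserAndRoleWithPermissions is defined elsewhere in this class. Judging from the druid-basic-security extension's HTTP API, it presumably chains admin calls along these lines; the 'basic' authenticator/authorizer names and the exact sequence are assumptions:

    // Hedged outline of the admin calls behind createUserAndRoleWithPermissions.
    String[] assumedAdminCalls = {
        "POST /druid-ext/basic-security/authentication/db/basic/users/{user}",
        "POST /druid-ext/basic-security/authentication/db/basic/users/{user}/credentials",
        "POST /druid-ext/basic-security/authorization/db/basic/users/{user}",
        "POST /druid-ext/basic-security/authorization/db/basic/roles/{role}",
        "POST /druid-ext/basic-security/authorization/db/basic/users/{user}/roles/{role}",
        "POST /druid-ext/basic-security/authorization/db/basic/roles/{role}/permissions"
    };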


@@ -120,38 +120,29 @@ public class ITBasicAuthLdapConfigurationTest extends AbstractAuthConfigurationTest
   @Override
-  protected void setupDatasourceReadOnlyUser() throws Exception
+  protected void setupDatasourceOnlyUser() throws Exception
   {
     createRoleWithPermissionsAndGroupMapping(
-        "datasourceReadOnlyGroup",
-        ImmutableMap.of("datasourceReadOnlyRole", DATASOURCE_READ_ONLY_PERMISSIONS)
+        "datasourceOnlyGroup",
+        ImmutableMap.of("datasourceOnlyRole", DATASOURCE_ONLY_PERMISSIONS)
     );
   }
   @Override
-  protected void setupDatasourceReadAndSysTableUser() throws Exception
+  protected void setupDatasourceAndSysTableUser() throws Exception
   {
     createRoleWithPermissionsAndGroupMapping(
-        "datasourceReadWithSysGroup",
-        ImmutableMap.of("datasourceReadWithSysRole", DATASOURCE_READ_SYS_PERMISSIONS)
+        "datasourceWithSysGroup",
+        ImmutableMap.of("datasourceWithSysRole", DATASOURCE_SYS_PERMISSIONS)
     );
   }
   @Override
-  protected void setupDatasourceWriteAndSysTableUser() throws Exception
+  protected void setupDatasourceAndSysAndStateUser() throws Exception
   {
     createRoleWithPermissionsAndGroupMapping(
-        "datasourceWriteWithSysGroup",
-        ImmutableMap.of("datasourceWriteWithSysRole", DATASOURCE_WRITE_SYS_PERMISSIONS)
-    );
-  }
-  @Override
-  protected void setupDatasourceReadAndSysAndStateUser() throws Exception
-  {
-    createRoleWithPermissionsAndGroupMapping(
-        "datasourceReadWithStateGroup",
-        ImmutableMap.of("datasourceReadWithStateRole", DATASOURCE_READ_SYS_STATE_PERMISSIONS)
+        "datasourceWithStateGroup",
+        ImmutableMap.of("datasourceWithStateRole", DATASOURCE_SYS_STATE_PERMISSIONS)
     );
   }


@@ -872,7 +872,7 @@ public class SystemSchema extends AbstractSchema
     );
     Function<TaskStatusPlus, Iterable<ResourceAction>> raGenerator = task -> Collections.singletonList(
-        AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR.apply(task.getDataSource()));
+        AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(task.getDataSource()));
     final Iterable<TaskStatusPlus> authorizedTasks = AuthorizationUtils.filterAuthorizedResources(
         authenticationResult,
@@ -1014,7 +1014,7 @@ public class SystemSchema extends AbstractSchema
     );
     Function<SupervisorStatus, Iterable<ResourceAction>> raGenerator = supervisor -> Collections.singletonList(
-        AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR.apply(supervisor.getSource()));
+        AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(supervisor.getSource()));
     final Iterable<SupervisorStatus> authorizedSupervisors = AuthorizationUtils.filterAuthorizedResources(
         authenticationResult,
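Net effect of switching both generators back to DATASOURCE_READ_RA_GENERATOR: a sys.tasks or sys.supervisors row survives filtering only if the caller holds READ on that row's datasource. Schematically, with tasks and authorizerMapper assumed in scope as in the surrounding file:

    // Each task maps to exactly one required permission: READ on its datasource.
    Function<TaskStatusPlus, Iterable<ResourceAction>> raGenerator = task ->
        Collections.singletonList(
            AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(task.getDataSource())
        );

    // Rows whose required ResourceActions the caller fails are dropped.
    final Iterable<TaskStatusPlus> authorizedTasks = AuthorizationUtils.filterAuthorizedResources(
        authenticationResult,
        tasks,
        raGenerator,
        authorizerMapper
    );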


@@ -1244,15 +1244,15 @@ public class SystemSchemaTest extends CalciteTestBase
     EasyMock.replay(client, request, responseHandler);
-    // Verify that no row is returned for Datasource Read user
+    // Verify that no row is returned for Datasource Write user
     List<Object[]> rows = tasksTable
-        .scan(createDataContext(Users.DATASOURCE_READ))
+        .scan(createDataContext(Users.DATASOURCE_WRITE))
         .toList();
     Assert.assertTrue(rows.isEmpty());
-    // Verify that 2 rows are is returned for Datasource Write user
+    // Verify that 2 rows are returned for Datasource Read user
     rows = tasksTable
-        .scan(createDataContext(Users.DATASOURCE_WRITE))
+        .scan(createDataContext(Users.DATASOURCE_READ))
         .toList();
     Assert.assertEquals(2, rows.size());
@@ -1363,15 +1363,15 @@ public class SystemSchemaTest extends CalciteTestBase
     EasyMock.replay(client, request, responseHandler);
-    // Verify that no row is returned for Datasource Read user
+    // Verify that no row is returned for Datasource Write user
     List<Object[]> rows = supervisorTable
-        .scan(createDataContext(Users.DATASOURCE_READ))
+        .scan(createDataContext(Users.DATASOURCE_WRITE))
         .toList();
     Assert.assertTrue(rows.isEmpty());
-    // Verify that 1 row is returned for Datasource Write user
+    // Verify that 1 row is returned for Datasource Read user
     rows = supervisorTable
-        .scan(createDataContext(Users.DATASOURCE_WRITE))
+        .scan(createDataContext(Users.DATASOURCE_READ))
         .toList();
     Assert.assertEquals(1, rows.size());
@@ -1451,13 +1451,13 @@ public class SystemSchemaTest extends CalciteTestBase
     final String username = authenticationResult.getIdentity();
     // Allow access to a Datasource if
-    // - any user requests Read access
     // - Super User or Datasource Write User requests Write access
+    // - Super User or Datasource Read User requests Read access
     if (resource.getType().equals(ResourceType.DATASOURCE)) {
       return new Access(
-          action == Action.READ
-          || username.equals(Users.SUPER)
-          || username.equals(Users.DATASOURCE_WRITE)
+          username.equals(Users.SUPER)
+          || (action == Action.READ && username.equals(Users.DATASOURCE_READ))
+          || (action == Action.WRITE && username.equals(Users.DATASOURCE_WRITE))
       );
     }