HADOOP-6568. Adds authorization for the default servlets. Contributed by Vinod Kumar Vavilapalli.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@916467 13f79535-47bb-0310-9956-ffa450edef68
Devaraj Das 2010-02-25 21:39:38 +00:00
parent 37214591e9
commit 7785232da5
13 changed files with 345 additions and 9 deletions

View File

@@ -62,6 +62,9 @@ Trunk (unreleased changes)
    to change the default 1 MB, the maximum size when large IPC handler
    response buffer is reset. (suresh)

    HADOOP-6568. Adds authorization for the default servlets.
    (Vinod Kumar Vavilapalli via ddas)

  IMPROVEMENTS

    HADOOP-6283. Improve the exception messages thrown by

View File

@@ -53,6 +53,16 @@
     ordering of the filters.</description>
</property>

<property>
  <name>hadoop.cluster.administrators</name>
  <description>Users and/or groups who are designated as the administrators of a
  hadoop cluster. For specifying a list of users and groups the format to use
  is "user1,user2 group1,group2". If set to '*', it allows all users/groups to
  do administration operations of the cluster. If set to '', it allows none.
  </description>
  <value>${user.name}</value>
</property>

<property>
  <name>hadoop.security.authorization</name>
  <value>false</value>
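
For reference, a minimal sketch of setting the two new keys programmatically rather than through a core-site.xml override; it assumes hadoop-common on the classpath, and the administrator user/group names (alice, bob, ops) are hypothetical examples, not defaults:

// Minimal sketch, assuming hadoop-common on the classpath.
import org.apache.hadoop.conf.Configuration;

public class AdminAclConfigExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Servlet authorization is only enforced when this key is true.
    conf.setBoolean("hadoop.security.authorization", true);
    // Format is "users groups": users alice and bob, plus everyone in
    // group ops, become cluster administrators (hypothetical names).
    conf.set("hadoop.cluster.administrators", "alice,bob ops");
    System.out.println(conf.get("hadoop.cluster.administrators"));
  }
}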

View File

@@ -52,6 +52,13 @@ private Configuration getConfFromContext() {
  @Override
  public void doGet(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {

    // Do the authorization
    if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
        response)) {
      return;
    }

    String format = request.getParameter(FORMAT_PARAM);
    if (null == format) {
      format = FORMAT_XML;

View File

@@ -133,5 +133,12 @@ public class CommonConfigurationKeys {
  public static final String HADOOP_SECURITY_GROUP_MAPPING = "hadoop.security.group.mapping";
  public static final String HADOOP_SECURITY_GROUPS_CACHE_SECS = "hadoop.security.groups.cache.secs";
  public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
  public static final String HADOOP_SECURITY_AUTHORIZATION =
      "hadoop.security.authorization";

  /**
   * ACL listing the administrators of a hadoop cluster.
   */
  public final static String HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY =
      "hadoop.cluster.administrators";
}

View File

@@ -0,0 +1,46 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.http;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.mortbay.jetty.servlet.DefaultServlet;

/**
 * General servlet which is admin-authorized.
 */
public class AdminAuthorizedServlet extends DefaultServlet {

  private static final long serialVersionUID = 1L;

  @Override
  protected void doGet(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
    // Do the authorization
    if (HttpServer.hasAdministratorAccess(getServletContext(), request,
        response)) {
      // Authorization is done. Just call super.
      super.doGet(request, response);
    }
  }
}
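
To illustrate the ACL semantics this servlet relies on, here is a standalone sketch using the same AccessControlList and UserGroupInformation calls that hasAdministratorAccess() below uses; the user and group names are hypothetical:

// Sketch of the "user1,user2 group1,group2" ACL format; names are hypothetical.
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;

public class AclFormatExample {
  public static void main(String[] args) {
    // Users come before the space, groups after it.
    AccessControlList acl = new AccessControlList("alice,bob ops");
    UserGroupInformation alice = UserGroupInformation.createRemoteUser("alice");
    UserGroupInformation mallory = UserGroupInformation.createRemoteUser("mallory");
    System.out.println(acl.isUserAllowed(alice));   // true: listed by name
    System.out.println(acl.isUserAllowed(mallory)); // true only if in group "ops"
  }
}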

View File

@@ -27,11 +27,11 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
@@ -45,8 +45,11 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.log.LogLevel;
import org.apache.hadoop.metrics.MetricsServlet;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.conf.ConfServlet;
import org.apache.hadoop.fs.CommonConfigurationKeys;

import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Handler;
@@ -125,12 +128,13 @@ public HttpServer(String name, String bindAddress, int port,
    webServer.setHandler(contexts);

    webAppContext = new WebAppContext();
    webAppContext.setDisplayName("WepAppsContext");
    webAppContext.setContextPath("/");
    webAppContext.setWar(appDir + "/" + name);
    webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
    webServer.addHandler(webAppContext);

    addDefaultApps(contexts, appDir, conf);

    addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
    final FilterInitializer[] initializers = getFilterInitializers(conf);
@@ -182,19 +186,23 @@ private static FilterInitializer[] getFilterInitializers(Configuration conf) {
   * @throws IOException
   */
  protected void addDefaultApps(ContextHandlerCollection parent,
      final String appDir, Configuration conf) throws IOException {
    // set up the context for "/logs/" if "hadoop.log.dir" property is defined.
    String logDir = System.getProperty("hadoop.log.dir");
    if (logDir != null) {
      Context logContext = new Context(parent, "/logs");
      logContext.setResourceBase(logDir);
      logContext.addServlet(AdminAuthorizedServlet.class, "/");
      logContext.setDisplayName("logs");
      logContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
      defaultContexts.put(logContext, true);
    }

    // set up the context for "/static/*"
    Context staticContext = new Context(parent, "/static");
    staticContext.setResourceBase(appDir + "/static");
    staticContext.addServlet(DefaultServlet.class, "/*");
    staticContext.setDisplayName("static");
    staticContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
    defaultContexts.put(staticContext, true);
  }
@@ -279,6 +287,8 @@ public void addFilter(String name, String classname,
    final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
    defineFilter(webAppContext, name, classname, parameters, USER_FACING_URLS);
    LOG.info("Added filter " + name + " (class=" + classname
        + ") to context " + webAppContext.getDisplayName());
    final String[] ALL_URLS = { "/*" };
    for (Map.Entry<Context, Boolean> e : defaultContexts.entrySet()) {
      if (e.getValue()) {
@@ -566,6 +576,50 @@ public String toString() {
        : "Inactive HttpServer";
  }
  /**
   * Does the user sending the HttpServletRequest have the administrator ACLs?
   * If not, the response is modified to send an error to the user.
   *
   * @param servletContext
   * @param request
   * @param response
   * @return true if admin-authorized, false otherwise
   * @throws IOException
   */
  public static boolean hasAdministratorAccess(
      ServletContext servletContext, HttpServletRequest request,
      HttpServletResponse response) throws IOException {
    Configuration conf =
        (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);

    // If there is no authorization, anybody has administrator access.
    if (!conf.getBoolean(
        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
      return true;
    }

    String remoteUser = request.getRemoteUser();
    if (remoteUser == null) {
      return true;
    }

    String adminsAclString =
        conf.get(
            CommonConfigurationKeys.HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY,
            "*");
    AccessControlList adminsAcl = new AccessControlList(adminsAclString);
    UserGroupInformation remoteUserUGI =
        UserGroupInformation.createRemoteUser(remoteUser);
    if (!adminsAcl.isUserAllowed(remoteUserUGI)) {
      response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
          + remoteUser + " is unauthorized to access this page. "
          + "Only superusers/supergroup \"" + adminsAclString
          + "\" can access this page.");
      return false;
    }
    return true;
  }
  /**
   * A very simple servlet to serve up a text representation of the current
   * stack traces. It both returns the stacks to the caller and logs them.

@@ -578,7 +632,13 @@ public static class StackServlet extends HttpServlet {
    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

      // Do the authorization
      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
          response)) {
        return;
      }

      PrintWriter out = new PrintWriter
          (HtmlQuoting.quoteOutputStream(response.getOutputStream()));
      ReflectionUtils.printThreadInfo(out, "");
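
The same guard the patch adds to ConfServlet above, and to LogLevel and MetricsServlet below, works for any custom servlet; a hedged sketch follows, where the servlet class name and response body are hypothetical while the helper call is the one this patch introduces:

// Sketch of guarding a custom servlet with the new helper.
import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.http.HttpServer;

public class MyAdminOnlyServlet extends HttpServlet {
  private static final long serialVersionUID = 1L;

  @Override
  public void doGet(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
    // hasAdministratorAccess() itself sends the 401 when it returns false.
    if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
        response)) {
      return;
    }
    response.getWriter().println("admins only");
  }
}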

View File

@@ -59,6 +59,8 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import static org.apache.hadoop.fs.CommonConfigurationKeys.*;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
@@ -1295,7 +1297,7 @@ protected Server(String bindAddress, int port,
    this.thresholdIdleConnections = conf.getInt("ipc.client.idlethreshold", 4000);
    this.secretManager = (SecretManager<TokenIdentifier>) secretManager;
    this.authorize =
        conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
            false);
    this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled();

View File

@@ -26,6 +26,7 @@
import org.apache.commons.logging.*;
import org.apache.commons.logging.impl.*;
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.util.ServletUtil;

/**

@@ -86,6 +87,13 @@ public static class Servlet extends HttpServlet {
    public void doGet(HttpServletRequest request, HttpServletResponse response
        ) throws ServletException, IOException {

      // Do the authorization
      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
          response)) {
        return;
      }

      PrintWriter out = ServletUtil.initHTML(response, "Log Level");
      String logName = ServletUtil.getParameter(request, "log");
      String level = ServletUtil.getParameter(request, "level");

View File

@@ -30,6 +30,7 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.metrics.spi.OutputRecord;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;

@@ -100,6 +101,13 @@ Map<String, Map<String, List<TagsMetricsPair>>> makeMap(
    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

      // Do the authorization
      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
          response)) {
        return;
      }

      PrintWriter out = new PrintWriter(response.getOutputStream());
      String format = request.getParameter("format");
      Collection<MetricsContext> allContexts =

View File

@@ -21,6 +21,7 @@
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.UserGroupInformation;

/**

@@ -35,7 +36,12 @@ public class ServiceAuthorizationManager {
  /**
   * Configuration key for controlling service-level authorization for Hadoop.
   *
   * @deprecated Use
   *             {@link CommonConfigurationKeys#HADOOP_SECURITY_AUTHORIZATION}
   *             instead.
   */
  @Deprecated
  public static final String SERVICE_AUTHORIZATION_CONFIG =
      "hadoop.security.authorization";

View File

@@ -28,6 +28,7 @@
import org.apache.hadoop.cli.util.ComparatorBase;
import org.apache.hadoop.cli.util.ComparatorData;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import org.apache.hadoop.util.StringUtils;
import static org.junit.Assert.assertTrue;

@@ -104,7 +105,7 @@ public void setUp() throws Exception {
    readTestConfigFile();

    conf = new Configuration();
    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
        true);

    clitestDataDir = new File(TEST_CACHE_DATA_DIR).

View File

@@ -23,17 +23,35 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.ConfServlet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.http.HttpServer.QuotingInputFilter;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

@@ -134,4 +152,163 @@ private String readOutput(URL url) throws IOException {
        readOutput(new URL(baseUrl, "/echomap?a=b&c<=d&a=>")));
  }
  /**
   * Dummy filter that mimics an authentication filter. Obtains user identity
   * from the request parameter user.name. Wraps around the request so that
   * request.getRemoteUser() returns the user identity.
   */
  public static class DummyServletFilter implements Filter {

    private static final Log LOG = LogFactory.getLog(DummyServletFilter.class);

    @Override
    public void destroy() { }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response,
        FilterChain filterChain) throws IOException, ServletException {
      final String userName = request.getParameter("user.name");
      ServletRequest requestModified =
          new HttpServletRequestWrapper((HttpServletRequest) request) {
            @Override
            public String getRemoteUser() {
              return userName;
            }
          };
      filterChain.doFilter(requestModified, response);
    }

    @Override
    public void init(FilterConfig arg0) throws ServletException { }
  }
  /**
   * FilterInitializer that initializes the DummyServletFilter.
   */
  public static class DummyFilterInitializer extends FilterInitializer {
    public DummyFilterInitializer() {
    }

    @Override
    public void initFilter(FilterContainer container, Configuration conf) {
      container.addFilter("DummyFilter", DummyServletFilter.class.getName(),
          null);
    }
  }
  /**
   * Access a URL and return the corresponding HTTP status code. The URL
   * will be accessed as the passed user, by sending the user.name request
   * parameter.
   *
   * @param urlstring
   * @param userName
   * @return the HTTP status code of the response
   * @throws IOException
   */
  static int getHttpStatusCode(String urlstring, String userName)
      throws IOException {
    URL url = new URL(urlstring + "?user.name=" + userName);
    System.out.println("Accessing " + url + " as user " + userName);
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.connect();
    return connection.getResponseCode();
  }
  /**
   * Custom user->group mapping service.
   */
  public static class MyGroupsProvider extends ShellBasedUnixGroupsMapping {
    static Map<String, List<String>> mapping =
        new HashMap<String, List<String>>();

    static void clearMapping() {
      mapping.clear();
    }

    @Override
    public List<String> getGroups(String user) throws IOException {
      return mapping.get(user);
    }
  }
  /**
   * Verify access to the /logs, /stacks, /conf, /logLevel and /metrics
   * servlets when an authentication filter is set but authorization is
   * not enabled.
   *
   * @throws Exception
   */
  @Test
  public void testDisabledAuthorizationOfDefaultServlets() throws Exception {

    Configuration conf = new Configuration();

    // Authorization is disabled by default
    conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
        DummyFilterInitializer.class.getName());
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
        MyGroupsProvider.class.getName());
    Groups.getUserToGroupsMappingService(conf);
    MyGroupsProvider.clearMapping();
    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));

    HttpServer myServer = new HttpServer("test", "0.0.0.0", 0, true, conf);
    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
    myServer.start();
    int port = myServer.getPort();
    String serverURL = "http://localhost:" + port + "/";
    for (String servlet : new String[] { "conf", "logs", "stacks",
        "logLevel", "metrics" }) {
      for (String user : new String[] { "userA", "userB" }) {
        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
            + servlet, user));
      }
    }
    myServer.stop();
  }
  /**
   * Verify the administrator access for the /logs, /stacks, /conf, /logLevel
   * and /metrics servlets.
   *
   * @throws Exception
   */
  @Test
  public void testAuthorizationOfDefaultServlets() throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
        true);
    conf.set(
        CommonConfigurationKeys.HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY,
        "userA,userB groupC,groupD");
    conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
        DummyFilterInitializer.class.getName());

    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
        MyGroupsProvider.class.getName());
    Groups.getUserToGroupsMappingService(conf);
    MyGroupsProvider.clearMapping();
    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
    MyGroupsProvider.mapping.put("userC", Arrays.asList("groupC"));
    MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
    MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));

    HttpServer myServer = new HttpServer("test", "0.0.0.0", 0, true, conf);
    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
    myServer.start();
    int port = myServer.getPort();
    String serverURL = "http://localhost:" + port + "/";
    for (String servlet : new String[] { "conf", "logs", "stacks",
        "logLevel", "metrics" }) {
      for (String user : new String[] { "userA", "userB", "userC", "userD" }) {
        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
            + servlet, user));
      }
      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(
          serverURL + servlet, "userE"));
    }
    myServer.stop();
  }
}

View File

@@ -30,6 +30,7 @@
import org.apache.commons.logging.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.UTF8;
import org.apache.hadoop.io.Writable;

@@ -398,8 +399,8 @@ private void doRPCs(Configuration conf, boolean expectFailure) throws Exception
  public void testAuthorization() throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
        true);

    // Expect to succeed
    conf.set(ACL_CONFIG, "*");