HADOOP-6748. Removes hadoop.cluster.administrators; the cluster administrators ACL is now passed as a parameter to the constructor. Contributed by Amareshwari Sriramadasu

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@955975 13f79535-47bb-0310-9956-ffa450edef68
Amareshwari Sri Ramadasu 2010-06-18 12:30:36 +00:00
parent 5807065a5b
commit 4b8e1bda2d
5 changed files with 44 additions and 34 deletions
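For orientation, a minimal sketch of what this change means for callers. The HttpServer and AccessControlList classes and both constructor signatures are taken from the hunks below; the example class, server name, bind address, and ACL string are placeholders.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.security.authorize.AccessControlList;

public class AdminAclExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // The old five-argument constructor survives but now delegates with a
    // null ACL, so the admin-only pages are not restricted.
    HttpServer open = new HttpServer("example", "0.0.0.0", 0, true, conf);
    // The new six-argument constructor takes the admin ACL directly,
    // replacing the removed hadoop.cluster.administrators property.
    HttpServer guarded = new HttpServer("example", "0.0.0.0", 0, true, conf,
        new AccessControlList("user1,user2 group1,group2"));
    guarded.start();
  }
}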

CHANGES.txt

@@ -999,6 +999,9 @@ Release 0.21.0 - Unreleased
BUG FIXES
HADOOP-6748. Removes hadoop.cluster.administrators; the cluster administrators
ACL is now passed as a parameter to the constructor. (amareshwari)
HADOOP-6828. Herriot uses an old way of accessing log directories (Sreekanth
Ramakrishnan via cos)

core-default.xml

@@ -53,16 +53,6 @@
ordering of the filters.</description>
</property>
<property>
<name>hadoop.cluster.administrators</name>
<description>Users and/or groups who are designated as the administrators of a
hadoop cluster. To specify a list of users and groups, the format to use is
"user1,user2 group1,group2". If set to '*', it allows all users/groups to
do administration operations of the cluster. If set to '', it allows none.
</description>
<value>${user.name}</value>
</property>
<property>
<name>hadoop.security.authorization</name>
<value>false</value>

CommonConfigurationKeys.java

@@ -153,11 +153,6 @@ public class CommonConfigurationKeys {
public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
public static final String HADOOP_SECURITY_AUTHORIZATION =
"hadoop.security.authorization";
/**
* Configuration property specifying the administrator ACL for a hadoop cluster.
*/
public final static String HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY =
"hadoop.cluster.administrators";
public static final String HADOOP_SECURITY_SERVICE_USER_NAME_KEY =
"hadoop.security.service.user.name.key";
}

HttpServer.java

@@ -86,6 +86,9 @@ public class HttpServer implements FilterContainer {
// The ServletContext attribute where the daemon Configuration
// gets stored.
public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
static final String ADMINS_ACL = "admins.acl";
private AccessControlList adminsAcl;
protected final Server webServer;
protected final Connector listener;
@@ -115,9 +118,25 @@ public class HttpServer implements FilterContainer {
*/
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf) throws IOException {
this(name, bindAddress, port, findPort, conf, null);
}
/**
* Create a status server on the given port.
* The jsp scripts are taken from src/webapps/<name>.
* @param name The name of the server
* @param bindAddress The address to bind to
* @param port The port to use on the server
* @param findPort whether the server should start at the given port and
* increment by 1 until it finds a free port.
* @param conf Configuration
* @param adminsAcl {@link AccessControlList} of the admins
*/
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, AccessControlList adminsAcl)
throws IOException {
webServer = new Server();
this.findPort = findPort;
this.adminsAcl = adminsAcl;
listener = createBaseListener(conf);
listener.setHost(bindAddress);
listener.setPort(port);
@@ -139,6 +158,7 @@ public class HttpServer implements FilterContainer {
webAppContext.setContextPath("/");
webAppContext.setWar(appDir + "/" + name);
webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
webAppContext.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
webServer.addHandler(webAppContext);
addDefaultApps(contexts, appDir, conf);
@@ -201,7 +221,7 @@ public class HttpServer implements FilterContainer {
logContext.setResourceBase(logDir);
logContext.addServlet(AdminAuthorizedServlet.class, "/");
logContext.setDisplayName("logs");
logContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
setContextAttributes(logContext, conf);
defaultContexts.put(logContext, true);
}
// set up the context for "/static/*"
@@ -209,10 +229,15 @@ public class HttpServer implements FilterContainer {
staticContext.setResourceBase(appDir + "/static");
staticContext.addServlet(DefaultServlet.class, "/*");
staticContext.setDisplayName("static");
staticContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
setContextAttributes(staticContext, conf);
defaultContexts.put(staticContext, true);
}
private void setContextAttributes(Context context, Configuration conf) {
context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
}
/**
* Add default servlets.
*/
@@ -586,20 +611,18 @@ public class HttpServer implements FilterContainer {
if (remoteUser == null) {
return true;
}
String adminsAclString =
conf.get(
CommonConfigurationKeys.HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY,
"*");
AccessControlList adminsAcl = new AccessControlList(adminsAclString);
AccessControlList adminsAcl = (AccessControlList) servletContext
.getAttribute(ADMINS_ACL);
UserGroupInformation remoteUserUGI =
UserGroupInformation.createRemoteUser(remoteUser);
if (!adminsAcl.isUserAllowed(remoteUserUGI)) {
response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
+ remoteUser + " is unauthorized to access this page. "
+ "Only superusers/supergroup \"" + adminsAclString
+ "\" can access this page.");
return false;
if (adminsAcl != null) {
if (!adminsAcl.isUserAllowed(remoteUserUGI)) {
response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
+ remoteUser + " is unauthorized to access this page. "
+ "Only \"" + adminsAcl.toString()
+ "\" can access this page.");
return false;
}
}
return true;
}
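The net effect of this hunk: the ACL is read from the ADMINS_ACL servlet-context attribute instead of the configuration, and a null ACL (what the old five-argument constructor now supplies) admits any authenticated user. Below is a hedged sketch of how a servlet consumes the check, modeled on AdminAuthorizedServlet wired to /logs above; the servlet class is hypothetical, and it assumes the method patched here is HttpServer.hasAdministratorAccess(ServletContext, HttpServletRequest, HttpServletResponse).

import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.http.HttpServer;

public class AdminOnlyServlet extends HttpServlet {
  @Override
  public void doGet(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
    // The check sends SC_UNAUTHORIZED itself when the user fails the ACL,
    // so the servlet only needs to stop handling the request.
    if (!HttpServer.hasAdministratorAccess(getServletContext(),
        request, response)) {
      return;
    }
    response.getWriter().println("only admins reach this point");
  }
}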

TestHttpServer.java

@@ -49,6 +49,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -293,9 +294,6 @@ public class TestHttpServer extends HttpServerFunctionalTest {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
true);
conf.set(
CommonConfigurationKeys.HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY,
"userA,userB groupC,groupD");
conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
DummyFilterInitializer.class.getName());
@@ -309,7 +307,8 @@ public class TestHttpServer extends HttpServerFunctionalTest {
MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
HttpServer myServer = new HttpServer("test", "0.0.0.0", 0, true, conf);
HttpServer myServer = new HttpServer("test", "0.0.0.0", 0, true, conf,
new AccessControlList("userA,userB groupC,groupD"));
myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
myServer.start();
int port = myServer.getPort();