From a70587f368a6519fceb0388c14befec4e97e8293 Mon Sep 17 00:00:00 2001
From: Alejandro Abdelnur
Date: Fri, 4 May 2012 03:31:44 +0000
Subject: [PATCH] HADOOP-8343. Allow configuration of authorization for
 JmxJsonServlet and MetricsServlet (tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1333750 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                 |  3 ++
 .../org/apache/hadoop/conf/ConfServlet.java   |  6 +--
 .../fs/CommonConfigurationKeysPublic.java     |  3 ++
 .../org/apache/hadoop/http/HttpServer.java    | 39 ++++++++++++++++---
 .../org/apache/hadoop/jmx/JMXJsonServlet.java |  5 +--
 .../apache/hadoop/metrics/MetricsServlet.java |  5 +--
 .../src/main/resources/core-default.xml       |  9 +++++
 .../hadoop/http/HttpServerFunctionalTest.java | 12 ++++++
 .../apache/hadoop/http/TestHttpServer.java    | 23 ++++++++++-
 9 files changed, 89 insertions(+), 16 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 7cff92b1fcc..8bb39d508b8 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -163,6 +163,9 @@ Release 2.0.0 - UNRELEASED
     HADOOP-8210. Common side of HDFS-3148: The client should be able to use
     multiple local interfaces for data transfer. (eli)
 
+    HADOOP-8343. Allow configuration of authorization for JmxJsonServlet and
+    MetricsServlet (tucu)
+
   IMPROVEMENTS
 
     HADOOP-7524. Change RPC to allow multiple protocols including multuple
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java
index 5ca8537135f..da39fa57b74 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.conf;
 
 import java.io.IOException;
-import java.io.OutputStreamWriter;
 import java.io.Writer;
 
 import javax.servlet.ServletException;
@@ -57,9 +56,8 @@ public class ConfServlet extends HttpServlet {
   public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws ServletException, IOException {
 
-    // Do the authorization
-    if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
-        response)) {
+    if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+                                                   request, response)) {
       return;
     }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 9bc5c374593..67f3bc594c9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -228,6 +228,9 @@ public class CommonConfigurationKeysPublic {
   public static final String HADOOP_SECURITY_AUTHORIZATION =
     "hadoop.security.authorization";
   /** See core-default.xml */
+  public static final String HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN =
+    "hadoop.security.instrumentation.requires.admin";
+  /** See core-default.xml */
   public static final String HADOOP_SECURITY_SERVICE_USER_NAME_KEY =
     "hadoop.security.service.user.name.key";
 }
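The constant added above names the hadoop.security.instrumentation.requires.admin key whose default is defined in core-default.xml later in this patch. A minimal sketch of turning the check on programmatically, mirroring what the updated TestHttpServer does; the class and method names below are illustrative and not part of this patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

    // Illustrative helper, not part of this patch.
    public class InstrumentationAccessConfigExample {
      public static Configuration adminOnlyInstrumentationConf() {
        Configuration conf = new Configuration();
        // hasAdministratorAccess() treats a cluster without authorization as
        // open, so this key has to be enabled as well.
        conf.setBoolean(
            CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, true);
        // New key added by this patch; it defaults to false.
        conf.setBoolean(
            CommonConfigurationKeysPublic.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
            true);
        return conf;
      }
    }

With only the new key set and hadoop.security.authorization left at false, the servlets stay open to everyone, which matches the admin-access short circuit visible in the HttpServer hunk below.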
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
index 6a2c9fa360c..ab3e5999e5b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
@@ -779,6 +779,37 @@ public class HttpServer implements FilterContainer {
         : "Inactive HttpServer";
   }
 
+  /**
+   * Checks that the user has privileges to access the instrumentation servlets.
+   * <p/>
+   * If hadoop.security.instrumentation.requires.admin is set to FALSE
+   * (the default value) it always returns TRUE.
+   * <p/>
+   * If hadoop.security.instrumentation.requires.admin is set to TRUE
+   * it checks whether the current user is in the admin ACLs. If the user is
+   * in the admin ACLs it returns TRUE, otherwise it returns FALSE.
+   *
+   * @param servletContext the servlet context.
+   * @param request the servlet request.
+   * @param response the servlet response.
+   * @return TRUE/FALSE based on the logic described above.
+   */
+  public static boolean isInstrumentationAccessAllowed(
+    ServletContext servletContext, HttpServletRequest request,
+    HttpServletResponse response) throws IOException {
+    Configuration conf =
+      (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
+
+    boolean access = true;
+    boolean adminAccess = conf.getBoolean(
+      CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
+      false);
+    if (adminAccess) {
+      access = hasAdministratorAccess(servletContext, request, response);
+    }
+    return access;
+  }
+
   /**
    * Does the user sending the HttpServletRequest has the administrator ACLs? If
    * it isn't the case, response will be modified to send an error to the user.
@@ -794,7 +825,6 @@ public class HttpServer implements FilterContainer {
       HttpServletResponse response) throws IOException {
     Configuration conf =
         (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
-    // If there is no authorization, anybody has administrator access.
     if (!conf.getBoolean(
         CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
@@ -834,12 +864,11 @@
     @Override
     public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws ServletException, IOException {
-      response.setContentType("text/plain; charset=UTF-8");
-      // Do the authorization
-      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
-          response)) {
+      if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+                                                     request, response)) {
         return;
       }
+      response.setContentType("text/plain; charset=UTF-8");
       PrintWriter out = response.getWriter();
       ReflectionUtils.printThreadInfo(out, "");
       out.close();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
index cc46aacd22b..8dc83a3c716 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
@@ -148,9 +148,8 @@ public class JMXJsonServlet extends HttpServlet {
   @Override
   public void doGet(HttpServletRequest request, HttpServletResponse response) {
     try {
-      // Do the authorization
-      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
-          response)) {
+      if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+                                                     request, response)) {
         return;
       }
       JsonGenerator jg = null;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
index 92c342108d1..af469f9a34d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
@@ -106,9 +106,8 @@ public class MetricsServlet extends HttpServlet {
   public void doGet(HttpServletRequest request, HttpServletResponse response) throws
       ServletException, IOException {
 
-    // Do the authorization
-    if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
-        response)) {
+    if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+                                                   request, response)) {
       return;
     }
 
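The guard added to ConfServlet, the stacks servlet, JMXJsonServlet and MetricsServlet above follows a single pattern, so any other servlet registered on the HttpServer could adopt it the same way. A minimal sketch of that pattern; the servlet name and its output are hypothetical and not part of this patch:

    import java.io.IOException;
    import javax.servlet.ServletException;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.apache.hadoop.http.HttpServer;

    // Hypothetical servlet, for illustration only.
    public class MyDiagnosticsServlet extends HttpServlet {
      @Override
      public void doGet(HttpServletRequest request, HttpServletResponse response)
          throws ServletException, IOException {
        // Returns false (after writing an error to the response) when the new
        // admin requirement is enabled and the caller does not pass the admin
        // check; otherwise it returns true.
        if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
            request, response)) {
          return;
        }
        response.setContentType("text/plain; charset=UTF-8");
        response.getWriter().println("diagnostics output would go here");
      }
    }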
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 928e473d727..f94e49782eb 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -62,6 +62,15 @@
   <description>Is service-level authorization enabled?</description>
 </property>
 
+<property>
+  <name>hadoop.security.instrumentation.requires.admin</name>
+  <value>false</value>
+  <description>
+    Indicates if administrator ACLs are required to access
+    instrumentation servlets (JMX, METRICS, CONF, STACKS).
+  </description>
+</property>
+
 <property>
   <name>hadoop.security.authentication</name>
   <value>simple</value>
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
index b32d2a2d2cd..6dee7eb7134 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.http;
 
+import org.apache.hadoop.security.authorize.AccessControlList;
 import org.junit.Assert;
 
 import org.apache.hadoop.conf.Configuration;
@@ -70,6 +71,12 @@ public class HttpServerFunctionalTest extends Assert {
     return createServer(TEST, conf);
   }
 
+  public static HttpServer createTestServer(Configuration conf, AccessControlList adminsAcl)
+      throws IOException {
+    prepareTestWebapp();
+    return createServer(TEST, conf, adminsAcl);
+  }
+
   /**
    * Create but do not start the test webapp server. The test webapp dir is
    * prepared/checked in advance.
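The new createTestServer(Configuration, AccessControlList) overload above gives tests a way to stand up a server whose admin ACL they control. A hypothetical usage sketch; the class name and the user/group names are illustrative and not part of this patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    import org.apache.hadoop.http.HttpServer;
    import org.apache.hadoop.http.HttpServerFunctionalTest;
    import org.apache.hadoop.security.authorize.AccessControlList;

    // Illustrative only; not part of this patch.
    public class AdminOnlyInstrumentationExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setBoolean(
            CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, true);
        conf.setBoolean(
            CommonConfigurationKeysPublic.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
            true);
        // Only user "admin" and members of group "admingroup" are admins here.
        AccessControlList adminsAcl = new AccessControlList("admin admingroup");
        HttpServer server = HttpServerFunctionalTest.createTestServer(conf, adminsAcl);
        server.start();
        try {
          // Non-admin requests to /jmx, /metrics, /conf and /stacks would now be
          // rejected by isInstrumentationAccessAllowed.
          System.out.println("test server listening on port " + server.getPort());
        } finally {
          server.stop();
        }
      }
    }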
@@ -132,6 +139,11 @@
       throws IOException {
     return new HttpServer(webapp, "0.0.0.0", 0, true, conf);
   }
+
+  public static HttpServer createServer(String webapp, Configuration conf, AccessControlList adminsAcl)
+      throws IOException {
+    return new HttpServer(webapp, "0.0.0.0", 0, true, conf, adminsAcl);
+  }
   /**
    * Create an HttpServer instance for the given webapp
    * @param webapp the webapp to work with
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
index bd9c230c50c..a4d5c5a9c4e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
@@ -60,7 +60,6 @@
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mortbay.util.ajax.JSON;
 
@@ -360,6 +359,8 @@
     Configuration conf = new Configuration();
     conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
+        true);
     conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
         DummyFilterInitializer.class.getName());
 
@@ -468,6 +469,26 @@
   }
 
+  @Test
+  public void testRequiresAuthorizationAccess() throws Exception {
+    Configuration conf = new Configuration();
+    ServletContext context = Mockito.mock(ServletContext.class);
+    Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+
+    //requires admin access to instrumentation, FALSE by default
+    Assert.assertTrue(HttpServer.isInstrumentationAccessAllowed(context, request, response));
+
+    //requires admin access to instrumentation, TRUE
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN, true);
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
+    AccessControlList acls = Mockito.mock(AccessControlList.class);
+    Mockito.when(acls.isUserAllowed(Mockito.any())).thenReturn(false);
+    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+    Assert.assertFalse(HttpServer.isInstrumentationAccessAllowed(context, request, response));
+  }
+
   @Test
   public void testBindAddress() throws Exception {
     checkBindAddress("0.0.0.0", 0, false).stop();  // hang onto this one for a bit more testing