From 7785232da5a1db2d06f5d208ce3258c342a4d19c Mon Sep 17 00:00:00 2001
From: Devaraj Das
Date: Thu, 25 Feb 2010 21:39:38 +0000
Subject: [PATCH] HADOOP-6568. Adds authorization for the default servlets.
 Contributed by Vinod Kumar Vavilapalli.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@916467 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                                       |   3 +
 src/java/core-default.xml                         |  10 +
 .../org/apache/hadoop/conf/ConfServlet.java       |   7 +
 .../hadoop/fs/CommonConfigurationKeys.java        |   7 +
 .../hadoop/http/AdminAuthorizedServlet.java       |  46 +++++
 .../org/apache/hadoop/http/HttpServer.java        |  70 ++++++-
 src/java/org/apache/hadoop/ipc/Server.java        |   4 +-
 src/java/org/apache/hadoop/log/LogLevel.java      |   8 +
 .../apache/hadoop/metrics/MetricsServlet.java     |   8 +
 .../ServiceAuthorizationManager.java              |   6 +
 .../org/apache/hadoop/cli/CLITestHelper.java      |   3 +-
 .../apache/hadoop/http/TestHttpServer.java        | 177 ++++++++++++++++++
 src/test/core/org/apache/hadoop/ipc/TestRPC.java  |   5 +-
 13 files changed, 345 insertions(+), 9 deletions(-)
 create mode 100644 src/java/org/apache/hadoop/http/AdminAuthorizedServlet.java

diff --git a/CHANGES.txt b/CHANGES.txt
index 8e054c92f61..5955eff7878 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -62,6 +62,9 @@ Trunk (unreleased changes)
     to change the default 1 MB, the maximum size when large IPC handler
     response buffer is reset. (suresh)
 
+    HADOOP-6568. Adds authorization for the default servlets.
+    (Vinod Kumar Vavilapalli via ddas)
+
   IMPROVEMENTS
 
     HADOOP-6283. Improve the exception messages thrown by
diff --git a/src/java/core-default.xml b/src/java/core-default.xml
index 4a6bfae64bf..8baa34b4af8 100644
--- a/src/java/core-default.xml
+++ b/src/java/core-default.xml
@@ -53,6 +53,16 @@
   ordering of the filters.</description>
 </property>
 
+<property>
+  <name>hadoop.cluster.administrators</name>
+  <description>Users and/or groups who are designated as the administrators of a
+  hadoop cluster. For specifying a list of users and groups the format to use
+  is "user1,user2 group1,group2". If set to '*', it allows all users/groups to
+  do administration operations of the cluster. If set to '', it allows none.
+  </description>
+  <value>${user.name}</value>
+</property>
+
 <property>
   <name>hadoop.security.authorization</name>
   <value>false</value>
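A note on the new key's value format: it is a comma-separated list of user names, then a single space, then a comma-separated list of group names, with '*' as a match-everything wildcard. The sketch below illustrates those semantics only; it is plain JDK code, not the actual parser, which lives in org.apache.hadoop.security.authorize.AccessControlList and is wired into HttpServer further down in this patch.

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    // Illustrative restatement of the "user1,user2 group1,group2" ACL format.
    public class AclFormatDemo {

      static boolean isAllowed(String acl, String user, Set<String> userGroups) {
        if (acl.equals("*")) {
          return true;                        // wildcard: everybody is an admin
        }
        String[] parts = acl.split(" ", 2);   // "<users> <groups>"
        Set<String> users =
            new HashSet<String>(Arrays.asList(parts[0].split(",")));
        Set<String> groups = parts.length > 1
            ? new HashSet<String>(Arrays.asList(parts[1].split(",")))
            : Collections.<String>emptySet();
        if (users.contains(user)) {
          return true;
        }
        for (String g : userGroups) {
          if (groups.contains(g)) {
            return true;
          }
        }
        return false;
      }

      public static void main(String[] args) {
        Set<String> inGroupC = new HashSet<String>(Arrays.asList("groupC"));
        System.out.println(
            isAllowed("userA,userB groupC,groupD", "userX", inGroupC)); // true, via groupC
        System.out.println(isAllowed("userA,userB groupC,groupD", "userA",
            Collections.<String>emptySet()));                           // true, via user name
        System.out.println(isAllowed("", "userA", inGroupC));           // false: '' allows none
      }
    }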
diff --git a/src/java/org/apache/hadoop/conf/ConfServlet.java b/src/java/org/apache/hadoop/conf/ConfServlet.java
index 1dba8c864e3..55ca2982f48 100644
--- a/src/java/org/apache/hadoop/conf/ConfServlet.java
+++ b/src/java/org/apache/hadoop/conf/ConfServlet.java
@@ -52,6 +52,13 @@ public class ConfServlet extends HttpServlet {
   @Override
   public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws ServletException, IOException {
+
+    // Do the authorization
+    if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
+        response)) {
+      return;
+    }
+
     String format = request.getParameter(FORMAT_PARAM);
     if (null == format) {
       format = FORMAT_XML;
diff --git a/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java b/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
index b33aa7b3086..0852a7d6f78 100644
--- a/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
+++ b/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
@@ -133,5 +133,12 @@ public class CommonConfigurationKeys {
   public static final String HADOOP_SECURITY_GROUP_MAPPING = "hadoop.security.group.mapping";
   public static final String HADOOP_SECURITY_GROUPS_CACHE_SECS = "hadoop.security.groups.cache.secs";
   public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
+  public static final String HADOOP_SECURITY_AUTHORIZATION =
+      "hadoop.security.authorization";
+  /**
+   * ACL denoting the administrator ACLs for a hadoop cluster.
+   */
+  public final static String HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY =
+      "hadoop.cluster.administrators";
 
 }
diff --git a/src/java/org/apache/hadoop/http/AdminAuthorizedServlet.java b/src/java/org/apache/hadoop/http/AdminAuthorizedServlet.java
new file mode 100644
index 00000000000..9e318aec518
--- /dev/null
+++ b/src/java/org/apache/hadoop/http/AdminAuthorizedServlet.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.http;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.mortbay.jetty.servlet.DefaultServlet;
+
+/**
+ * General servlet which is admin-authorized.
+ *
+ */
+public class AdminAuthorizedServlet extends DefaultServlet {
+
+  private static final long serialVersionUID = 1L;
+
+  @Override
+  protected void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+    // Do the authorization
+    if (HttpServer.hasAdministratorAccess(getServletContext(), request,
+        response)) {
+      // Authorization is done. Just call super.
+      super.doGet(request, response);
+    }
+  }
+}
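AdminAuthorizedServlet above is the template for every servlet change in this patch: call HttpServer.hasAdministratorAccess() first and proceed only if it returns true (on failure the helper has already written the 401 response). A minimal sketch of how a downstream servlet could reuse the same guard; the package and class names are invented for illustration, and the guard only works on a context whose ServletContext carries the Configuration attribute, which addDefaultApps below now arranges.

    package org.example.servlets;  // hypothetical package, for illustration

    import java.io.IOException;
    import java.io.PrintWriter;

    import javax.servlet.ServletException;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.apache.hadoop.http.HttpServer;

    // Hypothetical admin-only servlet reusing the guard this patch introduces.
    public class MyAdminServlet extends HttpServlet {

      private static final long serialVersionUID = 1L;

      @Override
      public void doGet(HttpServletRequest request, HttpServletResponse response)
          throws ServletException, IOException {
        // Bail out early; hasAdministratorAccess has already sent the 401.
        if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
            response)) {
          return;
        }
        PrintWriter out = response.getWriter();
        out.println("admin-only content");
        out.close();
      }
    }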
diff --git a/src/java/org/apache/hadoop/http/HttpServer.java b/src/java/org/apache/hadoop/http/HttpServer.java
index f89015ad38c..1e6677bede2 100644
--- a/src/java/org/apache/hadoop/http/HttpServer.java
+++ b/src/java/org/apache/hadoop/http/HttpServer.java
@@ -27,11 +27,11 @@ import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
@@ -45,8 +45,11 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.log.LogLevel;
 import org.apache.hadoop.metrics.MetricsServlet;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.conf.ConfServlet;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 
 import org.mortbay.jetty.Connector;
 import org.mortbay.jetty.Handler;
@@ -125,12 +128,13 @@ public class HttpServer implements FilterContainer {
     webServer.setHandler(contexts);
 
     webAppContext = new WebAppContext();
+    webAppContext.setDisplayName("WepAppsContext");
     webAppContext.setContextPath("/");
     webAppContext.setWar(appDir + "/" + name);
     webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
     webServer.addHandler(webAppContext);
 
-    addDefaultApps(contexts, appDir);
+    addDefaultApps(contexts, appDir, conf);
 
     addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
     final FilterInitializer[] initializers = getFilterInitializers(conf);
@@ -182,19 +186,23 @@ public class HttpServer implements FilterContainer {
    * @throws IOException
    */
   protected void addDefaultApps(ContextHandlerCollection parent,
-      final String appDir) throws IOException {
+      final String appDir, Configuration conf) throws IOException {
     // set up the context for "/logs/" if "hadoop.log.dir" property is defined.
     String logDir = System.getProperty("hadoop.log.dir");
     if (logDir != null) {
       Context logContext = new Context(parent, "/logs");
       logContext.setResourceBase(logDir);
-      logContext.addServlet(DefaultServlet.class, "/");
+      logContext.addServlet(AdminAuthorizedServlet.class, "/");
+      logContext.setDisplayName("logs");
+      logContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
       defaultContexts.put(logContext, true);
     }
     // set up the context for "/static/*"
     Context staticContext = new Context(parent, "/static");
     staticContext.setResourceBase(appDir + "/static");
     staticContext.addServlet(DefaultServlet.class, "/*");
+    staticContext.setDisplayName("static");
+    staticContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
     defaultContexts.put(staticContext, true);
   }
 
@@ -279,6 +287,8 @@ public class HttpServer implements FilterContainer {
     final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
     defineFilter(webAppContext, name, classname, parameters,
         USER_FACING_URLS);
+    LOG.info("Added filter " + name + " (class=" + classname
+        + ") to context " + webAppContext.getDisplayName());
     final String[] ALL_URLS = { "/*" };
     for (Map.Entry<Context, Boolean> e : defaultContexts.entrySet()) {
       if (e.getValue()) {
@@ -566,6 +576,50 @@ public class HttpServer implements FilterContainer {
         : "Inactive HttpServer";
   }
 
+  /**
+   * Does the user sending the HttpServletRequest have the administrator ACLs?
+   * If not, the response will be modified to send an error to the user.
+   *
+   * @param servletContext
+   * @param request
+   * @param response
+   * @return true if admin-authorized, false otherwise
+   * @throws IOException
+   */
+  public static boolean hasAdministratorAccess(
+      ServletContext servletContext, HttpServletRequest request,
+      HttpServletResponse response) throws IOException {
+    Configuration conf =
+        (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
+
+    // If there is no authorization, anybody has administrator access.
+    if (!conf.getBoolean(
+        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
+      return true;
+    }
+
+    String remoteUser = request.getRemoteUser();
+    if (remoteUser == null) {
+      return true;
+    }
+
+    String adminsAclString =
+        conf.get(
+            CommonConfigurationKeys.HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY,
+            "*");
+    AccessControlList adminsAcl = new AccessControlList(adminsAclString);
+    UserGroupInformation remoteUserUGI =
+        UserGroupInformation.createRemoteUser(remoteUser);
+    if (!adminsAcl.isUserAllowed(remoteUserUGI)) {
+      response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
+          + remoteUser + " is unauthorized to access this page. "
+          + "Only superusers/supergroup \"" + adminsAclString
+          + "\" can access this page.");
+      return false;
+    }
+    return true;
+  }
+
   /**
    * A very simple servlet to serve up a text representation of the current
    * stack traces. It both returns the stacks to the caller and logs them.
@@ -578,7 +632,13 @@
     @Override
     public void doGet(HttpServletRequest request, HttpServletResponse response)
         throws ServletException, IOException {
-
+
+      // Do the authorization
+      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
+          response)) {
+        return;
+      }
+
       PrintWriter out = new PrintWriter
                   (HtmlQuoting.quoteOutputStream(response.getOutputStream()));
       ReflectionUtils.printThreadInfo(out, "");
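hasAdministratorAccess checks three things in order: authorization must be enabled at all, the request must carry an authenticated identity, and that identity must pass the configured ACL. Note that a null getRemoteUser() is let through, so the check only restricts access when an authentication filter runs in front of these servlets. A pure-function restatement of that decision order, with invented names, for illustration:

    // Decision order of hasAdministratorAccess, restated as a pure function.
    public class AdminCheckDemo {

      static boolean hasAdminAccess(boolean authorizationEnabled,
          String remoteUser, boolean userInAdminAcl) {
        if (!authorizationEnabled) {
          return true;          // authorization off: everybody is an admin
        }
        if (remoteUser == null) {
          return true;          // no authentication filter identified the caller
        }
        return userInAdminAcl;  // otherwise the configured ACL decides
      }

      public static void main(String[] args) {
        System.out.println(hasAdminAccess(false, "mallory", false)); // true
        System.out.println(hasAdminAccess(true, null, false));       // true
        System.out.println(hasAdminAccess(true, "alice", true));     // true
        System.out.println(hasAdminAccess(true, "mallory", false));  // false -> 401
      }
    }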
diff --git a/src/java/org/apache/hadoop/ipc/Server.java b/src/java/org/apache/hadoop/ipc/Server.java
index 8f6f0e8f965..1f02db48f1b 100644
--- a/src/java/org/apache/hadoop/ipc/Server.java
+++ b/src/java/org/apache/hadoop/ipc/Server.java
@@ -59,6 +59,8 @@ import javax.security.sasl.SaslServer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+
 import static org.apache.hadoop.fs.CommonConfigurationKeys.*;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
@@ -1295,7 +1297,7 @@ public abstract class Server {
     this.thresholdIdleConnections = conf.getInt("ipc.client.idlethreshold", 4000);
     this.secretManager = (SecretManager<TokenIdentifier>) secretManager;
     this.authorize =
-        conf.getBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG,
+        conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
             false);
     this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled();
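The Server change above is a rename, not a behavior change: the deprecated ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG and the new CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION both resolve to the string "hadoop.security.authorization", so existing site configurations keep working. A short sketch of reading the flag the way Server now does, assuming a stock Configuration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;

    public class AuthorizationFlagDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
            true);
        // Same read Server performs; the default is false.
        boolean authorize = conf.getBoolean(
            CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false);
        System.out.println("authorize = " + authorize); // authorize = true
      }
    }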
diff --git a/src/java/org/apache/hadoop/log/LogLevel.java b/src/java/org/apache/hadoop/log/LogLevel.java
index 99fd3d0e7e7..389c9ce0bac 100644
--- a/src/java/org/apache/hadoop/log/LogLevel.java
+++ b/src/java/org/apache/hadoop/log/LogLevel.java
@@ -26,6 +26,7 @@ import javax.servlet.http.*;
 
 import org.apache.commons.logging.*;
 import org.apache.commons.logging.impl.*;
+import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.util.ServletUtil;
 
 /**
@@ -86,6 +87,13 @@ public class LogLevel {
     public void doGet(HttpServletRequest request, HttpServletResponse response
         ) throws ServletException, IOException {
+
+      // Do the authorization
+      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
+          response)) {
+        return;
+      }
+
       PrintWriter out = ServletUtil.initHTML(response, "Log Level");
       String logName = ServletUtil.getParameter(request, "log");
       String level = ServletUtil.getParameter(request, "level");
diff --git a/src/java/org/apache/hadoop/metrics/MetricsServlet.java b/src/java/org/apache/hadoop/metrics/MetricsServlet.java
index 44c0bd39654..770d6983df2 100644
--- a/src/java/org/apache/hadoop/metrics/MetricsServlet.java
+++ b/src/java/org/apache/hadoop/metrics/MetricsServlet.java
@@ -30,6 +30,7 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.metrics.spi.OutputRecord;
 import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
 import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
@@ -100,6 +101,13 @@ public class MetricsServlet extends HttpServlet {
   @Override
   public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws ServletException, IOException {
+
+    // Do the authorization
+    if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
+        response)) {
+      return;
+    }
+
     PrintWriter out = new PrintWriter(response.getOutputStream());
     String format = request.getParameter("format");
 
     Collection<MetricsContext> allContexts =
diff --git a/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java b/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
index b8d1488b905..6cf9a4007dd 100644
--- a/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
+++ b/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
@@ -21,6 +21,7 @@ import java.util.IdentityHashMap;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
@@ -35,7 +36,12 @@ public class ServiceAuthorizationManager {
 
   /**
    * Configuration key for controlling service-level authorization for Hadoop.
+   *
+   * @deprecated Use
+   *             {@link CommonConfigurationKeys#HADOOP_SECURITY_AUTHORIZATION}
+   *             instead.
    */
+  @Deprecated
   public static final String SERVICE_AUTHORIZATION_CONFIG =
     "hadoop.security.authorization";
 
diff --git a/src/test/core/org/apache/hadoop/cli/CLITestHelper.java b/src/test/core/org/apache/hadoop/cli/CLITestHelper.java
index bffb5f0d484..5df10b2c965 100644
--- a/src/test/core/org/apache/hadoop/cli/CLITestHelper.java
+++ b/src/test/core/org/apache/hadoop/cli/CLITestHelper.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.cli.util.CommandExecutor.Result;
 import org.apache.hadoop.cli.util.ComparatorBase;
 import org.apache.hadoop.cli.util.ComparatorData;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.util.StringUtils;
 import static org.junit.Assert.assertTrue;
@@ -104,7 +105,7 @@ public class CLITestHelper {
     readTestConfigFile();
 
     conf = new Configuration();
-    conf.setBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG,
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
         true);
 
     clitestDataDir = new File(TEST_CACHE_DATA_DIR).
diff --git a/src/test/core/org/apache/hadoop/http/TestHttpServer.java b/src/test/core/org/apache/hadoop/http/TestHttpServer.java
index 5eec66e00fe..c6777a10a3b 100644
--- a/src/test/core/org/apache/hadoop/http/TestHttpServer.java
+++ b/src/test/core/org/apache/hadoop/http/TestHttpServer.java
@@ -23,17 +23,35 @@ import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
+import java.net.HttpURLConnection;
 import java.net.URL;
+import java.util.Arrays;
 import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
 import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.ConfServlet;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.http.HttpServer.QuotingInputFilter;
+import org.apache.hadoop.security.Groups;
+import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -134,4 +152,163 @@ public class TestHttpServer {
         readOutput(new URL(baseUrl, "/echomap?a=b&c<=d&a=>")));
   }
 
+  /**
+   * Dummy filter that mimics an authentication filter. Obtains user identity
+   * from the request parameter user.name. Wraps around the request so that
+   * request.getRemoteUser() returns the user identity.
+   */
+  public static class DummyServletFilter implements Filter {
+
+    private static final Log LOG =
+        LogFactory.getLog(DummyServletFilter.class);
+
+    @Override
+    public void destroy() { }
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain filterChain) throws IOException, ServletException {
+      final String userName = request.getParameter("user.name");
+      ServletRequest requestModified =
+          new HttpServletRequestWrapper((HttpServletRequest) request) {
+            @Override
+            public String getRemoteUser() {
+              return userName;
+            }
+          };
+      filterChain.doFilter(requestModified, response);
+    }
+
+    @Override
+    public void init(FilterConfig arg0) throws ServletException { }
+  }
+
+  /**
+   * FilterInitializer that initializes the DummyFilter.
+   */
+  public static class DummyFilterInitializer extends FilterInitializer {
+    public DummyFilterInitializer() {
+    }
+
+    @Override
+    public void initFilter(FilterContainer container, Configuration conf) {
+      container.addFilter("DummyFilter", DummyServletFilter.class.getName(),
+          null);
+    }
+  }
+
+  /**
+   * Access a URL and get the corresponding HTTP status code. The URL
+   * will be accessed as the passed user, by sending the user.name request
+   * parameter.
+   *
+   * @param urlstring
+   * @param userName
+   * @return the HTTP status code
+   * @throws IOException
+   */
+  static int getHttpStatusCode(String urlstring, String userName)
+      throws IOException {
+    URL url = new URL(urlstring + "?user.name=" + userName);
+    System.out.println("Accessing " + url + " as user " + userName);
+    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+    connection.connect();
+    return connection.getResponseCode();
+  }
+
+  /**
+   * Custom user->group mapping service.
+   */
+  public static class MyGroupsProvider extends ShellBasedUnixGroupsMapping {
+    static Map<String, List<String>> mapping =
+        new HashMap<String, List<String>>();
+
+    static void clearMapping() {
+      mapping.clear();
+    }
+
+    @Override
+    public List<String> getGroups(String user) throws IOException {
+      return mapping.get(user);
+    }
+  }
+
+  /**
+   * Verify access to the /logs, /stacks, /conf, /logLevel and /metrics
+   * servlets when authentication filters are set but authorization is not
+   * enabled.
+   * @throws Exception
+   */
+  @Test
+  public void testDisabledAuthorizationOfDefaultServlets() throws Exception {
+
+    Configuration conf = new Configuration();
+
+    // Authorization is disabled by default
+    conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
+        DummyFilterInitializer.class.getName());
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        MyGroupsProvider.class.getName());
+    Groups.getUserToGroupsMappingService(conf);
+    MyGroupsProvider.clearMapping();
+    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
+    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
+
+    HttpServer myServer = new HttpServer("test", "0.0.0.0", 0, true, conf);
+    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
+    myServer.start();
+    int port = myServer.getPort();
+    String serverURL = "http://localhost:" + port + "/";
+    for (String servlet : new String[] { "conf", "logs", "stacks",
+        "logLevel", "metrics" }) {
+      for (String user : new String[] { "userA", "userB" }) {
+        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
+            + servlet, user));
+      }
+    }
+    myServer.stop();
+  }
+
+  /**
+   * Verify administrator access to the /logs, /stacks, /conf, /logLevel
+   * and /metrics servlets.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testAuthorizationOfDefaultServlets() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
+        true);
+    conf.set(
+        CommonConfigurationKeys.HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY,
+        "userA,userB groupC,groupD");
+    conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
+        DummyFilterInitializer.class.getName());
+
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        MyGroupsProvider.class.getName());
+    Groups.getUserToGroupsMappingService(conf);
+    MyGroupsProvider.clearMapping();
+    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
+    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
+    MyGroupsProvider.mapping.put("userC", Arrays.asList("groupC"));
+    MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
+    MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
+
+    HttpServer myServer = new HttpServer("test", "0.0.0.0", 0, true, conf);
+    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
+    myServer.start();
+    int port = myServer.getPort();
+    String serverURL = "http://localhost:" + port + "/";
+    for (String servlet : new String[] { "conf", "logs", "stacks",
+        "logLevel", "metrics" }) {
+      for (String user : new String[] { "userA", "userB", "userC", "userD" }) {
+        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
+            + servlet, user));
+      }
+      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(
+          serverURL + servlet, "userE"));
+    }
+    myServer.stop();
+  }
 }
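To read the expectations in testAuthorizationOfDefaultServlets above: with the ACL "userA,userB groupC,groupD", userA and userB are admitted by user name, userC and userD via their groups, and userE (only in groupE) gets a 401 on every admin servlet. The production analogue of that test setup, sketched with the constants this patch defines; the user and group names are examples, and a real deployment would set these in core-site.xml rather than in code:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;

    public class EnableAdminAclDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // 1. Turn authorization on; it defaults to false.
        conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
            true);
        // 2. Restrict the admin servlets to users ops1,ops2 plus group admins.
        conf.set(CommonConfigurationKeys.HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY,
            "ops1,ops2 admins");
        System.out.println(conf.get(
            CommonConfigurationKeys.HADOOP_CLUSTER_ADMINISTRATORS_PROPERTY));
      }
    }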
diff --git a/src/test/core/org/apache/hadoop/ipc/TestRPC.java b/src/test/core/org/apache/hadoop/ipc/TestRPC.java
index b94c88b4d19..55ab115a5f4 100644
--- a/src/test/core/org/apache/hadoop/ipc/TestRPC.java
+++ b/src/test/core/org/apache/hadoop/ipc/TestRPC.java
@@ -30,6 +30,7 @@ import java.util.Arrays;
 
 import org.apache.commons.logging.*;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.io.Writable;
 
@@ -398,8 +399,8 @@ public class TestRPC extends TestCase {
   public void testAuthorization() throws Exception {
     Configuration conf = new Configuration();
 
-    conf.setBoolean(
-        ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG, true);
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
+        true);
 
     // Expect to succeed
     conf.set(ACL_CONFIG, "*");