HADOOP-10566. Refactor proxyservers out of ProxyUsers. (Contributed by Benoy Antony)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1598140 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Arpit Agarwal 2014-05-28 21:12:06 +00:00
parent 3045fdc7c6
commit d25d41d49b
8 changed files with 102 additions and 34 deletions

View File

@ -57,6 +57,9 @@ Release 2.5.0 - UNRELEASED
HADOOP-10618. Remove SingleNodeSetup.apt.vm. (Akira Ajisaka via
Arpit Agarwal)
HADOOP-10566. Refactor proxyservers out of ProxyUsers. (Benoy Antony via
Arpit Agarwal)
OPTIMIZATIONS
BUG FIXES

View File

@ -0,0 +1,53 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security.authorize;
import java.net.InetSocketAddress;
import java.util.Collection;
import java.util.HashSet;
import org.apache.hadoop.conf.Configuration;
/**
 * Holds the set of trusted proxy servers (e.g. HTTP proxies) configured under
 * the {@code hadoop.proxyservers} key, refactored out of {@code ProxyUsers}.
 */
public class ProxyServers {
  public static final String CONF_HADOOP_PROXYSERVERS = "hadoop.proxyservers";
  // Replaced wholesale on refresh; volatile gives safe publication so readers
  // never observe a partially populated set.
  private static volatile Collection<String> proxyServers;

  /** Reload the trusted proxy list from a freshly created Configuration. */
  public static void refresh() {
    refresh(new Configuration());
  }

  /**
   * Rebuild the trusted proxy set from {@code conf}. Each configured host is
   * resolved; entries that fail to resolve are silently skipped. The completed
   * set is then published atomically via the volatile field.
   */
  public static void refresh(Configuration conf){
    final Collection<String> resolved = new HashSet<String>();
    // trusted proxy servers such as http proxies
    final String[] hosts = conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS);
    for (int i = 0; i < hosts.length; i++) {
      final InetSocketAddress sockAddr = new InetSocketAddress(hosts[i], 0);
      if (sockAddr.isUnresolved()) {
        continue; // unresolvable host name: ignore it
      }
      resolved.add(sockAddr.getAddress().getHostAddress());
    }
    proxyServers = resolved;
  }

  /**
   * @return true iff {@code remoteAddr} is one of the resolved trusted proxy
   * addresses. Lazily loads the configuration on first use.
   */
  public static boolean isProxyServer(String remoteAddr) {
    Collection<String> servers = proxyServers;
    if (servers == null) {
      refresh();
      servers = proxyServers;
    }
    return servers.contains(remoteAddr);
  }
}

View File

@ -42,7 +42,6 @@ public class ProxyUsers {
private static final String CONF_GROUPS = ".groups";
private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
public static final String CONF_HADOOP_PROXYSERVERS = "hadoop.proxyservers";
private static boolean init = false;
//list of users, groups and hosts per proxyuser
@ -52,8 +51,6 @@ public class ProxyUsers {
new HashMap<String, Collection<String>>();
private static Map<String, Collection<String>> proxyHosts =
new HashMap<String, Collection<String>>();
private static Collection<String> proxyServers =
new HashSet<String>();
/**
* reread the conf and get new values for "hadoop.proxyuser.*.groups/users/hosts"
@ -73,7 +70,6 @@ public class ProxyUsers {
proxyGroups.clear();
proxyHosts.clear();
proxyUsers.clear();
proxyServers.clear();
// get all the new keys for users
String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS;
@ -98,22 +94,8 @@ public class ProxyUsers {
proxyHosts.put(entry.getKey(),
StringUtils.getTrimmedStringCollection(entry.getValue()));
}
// trusted proxy servers such as http proxies
for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) {
InetSocketAddress addr = new InetSocketAddress(host, 0);
if (!addr.isUnresolved()) {
proxyServers.add(addr.getAddress().getHostAddress());
}
}
init = true;
}
public static synchronized boolean isProxyServer(String remoteAddr) {
if(!init) {
refreshSuperUserGroupsConfiguration();
}
return proxyServers.contains(remoteAddr);
ProxyServers.refresh(conf);
}
/**

View File

@ -0,0 +1,38 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security.authorize;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.conf.Configuration;
import org.junit.Test;
// Verifies that ProxyServers only trusts addresses loaded from the
// hadoop.proxyservers key via a configuration refresh.
public class TestProxyServers {
@Test
public void testProxyServer() {
Configuration conf = new Configuration();
// Nothing configured yet (this first call lazily loads defaults), so an
// arbitrary address must not be trusted.
assertFalse(ProxyServers.isProxyServer("1.1.1.1"));
conf.set(ProxyServers.CONF_HADOOP_PROXYSERVERS, "2.2.2.2, 3.3.3.3");
// refreshSuperUserGroupsConfiguration delegates to ProxyServers.refresh(conf),
// which rebuilds the trusted set from the key set above.
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// An address absent from the configured list is still rejected...
assertFalse(ProxyServers.isProxyServer("1.1.1.1"));
// ...while both configured (trimmed) entries are trusted.
assertTrue(ProxyServers.isProxyServer("2.2.2.2"));
assertTrue(ProxyServers.isProxyServer("3.3.3.3"));
}
}

View File

@ -238,17 +238,6 @@ public class TestProxyUsers {
assertEquals (1,hosts.size());
}
@Test
public void testProxyServer() {
Configuration conf = new Configuration();
assertFalse(ProxyUsers.isProxyServer("1.1.1.1"));
conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, "2.2.2.2, 3.3.3.3");
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
assertFalse(ProxyUsers.isProxyServer("1.1.1.1"));
assertTrue(ProxyUsers.isProxyServer("2.2.2.2"));
assertTrue(ProxyUsers.isProxyServer("3.3.3.3"));
}
private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
try {
ProxyUsers.authorize(proxyUgi, host);

View File

@ -76,6 +76,7 @@ import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.authorize.ProxyServers;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.VersionInfo;
@ -672,7 +673,7 @@ public class JspHelper {
public static String getRemoteAddr(HttpServletRequest request) {
String remoteAddr = request.getRemoteAddr();
String proxyHeader = request.getHeader("X-Forwarded-For");
if (proxyHeader != null && ProxyUsers.isProxyServer(remoteAddr)) {
if (proxyHeader != null && ProxyServers.isProxyServer(remoteAddr)) {
final String clientAddr = proxyHeader.split(",")[0].trim();
if (!clientAddr.isEmpty()) {
remoteAddr = clientAddr;

View File

@ -58,6 +58,7 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ProxyServers;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@ -645,7 +646,7 @@ public class TestJspHelper {
when(req.getRemoteAddr()).thenReturn(proxyAddr);
when(req.getHeader("X-Forwarded-For")).thenReturn(clientAddr);
if (trusted) {
conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, proxyAddr);
conf.set(ProxyServers.CONF_HADOOP_PROXYSERVERS, proxyAddr);
}
}
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);

View File

@ -32,14 +32,15 @@ import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.web.resources.GetOpParam;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.authorize.ProxyServers;
import org.junit.Before;
import org.junit.Test;
@ -120,7 +121,7 @@ public class TestAuditLogger {
assertEquals("127.0.0.1", DummyAuditLogger.remoteAddr);
// trusted proxied request
conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, "127.0.0.1");
conf.set(ProxyServers.CONF_HADOOP_PROXYSERVERS, "127.0.0.1");
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
conn = (HttpURLConnection) uri.toURL().openConnection();
conn.setRequestMethod(op.getType().toString());