HADOOP-16457. Fixed Kerberos activation in ServiceAuthorizationManager.

Contributed by Prabhu Joseph
This commit is contained in:
Eric Yang 2019-08-06 17:04:17 -04:00
parent f51702d539
commit 22430c10e2
2 changed files with 69 additions and 15 deletions

View File

@@ -97,21 +97,23 @@ public class ServiceAuthorizationManager {
throw new AuthorizationException("Protocol " + protocol +
" is not known.");
}
// get client principal key to verify (if available)
KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
String clientPrincipal = null;
if (krbInfo != null) {
String clientKey = krbInfo.clientPrincipal();
if (clientKey != null && !clientKey.isEmpty()) {
try {
clientPrincipal = SecurityUtil.getServerPrincipal(
conf.get(clientKey), addr);
} catch (IOException e) {
throw (AuthorizationException) new AuthorizationException(
"Can't figure out Kerberos principal name for connection from "
+ addr + " for user=" + user + " protocol=" + protocol)
.initCause(e);
String clientPrincipal = null;
if (UserGroupInformation.isSecurityEnabled()) {
// get client principal key to verify (if available)
KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
if (krbInfo != null) {
String clientKey = krbInfo.clientPrincipal();
if (clientKey != null && !clientKey.isEmpty()) {
try {
clientPrincipal = SecurityUtil.getServerPrincipal(
conf.get(clientKey), addr);
} catch (IOException e) {
throw (AuthorizationException) new AuthorizationException(
"Can't figure out Kerberos principal name for connection from "
+ addr + " for user=" + user + " protocol=" + protocol)
.initCause(e);
}
}
}
}

View File

@@ -20,13 +20,18 @@ package org.apache.hadoop.security.authorize;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.lang.annotation.Annotation;
import java.net.InetAddress;
import java.net.UnknownHostException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.ipc.TestRPC.TestProtocol;
import org.apache.hadoop.security.KerberosInfo;
import org.apache.hadoop.security.SecurityInfo;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.TokenInfo;
import org.junit.Test;
public class TestServiceAuthorization {
@@ -52,6 +57,53 @@ public class TestServiceAuthorization {
}
}
// Test-only SecurityInfo provider: for any protocol it returns a
// KerberosInfo annotation whose clientPrincipal() points at the
// "dfs.datanode.kerberos.principal" config key, so authorize() is forced
// down the client-principal lookup path during the test.
private static class CustomSecurityInfo extends SecurityInfo {
@Override
public KerberosInfo getKerberosInfo(Class<?> protocol,
Configuration conf) {
// Anonymous stand-in for the @KerberosInfo annotation; only
// clientPrincipal() carries a real value.
return new KerberosInfo() {
@Override
public Class<? extends Annotation> annotationType() {
// Not used by SecurityUtil in this test; null is sufficient.
return null;
}
@Override
public String serverPrincipal() {
// No server principal is needed for the authorize() path under test.
return null;
}
@Override
public String clientPrincipal() {
// Config key the production code resolves to a Kerberos principal.
return "dfs.datanode.kerberos.principal";
}
};
}
@Override
public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
// Token-based auth is out of scope for this test.
return null;
}
}
@Test
public void testWithClientPrincipalOnUnsecureMode()
    throws UnknownHostException {
  // Regression test for HADOOP-16457: with security disabled (simple auth),
  // a configured clientPrincipal key must NOT trigger the Kerberos
  // principal check, so a user matching the ACL by group is authorized.
  UserGroupInformation hdfsUser = UserGroupInformation.createUserForTesting(
      "hdfs", new String[] {"hadoop"});
  ServiceAuthorizationManager serviceAuthorizationManager =
      new ServiceAuthorizationManager();
  // NOTE(review): this mutates global SecurityUtil provider state and is
  // never restored; consider resetting it in an @After method so later
  // tests are not affected.
  SecurityUtil.setSecurityInfoProviders(new CustomSecurityInfo());
  Configuration conf = new Configuration();
  conf.set("dfs.datanode.kerberos.principal", "dn/_HOST@EXAMPLE.COM");
  conf.set(ACL_CONFIG, "user1 hadoop");
  serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
  try {
    serviceAuthorizationManager.authorize(hdfsUser, TestProtocol.class, conf,
        InetAddress.getByName(ADDRESS));
  } catch (AuthorizationException e) {
    // Fix: the original bare fail() discarded the exception, hiding the
    // cause of a regression; include it in the failure message instead.
    fail("Authorization must succeed in simple-security mode but threw: "
        + e);
  }
}
@Test
public void testDefaultAcl() {
ServiceAuthorizationManager serviceAuthorizationManager =