diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
index 0fd400e71b3..985f60703e7 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
@@ -19,6 +19,8 @@ import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.ietf.jgss.Oid;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -44,6 +46,9 @@ import java.util.Map;
  * sequence.
  */
 public class KerberosAuthenticator implements Authenticator {
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      KerberosAuthenticator.class);
 
   /**
    * HTTP header used by the SPNEGO server endpoint during an authentication sequence.
@@ -152,9 +157,18 @@ public class KerberosAuthenticator implements Authenticator {
       }
       conn.setRequestMethod(AUTH_HTTP_METHOD);
       conn.connect();
-      if (isNegotiate()) {
+
+      if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
+        LOG.debug("JDK performed authentication on our behalf.");
+        // If the JDK already did the SPNEGO back-and-forth for
+        // us, just pull out the token.
+        AuthenticatedURL.extractToken(conn, token);
+        return;
+      } else if (isNegotiate()) {
+        LOG.debug("Performing our own SPNEGO sequence.");
         doSpnegoSequence(token);
       } else {
+        LOG.debug("Using fallback authenticator sequence.");
         getFallBackAuthenticator().authenticate(url, token);
       }
     }
@@ -168,7 +182,11 @@ public class KerberosAuthenticator implements Authenticator {
    * @return the fallback {@link Authenticator}.
    */
   protected Authenticator getFallBackAuthenticator() {
-    return new PseudoAuthenticator();
+    Authenticator auth = new PseudoAuthenticator();
+    if (connConfigurator != null) {
+      auth.setConnectionConfigurator(connConfigurator);
+    }
+    return auth;
   }
 
   /*
@@ -197,11 +215,16 @@ public class KerberosAuthenticator implements Authenticator {
       AccessControlContext context = AccessController.getContext();
       Subject subject = Subject.getSubject(context);
       if (subject == null) {
+        LOG.debug("No subject in context, logging in");
         subject = new Subject();
         LoginContext login = new LoginContext("", subject, null,
           new KerberosConfiguration());
         login.login();
       }
+
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Using subject: " + subject);
+      }
 
       Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
         @Override
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index f0651e93b24..4ed744fd400 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -41,6 +41,9 @@ Release 2.0.3-alpha - Unreleased
     HADOOP-8791. Fix rm command documentation to indicte it deletes files and
     not directories. (Jing Zhao via suresh)
 
+    HADOOP-8855. SSL-based image transfer does not work when Kerberos
+    is disabled. (todd via eli)
+
 Release 2.0.2-alpha - 2012-09-07
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 4c09746326f..da0aa251a39 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -499,7 +499,7 @@ public class SecurityUtil {
    * @throws IOException If unable to authenticate via SPNEGO
    */
   public static URLConnection openSecureHttpConnection(URL url) throws IOException {
-    if(!UserGroupInformation.isSecurityEnabled()) {
+    if (!HttpConfig.isSecure() && !UserGroupInformation.isSecurityEnabled()) {
       return url.openConnection();
     }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
index 421f7bc3fd0..86e7d77e63a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.tools;
 import java.io.File;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -53,6 +54,7 @@ import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.RefreshUserMappingsProtocol;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.util.StringUtils;
@@ -511,11 +513,17 @@ public class DFSAdmin extends FsShell {
    * @return an exit code indicating success or failure.
    * @throws IOException
    */
-  public int fetchImage(String[] argv, int idx) throws IOException {
-    String infoServer = DFSUtil.getInfoServer(
+  public int fetchImage(final String[] argv, final int idx) throws IOException {
+    final String infoServer = DFSUtil.getInfoServer(
       HAUtil.getAddressOfActive(getDFS()), getConf(), false);
-    TransferFsImage.downloadMostRecentImageToDirectory(infoServer,
-        new File(argv[idx]));
+    SecurityUtil.doAsCurrentUser(new PrivilegedExceptionAction<Void>() {
+      @Override
+      public Void run() throws Exception {
+        TransferFsImage.downloadMostRecentImageToDirectory(infoServer,
+            new File(argv[idx]));
+        return null;
+      }
+    });
     return 0;
   }
 