HADOOP-12752. Improve diagnostics/use of envvar/sysprop credential propagation. Contributed by Steve Loughran.

(cherry picked from commit cf3261570a)
cnauroth 2016-02-08 09:36:09 -08:00
parent 2451a0a8e0
commit a76611d4ec
2 changed files with 21 additions and 4 deletions

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -454,6 +454,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12759. RollingFileSystemSink should eagerly rotate directories.
     (Daniel Templeton via wang)
 
+    HADOOP-12752. Improve diagnostics/use of envvar/sysprop credential
+    propagation (Steve Loughran via cnauroth)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

@@ -21,6 +21,7 @@
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.lang.reflect.UndeclaredThrowableException;
 import java.security.AccessControlContext;
@@ -51,8 +52,6 @@
 import javax.security.auth.login.LoginException;
 import javax.security.auth.spi.LoginModule;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -71,6 +70,8 @@
 import org.apache.hadoop.util.Time;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * User and group information for Hadoop.
@@ -81,7 +82,9 @@
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "HBase", "Hive", "Oozie"})
 @InterfaceStability.Evolving
 public class UserGroupInformation {
-  private static final Log LOG = LogFactory.getLog(UserGroupInformation.class);
+  private static final Logger LOG = LoggerFactory.getLogger(
+      UserGroupInformation.class);
+
   /**
    * Percentage of the ticket window to use before we renew ticket.
   */
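
Aside: the move from commons-logging to SLF4J above is what enables the parameterized debug statements added in the next hunk, since SLF4J only formats the {} placeholders when the debug level is actually enabled. A minimal sketch of the idiom, not part of the patch (the class LoggingSketch and its method are illustrative only):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingSketch.class);

  void report(String path, int tokenCount) {
    // With commons-logging, building this message required string
    // concatenation, typically guarded by LOG.isDebugEnabled().
    // SLF4J defers formatting until after the level check passes.
    LOG.debug("Loaded {} tokens from {}", tokenCount, path);
  }
}
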
@@ -814,8 +817,19 @@ static void loginUserFromSubject(Subject subject) throws IOException {
         // Load the token storage file and put all of the tokens into the
         // user. Don't use the FileSystem API for reading since it has a lock
         // cycle (HADOOP-9212).
+        File source = new File(fileLocation);
+        LOG.debug("Reading credentials from location set in {}: {}",
+            HADOOP_TOKEN_FILE_LOCATION,
+            source.getCanonicalPath());
+        if (!source.isFile()) {
+          throw new FileNotFoundException("Source file "
+              + source.getCanonicalPath() + " from "
+              + HADOOP_TOKEN_FILE_LOCATION
+              + " not found");
+        }
         Credentials cred = Credentials.readTokenStorageFile(
-            new File(fileLocation), conf);
+            source, conf);
+        LOG.debug("Loaded {} tokens", cred.numberOfTokens());
         loginUser.addCredentials(cred);
       }
       loginUser.spawnAutoRenewalThreadForUserCreds();
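
For reference, the block above is the path taken when a launcher hands delegation tokens to a child process through the HADOOP_TOKEN_FILE_LOCATION environment variable (the same mechanism YARN uses for containers); with this change the receiving side logs where it read the credentials from and fails fast with a FileNotFoundException if the file is absent. A rough sketch of such a hand-off, assuming the current user already holds the tokens to forward (the token file path, child command line, and class name TokenHandOffSketch are placeholders):

import java.io.File;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

public class TokenHandOffSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Collect whatever tokens the current user already holds.
    Credentials creds = UserGroupInformation.getCurrentUser().getCredentials();

    // Write them in the token storage format that
    // Credentials.readTokenStorageFile() expects on the other side.
    File tokenFile = new File("/tmp/child.tokens");              // placeholder path
    creds.writeTokenStorageFile(new Path(tokenFile.toURI()), conf);

    // Point the child at the file; its UserGroupInformation login reads
    // the tokens from this environment variable.
    ProcessBuilder pb = new ProcessBuilder("java", "ChildApp");  // placeholder command
    pb.environment().put("HADOOP_TOKEN_FILE_LOCATION", tokenFile.getAbsolutePath());
    pb.inheritIO().start();
  }
}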