HDFS-2924. Standby checkpointing fails to authenticate in secure cluster. Contributed by Todd Lipcon.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1242439 13f79535-47bb-0310-9956-ffa450edef68
parent bad0a2a4a1
commit 5e26de982b
@@ -23,6 +23,7 @@ import java.net.URI;
 import java.net.URL;
 import java.net.UnknownHostException;
 import java.security.AccessController;
+import java.security.PrivilegedAction;
 import java.util.Arrays;
 import java.util.List;
 import java.util.ServiceLoader;
@@ -448,6 +449,27 @@ public class SecurityUtil {
     return buildTokenService(NetUtils.createSocketAddr(uri.getAuthority()));
   }
 
+  /**
+   * Perform the given action as the daemon's login user. If the login
+   * user cannot be determined, this will log a FATAL error and exit
+   * the whole JVM.
+   */
+  public static <T> T doAsLoginUserOrFatal(PrivilegedAction<T> action) {
+    if (UserGroupInformation.isSecurityEnabled()) {
+      UserGroupInformation ugi = null;
+      try {
+        ugi = UserGroupInformation.getLoginUser();
+      } catch (IOException e) {
+        LOG.fatal("Exception while getting login user", e);
+        e.printStackTrace();
+        Runtime.getRuntime().exit(-1);
+      }
+      return ugi.doAs(action);
+    } else {
+      return action.run();
+    }
+  }
+
   /**
    * Resolves a host subject to the security requirements determined by
    * hadoop.security.token.service.use_ip.
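For illustration, a minimal sketch of how a daemon thread might call the new helper. The ExampleDaemonThread class and its doWork() method are hypothetical stand-ins; only SecurityUtil.doAsLoginUserOrFatal and java.security.PrivilegedAction come from the patch above.

import java.security.PrivilegedAction;

import org.apache.hadoop.security.SecurityUtil;

/** Hypothetical daemon thread, for illustration only -- not part of this patch. */
public class ExampleDaemonThread extends Thread {
  @Override
  public void run() {
    // When security is enabled this runs doWork() as the daemon's Kerberos
    // login user; otherwise doAsLoginUserOrFatal() simply invokes the action
    // on the calling thread.
    SecurityUtil.doAsLoginUserOrFatal(new PrivilegedAction<Object>() {
      @Override
      public Object run() {
        doWork();
        return null;
      }
    });
  }

  private void doWork() {
    // ... the daemon's actual work loop would go here ...
  }
}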
@@ -597,5 +619,5 @@ public class SecurityUtil {
     void setSearchDomains(String ... domains) {
       searchDomains = Arrays.asList(domains);
     }
   }
 }
@@ -186,3 +186,5 @@ HDFS-2905. HA: Standby NN NPE when shared edits dir is deleted. (Bikas Saha via
 HDFS-2579. Starting delegation token manager during safemode fails. (todd)
 
 HDFS-2510. Add HA-related metrics. (atm)
+
+HDFS-2924. Standby checkpointing fails to authenticate in secure cluster. (todd)
@@ -307,25 +307,14 @@ public class SecondaryNameNode implements Runnable {
   }
 
   public void run() {
-    if (UserGroupInformation.isSecurityEnabled()) {
-      UserGroupInformation ugi = null;
-      try {
-        ugi = UserGroupInformation.getLoginUser();
-      } catch (IOException e) {
-        LOG.error("Exception while getting login user", e);
-        e.printStackTrace();
-        Runtime.getRuntime().exit(-1);
-      }
-      ugi.doAs(new PrivilegedAction<Object>() {
+    SecurityUtil.doAsLoginUserOrFatal(
+        new PrivilegedAction<Object>() {
         @Override
         public Object run() {
           doWork();
           return null;
         }
       });
-    } else {
-      doWork();
-    }
   }
   //
   // The main work loop
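The explicit "} else { doWork(); }" fallback deleted above now lives inside doAsLoginUserOrFatal() itself. Below is a minimal sketch of that non-secure path, assuming a default ("simple" authentication) Configuration; the DoAsLoginUserExample class is hypothetical and not part of this patch.

import java.security.PrivilegedAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;

/** Hypothetical demo class, for illustration only -- not part of this patch. */
public class DoAsLoginUserExample {
  public static void main(String[] args) {
    // "simple" is Hadoop's default authentication mode; set it explicitly so
    // UserGroupInformation.isSecurityEnabled() returns false.
    Configuration conf = new Configuration();
    conf.set("hadoop.security.authentication", "simple");
    UserGroupInformation.setConfiguration(conf);

    // With security disabled, the action runs directly on the calling thread;
    // no login-user lookup or doAs() is attempted.
    String result = SecurityUtil.doAsLoginUserOrFatal(
        new PrivilegedAction<String>() {
          @Override
          public String run() {
            return "ran without a Kerberos login";
          }
        });
    System.out.println(result);
  }
}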
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.namenode.ha;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.security.PrivilegedAction;
 import java.util.Collection;
 
 import org.apache.commons.logging.Log;
@@ -40,6 +41,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.security.SecurityUtil;
 
 import static org.apache.hadoop.hdfs.server.common.Util.now;
 
@@ -284,6 +286,17 @@ public class EditLogTailer {
 
     @Override
     public void run() {
+      SecurityUtil.doAsLoginUserOrFatal(
+          new PrivilegedAction<Object>() {
+            @Override
+            public Object run() {
+              doWork();
+              return null;
+            }
+          });
+    }
+
+    private void doWork() {
       while (shouldRun) {
         try {
           // There's no point in triggering a log roll if the Standby hasn't
@@ -35,6 +35,7 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.SaveNamespaceCancelledException;
 import org.apache.hadoop.hdfs.server.namenode.TransferFsImage;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import static org.apache.hadoop.hdfs.server.common.Util.now;
 
@@ -212,26 +213,14 @@ public class StandbyCheckpointer {
     public void run() {
       // We have to make sure we're logged in as far as JAAS
      // is concerned, in order to use kerberized SSL properly.
-      // This code copied from SecondaryNameNode - TODO: refactor
-      // to a utility function.
-      if (UserGroupInformation.isSecurityEnabled()) {
-        UserGroupInformation ugi = null;
-        try {
-          ugi = UserGroupInformation.getLoginUser();
-        } catch (IOException e) {
-          LOG.error("Exception while getting login user", e);
-          Runtime.getRuntime().exit(-1);
-        }
-        ugi.doAs(new PrivilegedAction<Object>() {
+      SecurityUtil.doAsLoginUserOrFatal(
+          new PrivilegedAction<Object>() {
           @Override
           public Object run() {
             doWork();
             return null;
           }
         });
-      } else {
-        doWork();
-      }
     }
 
     /**