HBASE-20886 [Auth] Support keytab login in hbase client
Signed-off-by: Sean Busbey <busbey@apache.org>
parent 803b62f0b2
commit a8e184dc77
@@ -21,6 +21,9 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.NO_NONCE_GENERATOR;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.getStubKey;
 import static org.apache.hadoop.hbase.client.NonceGenerator.CLIENT_NONCES_ENABLED_KEY;
 
+import org.apache.hadoop.hbase.AuthUtil;
+import org.apache.hadoop.hbase.ChoreService;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 
 import org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer;
@@ -99,10 +102,15 @@ class AsyncConnectionImpl implements AsyncConnection {
   private final AtomicReference<CompletableFuture<MasterService.Interface>> masterStubMakeFuture =
     new AtomicReference<>();
 
+  private ChoreService authService;
+
   public AsyncConnectionImpl(Configuration conf, AsyncRegistry registry, String clusterId,
       User user) {
     this.conf = conf;
     this.user = user;
+    if (user.isLoginFromKeytab()) {
+      spawnRenewalChore(user.getUGI());
+    }
     this.connConf = new AsyncConnectionConfiguration(conf);
     this.registry = registry;
     this.rpcClient = RpcClientFactory.createClient(conf, clusterId);
@@ -119,6 +127,11 @@ class AsyncConnectionImpl implements AsyncConnection {
     }
   }
 
+  private void spawnRenewalChore(final UserGroupInformation user) {
+    authService = new ChoreService("Relogin service");
+    authService.scheduleChore(AuthUtil.getAuthRenewalChore(user));
+  }
+
   @Override
   public Configuration getConfiguration() {
     return conf;
@@ -128,6 +141,9 @@ class AsyncConnectionImpl implements AsyncConnection {
   public void close() {
     IOUtils.closeQuietly(rpcClient);
     IOUtils.closeQuietly(registry);
+    if (authService != null) {
+      authService.shutdown();
+    }
   }
 
   @Override
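The renewal chore wired in above can also be driven by hand. The following is a minimal, standalone sketch of the same pattern (the class name and the explicit shutdown are illustrative; when the client keytab and principal keys are configured, the connection implementations do this automatically):

[source,java]
----
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.security.User;

public class RenewalChoreSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Log the client in; this uses the keytab when the two client keys are configured.
    User user = AuthUtil.loginClient(conf);
    ChoreService authService = null;
    if (user.isLoginFromKeytab()) {
      // Same pattern as spawnRenewalChore() above: periodically re-login from the keytab.
      authService = new ChoreService("Relogin service");
      ScheduledChore renewal = AuthUtil.getAuthRenewalChore(user.getUGI());
      authService.scheduleChore(renewal);
    }
    try {
      // ... do work against the cluster ...
    } finally {
      if (authService != null) {
        authService.shutdown();
      }
    }
  }
}
----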
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 import java.lang.reflect.Constructor;
+import java.security.PrivilegedExceptionAction;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutorService;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.security.User;
@@ -47,6 +49,16 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
  * }
  * </pre>
  *
+ * Since 2.2.0, a Connection created by ConnectionFactory can contain user-specified Kerberos
+ * credentials if the caller has the following two configurations set:
+ * <ul>
+ * <li>hbase.client.keytab.file, points to a valid keytab on the local filesystem
+ * <li>hbase.client.keytab.principal, gives the Kerberos principal to use
+ * </ul>
+ * In this way, the caller can connect directly to a kerberized cluster without handling login
+ * and credential renewal logic in the application.
+ * <pre>
+ * </pre>
 * Similarly, {@link Connection} also returns {@link Admin} and {@link RegionLocator}
 * implementations.
 * @see Connection
@@ -84,7 +96,8 @@ public class ConnectionFactory {
    * @return Connection object for <code>conf</code>
    */
   public static Connection createConnection() throws IOException {
-    return createConnection(HBaseConfiguration.create(), null, null);
+    Configuration conf = HBaseConfiguration.create();
+    return createConnection(conf, null, AuthUtil.loginClient(conf));
   }
 
   /**
@@ -111,7 +124,7 @@ public class ConnectionFactory {
    * @return Connection object for <code>conf</code>
    */
   public static Connection createConnection(Configuration conf) throws IOException {
-    return createConnection(conf, null, null);
+    return createConnection(conf, null, AuthUtil.loginClient(conf));
   }
 
   /**
@@ -140,7 +153,7 @@ public class ConnectionFactory {
    */
   public static Connection createConnection(Configuration conf, ExecutorService pool)
       throws IOException {
-    return createConnection(conf, pool, null);
+    return createConnection(conf, pool, AuthUtil.loginClient(conf));
   }
 
   /**
@@ -196,13 +209,8 @@ public class ConnectionFactory {
    * @param pool the thread pool to use for batch operations
    * @return Connection object for <code>conf</code>
    */
-  public static Connection createConnection(Configuration conf, ExecutorService pool, User user)
-      throws IOException {
-    if (user == null) {
-      UserProvider provider = UserProvider.instantiate(conf);
-      user = provider.getCurrent();
-    }
-
+  public static Connection createConnection(Configuration conf, ExecutorService pool,
+      final User user) throws IOException {
     String className = conf.get(ClusterConnection.HBASE_CLIENT_CONNECTION_IMPL,
       ConnectionImplementation.class.getName());
     Class<?> clazz;
@@ -216,7 +224,9 @@ public class ConnectionFactory {
       Constructor<?> constructor = clazz.getDeclaredConstructor(Configuration.class,
         ExecutorService.class, User.class);
       constructor.setAccessible(true);
-      return (Connection) constructor.newInstance(conf, pool, user);
+      return user.runAs(
+        (PrivilegedExceptionAction<Connection>)() ->
+          (Connection) constructor.newInstance(conf, pool, user));
     } catch (Exception e) {
       throw new IOException(e);
     }
@@ -243,7 +253,7 @@ public class ConnectionFactory {
   public static CompletableFuture<AsyncConnection> createAsyncConnection(Configuration conf) {
     User user;
     try {
-      user = UserProvider.instantiate(conf).getCurrent();
+      user = AuthUtil.loginClient(conf);
     } catch (IOException e) {
       CompletableFuture<AsyncConnection> future = new CompletableFuture<>();
       future.completeExceptionally(e);
@@ -269,7 +279,7 @@ public class ConnectionFactory {
    * @throws IOException
    */
   public static CompletableFuture<AsyncConnection> createAsyncConnection(Configuration conf,
-      User user) {
+      final User user) {
     CompletableFuture<AsyncConnection> future = new CompletableFuture<>();
     AsyncRegistry registry = AsyncRegistryFactory.getRegistry(conf);
     registry.getClusterId().whenComplete((clusterId, error) -> {
@@ -284,7 +294,10 @@ public class ConnectionFactory {
       Class<? extends AsyncConnection> clazz = conf.getClass(HBASE_CLIENT_ASYNC_CONNECTION_IMPL,
         AsyncConnectionImpl.class, AsyncConnection.class);
       try {
-        future.complete(ReflectionUtils.newInstance(clazz, conf, registry, clusterId, user));
+        future.complete(
+          user.runAs((PrivilegedExceptionAction<? extends AsyncConnection>)() ->
+            ReflectionUtils.newInstance(clazz, conf, registry, clusterId, user))
+        );
       } catch (Exception e) {
         future.completeExceptionally(e);
       }
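For illustration, a minimal client built on the factory methods above might look like the sketch below. The keytab path, principal, and table name are placeholders, and in practice the two keys would normally live in `hbase-site.xml` rather than be set in code:

[source,java]
----
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;

public class KeytabClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.set(AuthUtil.HBASE_CLIENT_KEYTAB_FILE, "/etc/security/keytabs/client.keytab"); // placeholder
    conf.set(AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL, "foo@EXAMPLE.COM"); // placeholder
    // createConnection(conf) now runs AuthUtil.loginClient(conf) internally, so no
    // explicit kinit or UserGroupInformation call is needed here.
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("test_table"))) {
      // ... reads and writes as usual ...
    }
  }
}
----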
@@ -46,7 +46,9 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.ReentrantLock;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.CallQueueTooBigException;
+import org.apache.hadoop.hbase.ChoreService;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
@@ -76,6 +78,7 @@ import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
@@ -217,6 +220,8 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
   /** lock guards against multiple threads trying to query the meta region at the same time */
   private final ReentrantLock userRegionLock = new ReentrantLock();
 
+  private ChoreService authService;
+
   /**
    * constructor
    * @param conf Configuration object
@@ -225,6 +230,9 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
       ExecutorService pool, User user) throws IOException {
     this.conf = conf;
     this.user = user;
+    if (user != null && user.isLoginFromKeytab()) {
+      spawnRenewalChore(user.getUGI());
+    }
     this.batchPool = pool;
     this.connectionConfig = new ConnectionConfiguration(conf);
     this.closed = false;
@@ -314,6 +322,11 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
     }
   }
 
+  private void spawnRenewalChore(final UserGroupInformation user) {
+    authService = new ChoreService("Relogin service");
+    authService.scheduleChore(AuthUtil.getAuthRenewalChore(user));
+  }
+
   /**
    * @param useMetaReplicas
    */
@@ -1934,6 +1947,9 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
     if (rpcClient != null) {
       rpcClient.close();
     }
+    if (authService != null) {
+      authService.shutdown();
+    }
   }
 
   /**
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.net.UnknownHostException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.DNS;
 import org.apache.hadoop.hbase.util.Strings;
@@ -65,35 +66,112 @@ import org.slf4j.LoggerFactory;
  *
  * See the "Running Canary in a Kerberos-enabled Cluster" section of the HBase Reference Guide for
  * an example of configuring a user of this Auth Chore to run on a secure cluster.
+ * <pre>
+ * </pre>
+ * This class will be for internal use only from version 2.2.0 on, and will work transparently
+ * for kerberized applications. For more, please refer to
+ * <a href="http://hbase.apache.org/book.html#hbase.secure.configuration">Client-side Configuration for Secure Operation</a>
+ *
+ * @deprecated since 2.2.0, to be removed in hbase-3.0.0.
  */
+@Deprecated
 @InterfaceAudience.Public
-public class AuthUtil {
+public final class AuthUtil {
+  // TODO: Mark this class InterfaceAudience.Private from 3.0.0
   private static final Logger LOG = LoggerFactory.getLogger(AuthUtil.class);
 
   /** Prefix character to denote group names */
   private static final String GROUP_PREFIX = "@";
 
+  /** Client keytab file */
+  public static final String HBASE_CLIENT_KEYTAB_FILE = "hbase.client.keytab.file";
+
+  /** Client principal */
+  public static final String HBASE_CLIENT_KERBEROS_PRINCIPAL = "hbase.client.keytab.principal";
+
   private AuthUtil() {
     super();
   }
 
   /**
-   * Checks if security is enabled and if so, launches chore for refreshing kerberos ticket.
-   * @param conf the hbase service configuration
-   * @return a ScheduledChore for renewals, if needed, and null otherwise.
+   * For a kerberized cluster, return the login user (from kinit or from the keytab if specified).
+   * For a non-kerberized cluster, return the system user.
+   * @param conf configuration file
+   * @return user
+   * @throws IOException login exception
    */
-  public static ScheduledChore getAuthChore(Configuration conf) throws IOException {
-    UserProvider userProvider = UserProvider.instantiate(conf);
-    // login the principal (if using secure Hadoop)
-    boolean securityEnabled =
-        userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled();
-    if (!securityEnabled) return null;
-    String host = null;
+  @InterfaceAudience.Private
+  public static User loginClient(Configuration conf) throws IOException {
+    UserProvider provider = UserProvider.instantiate(conf);
+    User user = provider.getCurrent();
+    boolean securityOn = provider.isHBaseSecurityEnabled() && provider.isHadoopSecurityEnabled();
+
+    if (securityOn) {
+      boolean fromKeytab = provider.shouldLoginFromKeytab();
+      if (user.getUGI().hasKerberosCredentials()) {
+        // There's already a login user.
+        // To avoid reusing the wrong credentials (a dangerous security issue),
+        // check whether the caller specified a keytab and a principal:
+        // 1. Yes: check whether the principals match.
+        //    a. They match: just return the existing user.
+        //    b. They don't match: login using the keytab.
+        // 2. No: the user may have logged in through kinit (the old way), so also just return.
+        if (fromKeytab) {
+          return checkPrincipalMatch(conf, user.getUGI().getUserName()) ? user :
+            loginFromKeytabAndReturnUser(provider);
+        }
+        return user;
+      } else if (fromKeytab) {
+        // Kerberos is on and the client specified a keytab and principal, but has not logged in yet.
+        return loginFromKeytabAndReturnUser(provider);
+      }
+    }
+    return user;
+  }
+
+  private static boolean checkPrincipalMatch(Configuration conf, String loginUserName) {
+    String configuredUserName = conf.get(HBASE_CLIENT_KERBEROS_PRINCIPAL);
+    boolean match = configuredUserName.equals(loginUserName);
+    if (!match) {
+      LOG.warn("Trying to login with a different user: {}, existing user is {}.",
        configuredUserName, loginUserName);
+    }
+    return match;
+  }
+
+  private static User loginFromKeytabAndReturnUser(UserProvider provider) throws IOException {
     try {
-      host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
+      provider.login(HBASE_CLIENT_KEYTAB_FILE, HBASE_CLIENT_KERBEROS_PRINCIPAL);
+    } catch (IOException ioe) {
+      LOG.error("Error while trying to login as user {} through {}, with message: {}.",
+        HBASE_CLIENT_KERBEROS_PRINCIPAL, HBASE_CLIENT_KEYTAB_FILE,
+        ioe.getMessage());
+      throw ioe;
+    }
+    return provider.getCurrent();
+  }
+
+  /**
+   * For a kerberized cluster, return the login user (from kinit or from a keytab).
+   * The principal should have the following format: name/fully.qualified.domain.name@REALM.
+   * For a non-kerberized cluster, return the system user.
+   * <p>
+   * NOT recommended unless you are sure what you are doing; this is for the canary only.
+   * Please use AuthUtil#loginClient instead.
+   * @param conf configuration file
+   * @return user
+   * @throws IOException login exception
+   */
+  private static User loginClientAsService(Configuration conf) throws IOException {
+    UserProvider provider = UserProvider.instantiate(conf);
+    if (provider.isHBaseSecurityEnabled() && provider.isHadoopSecurityEnabled()) {
+      try {
+        if (provider.shouldLoginFromKeytab()) {
+          String host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
            conf.get("hbase.client.dns.interface", "default"),
            conf.get("hbase.client.dns.nameserver", "default")));
-      userProvider.login("hbase.client.keytab.file", "hbase.client.kerberos.principal", host);
+          provider.login(HBASE_CLIENT_KEYTAB_FILE, HBASE_CLIENT_KERBEROS_PRINCIPAL, host);
+        }
       } catch (UnknownHostException e) {
         LOG.error("Error resolving host name: " + e.getMessage(), e);
         throw e;
@@ -101,9 +179,52 @@ public class AuthUtil {
         LOG.error("Error while trying to perform the initial login: " + e.getMessage(), e);
         throw e;
       }
     }
+    return provider.getCurrent();
+  }
+
-    final UserGroupInformation ugi = userProvider.getCurrent().getUGI();
-    Stoppable stoppable = new Stoppable() {
+  /**
+   * Checks if security is enabled and if so, launches a chore for refreshing the kerberos ticket.
+   * @return a ScheduledChore for renewals.
+   */
+  @InterfaceAudience.Private
+  public static ScheduledChore getAuthRenewalChore(final UserGroupInformation user) {
+    if (!user.hasKerberosCredentials()) {
+      return null;
+    }
+
+    Stoppable stoppable = createDummyStoppable();
+    // if you're in debug mode this is useful to avoid getting spammed by the getTGT()
+    // you can increase this, keeping in mind that the default refresh window is 0.8
+    // e.g. 5min tgt * 0.8 = 4min refresh, so the interval had better be way less than 1min
+    final int CHECK_TGT_INTERVAL = 30 * 1000; // 30sec
+    return new ScheduledChore("RefreshCredentials", stoppable, CHECK_TGT_INTERVAL) {
+      @Override
+      protected void chore() {
+        try {
+          user.checkTGTAndReloginFromKeytab();
+        } catch (IOException e) {
+          LOG.error("Got exception while trying to refresh credentials: " + e.getMessage(), e);
+        }
+      }
+    };
+  }
+
+  /**
+   * Checks if security is enabled and if so, launches a chore for refreshing the kerberos ticket.
+   * @param conf the hbase service configuration
+   * @return a ScheduledChore for renewals, if needed, and null otherwise.
+   * @deprecated Deprecated since 2.2.0, this method will be internal use only after 3.0.0.
+   */
+  @Deprecated
+  public static ScheduledChore getAuthChore(Configuration conf) throws IOException {
+    // TODO: Mark this method InterfaceAudience.Private from 3.0.0
+    User user = loginClientAsService(conf);
+    return getAuthRenewalChore(user.getUGI());
+  }
+
+  private static Stoppable createDummyStoppable() {
+    return new Stoppable() {
       private volatile boolean isStopped = false;
 
       @Override
@@ -116,25 +237,6 @@ public class AuthUtil {
         return isStopped;
       }
     };
-
-    // if you're in debug mode this is useful to avoid getting spammed by the getTGT()
-    // you can increase this, keeping in mind that the default refresh window is 0.8
-    // e.g. 5min tgt * 0.8 = 4min refresh so interval is better be way less than 1min
-    final int CHECK_TGT_INTERVAL = 30 * 1000; // 30sec
-
-    ScheduledChore refreshCredentials =
-      new ScheduledChore("RefreshCredentials", stoppable, CHECK_TGT_INTERVAL) {
-        @Override
-        protected void chore() {
-          try {
-            ugi.checkTGTAndReloginFromKeytab();
-          } catch (IOException e) {
-            LOG.error("Got exception while trying to refresh credentials: " + e.getMessage(), e);
-          }
-        }
-      };
-
-    return refreshCredentials;
   }
 
   /**
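Callers that relied on `getAuthChore` before 2.2.0 can keep the same pattern for now; a hedged sketch of that service-side usage (the chore-service name is illustrative):

[source,java]
----
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ScheduledChore;

public class ServiceAuthSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Deprecated but still supported: log in as a service principal
    // (name/fully.qualified.domain.name@REALM) and keep the ticket fresh with a chore.
    ScheduledChore authChore = AuthUtil.getAuthChore(conf);
    if (authChore != null) {
      ChoreService choreService = new ChoreService("AUTH_SERVICE"); // name is illustrative
      choreService.scheduleChore(authChore);
    }
    // ... run the long-lived service ...
  }
}
----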
@@ -27,9 +27,11 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.concurrent.ExecutionException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.util.Methods;
 import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.SecurityUtil;
@@ -136,6 +138,13 @@ public abstract class User {
     ugi.addToken(token);
   }
 
+  /**
+   * @return true if user credentials are obtained from keytab.
+   */
+  public boolean isLoginFromKeytab() {
+    return ugi.isFromKeytab();
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) {
@@ -231,6 +240,16 @@ public abstract class User {
     SecureHadoopUser.login(conf, fileConfKey, principalConfKey, localhost);
   }
 
+  /**
+   * Login with the given keytab and principal.
+   * @param keytabLocation path of keytab
+   * @param principalName login principal
+   * @throws IOException underlying exception from UserGroupInformation.loginUserFromKeytab
+   */
+  public static void login(String keytabLocation, String principalName) throws IOException {
+    SecureHadoopUser.login(keytabLocation, principalName);
+  }
+
   /**
    * Returns whether or not Kerberos authentication is configured for Hadoop.
    * For non-secure Hadoop, this always returns <code>false</code>.
@@ -250,6 +269,21 @@ public abstract class User {
     return "kerberos".equalsIgnoreCase(conf.get(HBASE_SECURITY_CONF_KEY));
   }
 
+  /**
+   * In a secure environment, if the user has specified a keytab and principal, the HBase client
+   * will try to log in with them. Otherwise, the client will try to obtain a ticket from the
+   * system (e.g. through kinit).
+   * @param conf configuration file
+   * @return true if keytab and principal are configured
+   */
+  public static boolean shouldLoginFromKeytab(Configuration conf) {
+    Optional<String> keytab =
+      Optional.ofNullable(conf.get(AuthUtil.HBASE_CLIENT_KEYTAB_FILE));
+    Optional<String> principal =
+      Optional.ofNullable(conf.get(AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL));
+    return keytab.isPresent() && principal.isPresent();
+  }
+
   /* Concrete implementations */
 
   /**
@@ -345,6 +379,19 @@ public abstract class User {
     }
   }
 
+    /**
+     * Login through the configured keytab and principal.
+     * @param keytabLocation location of keytab
+     * @param principalName principal in keytab
+     * @throws IOException exception from UserGroupInformation.loginUserFromKeytab
+     */
+    public static void login(String keytabLocation, String principalName)
+        throws IOException {
+      if (isSecurityEnabled()) {
+        UserGroupInformation.loginUserFromKeytab(principalName, keytabLocation);
+      }
+    }
+
     /**
      * Returns the result of {@code UserGroupInformation.isSecurityEnabled()}.
      */
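To make the distinction between the two static `User.login` overloads concrete, here is a small sketch; the config keys, hostname, keytab path, and principal are placeholders:

[source,java]
----
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.User;

public class LoginOverloadsSketch {
  // SPN-style login (pre-existing overload): the principal named by the config key,
  // typically of the form "hbase/_HOST@REALM", has the given hostname substituted in
  // before the keytab login is performed.
  static void serviceStyleLogin(Configuration conf) throws Exception {
    User.login(conf, "hbase.regionserver.keytab.file",
        "hbase.regionserver.kerberos.principal", "rs1.example.com"); // hostname is a placeholder
  }

  // UPN-style login (overload added by this change): a plain user principal, no hostname involved.
  static void clientStyleLogin() throws Exception {
    User.login("/path/to/client.keytab", "foo@EXAMPLE.COM"); // placeholders
  }
}
----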
@@ -160,6 +160,15 @@ public class UserProvider extends BaseConfigurable {
     return User.isSecurityEnabled();
   }
 
+  /**
+   * In a secure environment, if the user has specified a keytab and principal, the HBase client
+   * will try to log in with them. Otherwise, the client will try to obtain a ticket from the
+   * system (e.g. through kinit).
+   */
+  public boolean shouldLoginFromKeytab() {
+    return User.shouldLoginFromKeytab(this.getConf());
+  }
+
   /**
    * @return the current user within the current execution context
    * @throws IOException if the user cannot be loaded
@@ -182,7 +191,8 @@ public class UserProvider extends BaseConfigurable {
 
   /**
    * Log in the current process using the given configuration keys for the credential file and login
-   * principal.
+   * principal. It is for SPN (Service Principal Name) login; the SPN should be of the form
+   * servicename/fully.qualified.domain.name@REALM.
    * <p>
    * <strong>This is only applicable when running on secure Hadoop</strong> -- see
    * org.apache.hadoop.security.SecurityUtil#login(Configuration,String,String,String). On regular
@@ -197,4 +207,15 @@ public class UserProvider extends BaseConfigurable {
       throws IOException {
     User.login(getConf(), fileConfKey, principalConfKey, localhost);
   }
+
+  /**
+   * Login with the given keytab and principal. This can be used for both SPN (Service Principal
+   * Name) and UPN (User Principal Name), which should be of the form clientname@REALM.
+   * @param fileConfKey config name for client keytab
+   * @param principalConfKey config name for client principal
+   * @throws IOException underlying exception from UserGroupInformation.loginUserFromKeytab
+   */
+  public void login(String fileConfKey, String principalConfKey) throws IOException {
+    User.login(getConf().get(fileConfKey), getConf().get(principalConfKey));
+  }
 }
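Taken together, the UserProvider additions support a small keytab-login routine along the lines of the sketch below (the printout is illustrative):

[source,java]
----
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;

public class UserProviderLoginSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    UserProvider provider = UserProvider.instantiate(conf);
    // Only attempt a keytab login when both client keys are present in the configuration.
    if (provider.shouldLoginFromKeytab()) {
      provider.login(AuthUtil.HBASE_CLIENT_KEYTAB_FILE, AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL);
    }
    User current = provider.getCurrent();
    System.out.println("Running as " + current.getName());
  }
}
----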
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.security;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
@@ -36,6 +37,8 @@ public class HBaseKerberosUtils {
   public static final String KRB_PRINCIPAL = "hbase.regionserver.kerberos.principal";
   public static final String MASTER_KRB_PRINCIPAL = "hbase.master.kerberos.principal";
   public static final String KRB_KEYTAB_FILE = "hbase.regionserver.keytab.file";
+  public static final String CLIENT_PRINCIPAL = AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL;
+  public static final String CLIENT_KEYTAB = AuthUtil.HBASE_CLIENT_KEYTAB_FILE;
 
   public static boolean isKerberosPropertySetted() {
     String krbPrincipal = System.getProperty(KRB_PRINCIPAL);
@@ -54,6 +57,14 @@ public class HBaseKerberosUtils {
     setSystemProperty(KRB_KEYTAB_FILE, keytabFile);
   }
 
+  public static void setClientPrincipalForTesting(String clientPrincipal) {
+    setSystemProperty(CLIENT_PRINCIPAL, clientPrincipal);
+  }
+
+  public static void setClientKeytabForTesting(String clientKeytab) {
+    setSystemProperty(CLIENT_KEYTAB, clientKeytab);
+  }
+
   public static void setSystemProperty(String propertyName, String propertyValue) {
     System.setProperty(propertyName, propertyValue);
   }
@@ -66,6 +77,14 @@ public class HBaseKerberosUtils {
     return System.getProperty(KRB_PRINCIPAL);
   }
 
+  public static String getClientPrincipalForTesting() {
+    return System.getProperty(CLIENT_PRINCIPAL);
+  }
+
+  public static String getClientKeytabForTesting() {
+    return System.getProperty(CLIENT_KEYTAB);
+  }
+
   public static Configuration getConfigurationWoPrincipal() {
     Configuration conf = HBaseConfiguration.create();
     conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
@@ -17,17 +17,21 @@
  */
 package org.apache.hadoop.hbase.security;
 
-import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getConfigurationWoPrincipal;
+import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getClientKeytabForTesting;
+import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getClientPrincipalForTesting;
 import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getKeytabFileForTesting;
 import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getPrincipalForTesting;
 import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getSecuredConfiguration;
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
 import java.io.File;
+import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.testclassification.SecurityTests;
@@ -57,12 +61,18 @@ public class TestUsersOperationsWithSecureHadoop {
 
   private static String PRINCIPAL;
 
+  private static String CLIENT_NAME;
+
   @BeforeClass
   public static void setUp() throws Exception {
     KDC = TEST_UTIL.setupMiniKdc(KEYTAB_FILE);
     PRINCIPAL = "hbase/" + HOST;
-    KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL);
+    CLIENT_NAME = "foo";
+    KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, CLIENT_NAME);
     HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm());
+    HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());
+    HBaseKerberosUtils.setClientPrincipalForTesting(CLIENT_NAME + "@" + KDC.getRealm());
+    HBaseKerberosUtils.setClientKeytabForTesting(KEYTAB_FILE.getAbsolutePath());
   }
 
   @AfterClass
@@ -84,13 +94,8 @@ public class TestUsersOperationsWithSecureHadoop {
    */
   @Test
   public void testUserLoginInSecureHadoop() throws Exception {
-    UserGroupInformation defaultLogin = UserGroupInformation.getLoginUser();
-    Configuration conf = getConfigurationWoPrincipal();
-    User.login(conf, HBaseKerberosUtils.KRB_KEYTAB_FILE, HBaseKerberosUtils.KRB_PRINCIPAL,
-      "localhost");
-
-    UserGroupInformation failLogin = UserGroupInformation.getLoginUser();
-    assertTrue("ugi should be the same in case fail login", defaultLogin.equals(failLogin));
+    // Default login is system user.
+    UserGroupInformation defaultLogin = UserGroupInformation.getCurrentUser();
 
     String nnKeyTab = getKeytabFileForTesting();
     String dnPrincipal = getPrincipalForTesting();
@@ -98,7 +103,7 @@ public class TestUsersOperationsWithSecureHadoop {
     assertNotNull("KerberosKeytab was not specified", nnKeyTab);
     assertNotNull("KerberosPrincipal was not specified", dnPrincipal);
 
-    conf = getSecuredConfiguration();
+    Configuration conf = getSecuredConfiguration();
     UserGroupInformation.setConfiguration(conf);
 
     User.login(conf, HBaseKerberosUtils.KRB_KEYTAB_FILE, HBaseKerberosUtils.KRB_PRINCIPAL,
@@ -107,4 +112,40 @@ public class TestUsersOperationsWithSecureHadoop {
     assertFalse("ugi should be different in in case success login",
       defaultLogin.equals(successLogin));
   }
+
+  @Test
+  public void testLoginWithUserKeytabAndPrincipal() throws Exception {
+    String clientKeytab = getClientKeytabForTesting();
+    String clientPrincipal = getClientPrincipalForTesting();
+    assertNotNull("Path for client keytab is not specified.", clientKeytab);
+    assertNotNull("Client principal is not specified.", clientPrincipal);
+
+    Configuration conf = getSecuredConfiguration();
+    conf.set(AuthUtil.HBASE_CLIENT_KEYTAB_FILE, clientKeytab);
+    conf.set(AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL, clientPrincipal);
+    UserGroupInformation.setConfiguration(conf);
+
+    UserProvider provider = UserProvider.instantiate(conf);
+    assertTrue("Client principal or keytab is empty", provider.shouldLoginFromKeytab());
+
+    provider.login(AuthUtil.HBASE_CLIENT_KEYTAB_FILE, AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL);
+    User loginUser = provider.getCurrent();
+    assertEquals(CLIENT_NAME, loginUser.getShortName());
+    assertEquals(getClientPrincipalForTesting(), loginUser.getName());
+  }
+
+  @Test
+  public void testAuthUtilLogin() throws Exception {
+    String clientKeytab = getClientKeytabForTesting();
+    String clientPrincipal = getClientPrincipalForTesting();
+    Configuration conf = getSecuredConfiguration();
+    conf.set(AuthUtil.HBASE_CLIENT_KEYTAB_FILE, clientKeytab);
+    conf.set(AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL, clientPrincipal);
+    UserGroupInformation.setConfiguration(conf);
+
+    User user = AuthUtil.loginClient(conf);
+    assertTrue(user.isLoginFromKeytab());
+    assertEquals(CLIENT_NAME, user.getShortName());
+    assertEquals(getClientPrincipalForTesting(), user.getName());
+  }
 }
@@ -179,7 +179,25 @@ Add the following to the `hbase-site.xml` file on every client:
 </property>
 ----
 
-The client environment must be logged in to Kerberos from KDC or keytab via the `kinit` command before communication with the HBase cluster will be possible.
+Before version 2.2.0, the client environment must be logged in to Kerberos from KDC or keytab via the `kinit` command before communication with the HBase cluster will be possible.
+
+Since 2.2.0, the client can instead specify the following configurations in `hbase-site.xml`:
+[source,xml]
+----
+<property>
+  <name>hbase.client.keytab.file</name>
+  <value>/local/path/to/client/keytab</value>
+</property>
+
+<property>
+  <name>hbase.client.keytab.principal</name>
+  <value>foo@EXAMPLE.COM</value>
+</property>
+----
+The application then performs the login and credential renewal automatically, with no further action required from the client.
+
+This is an optional feature: a client that upgrades to 2.2.0 can keep the login and credential renewal logic it already had in older versions, as long as `hbase.client.keytab.file`
+and `hbase.client.keytab.principal` are left unset.
 
 Be advised that if the `hbase.security.authentication` in the client- and server-side site files do not match, the client will not be able to communicate with the cluster.
 
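With those two properties in place in the client site file, a sketch of the resulting application code (table name is a placeholder) needs no explicit login call at all:

[source,java]
----
Configuration conf = HBaseConfiguration.create();
// No kinit and no UserGroupInformation calls are needed: ConnectionFactory logs in from the
// configured keytab and schedules credential renewal internally.
try (Connection connection = ConnectionFactory.createConnection(conf);
     Table table = connection.getTable(TableName.valueOf("mytable"))) {
  // use the table as usual
}
----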