diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java index cf26756d915..a05764ee9bf 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java @@ -21,6 +21,9 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.NO_NONCE_GENERATOR; import static org.apache.hadoop.hbase.client.ConnectionUtils.getStubKey; import static org.apache.hadoop.hbase.client.NonceGenerator.CLIENT_NONCES_ENABLED_KEY; +import org.apache.hadoop.hbase.AuthUtil; +import org.apache.hadoop.hbase.ChoreService; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.hbase.thirdparty.io.netty.util.HashedWheelTimer; @@ -99,10 +102,15 @@ class AsyncConnectionImpl implements AsyncConnection { private final AtomicReference> masterStubMakeFuture = new AtomicReference<>(); + private ChoreService authService; + public AsyncConnectionImpl(Configuration conf, AsyncRegistry registry, String clusterId, User user) { this.conf = conf; this.user = user; + if (user.isLoginFromKeytab()) { + spawnRenewalChore(user.getUGI()); + } this.connConf = new AsyncConnectionConfiguration(conf); this.registry = registry; this.rpcClient = RpcClientFactory.createClient(conf, clusterId); @@ -119,6 +127,11 @@ class AsyncConnectionImpl implements AsyncConnection { } } + private void spawnRenewalChore(final UserGroupInformation user) { + authService = new ChoreService("Relogin service"); + authService.scheduleChore(AuthUtil.getAuthRenewalChore(user)); + } + @Override public Configuration getConfiguration() { return conf; @@ -128,6 +141,9 @@ class AsyncConnectionImpl implements AsyncConnection { public void close() { IOUtils.closeQuietly(rpcClient); 
IOUtils.closeQuietly(registry); + if (authService != null) { + authService.shutdown(); + } } @Override diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java index 1712a5480a1..e24af7411d2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java @@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.client; import java.io.IOException; import java.lang.reflect.Constructor; +import java.security.PrivilegedExceptionAction; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.security.User; @@ -47,6 +49,16 @@ import org.apache.hadoop.hbase.util.ReflectionUtils; * } * * + * Since 2.2.0, a Connection created by ConnectionFactory can contain user-specified kerberos + * credentials if the caller has the following two configurations set: + * + * In this way, the caller can directly connect to a kerberized cluster without caring about + * login and credential renewal logic in the application. + *
+ * 
* Similarly, {@link Connection} also returns {@link Admin} and {@link RegionLocator} * implementations. * @see Connection @@ -84,7 +96,8 @@ public class ConnectionFactory { * @return Connection object for conf */ public static Connection createConnection() throws IOException { - return createConnection(HBaseConfiguration.create(), null, null); + Configuration conf = HBaseConfiguration.create(); + return createConnection(conf, null, AuthUtil.loginClient(conf)); } /** @@ -111,7 +124,7 @@ public class ConnectionFactory { * @return Connection object for conf */ public static Connection createConnection(Configuration conf) throws IOException { - return createConnection(conf, null, null); + return createConnection(conf, null, AuthUtil.loginClient(conf)); } /** @@ -140,7 +153,7 @@ public class ConnectionFactory { */ public static Connection createConnection(Configuration conf, ExecutorService pool) throws IOException { - return createConnection(conf, pool, null); + return createConnection(conf, pool, AuthUtil.loginClient(conf)); } /** @@ -196,13 +209,8 @@ public class ConnectionFactory { * @param pool the thread pool to use for batch operations * @return Connection object for conf */ - public static Connection createConnection(Configuration conf, ExecutorService pool, User user) - throws IOException { - if (user == null) { - UserProvider provider = UserProvider.instantiate(conf); - user = provider.getCurrent(); - } - + public static Connection createConnection(Configuration conf, ExecutorService pool, + final User user) throws IOException { String className = conf.get(ClusterConnection.HBASE_CLIENT_CONNECTION_IMPL, ConnectionImplementation.class.getName()); Class clazz; @@ -216,7 +224,9 @@ public class ConnectionFactory { Constructor constructor = clazz.getDeclaredConstructor(Configuration.class, ExecutorService.class, User.class); constructor.setAccessible(true); - return (Connection) constructor.newInstance(conf, pool, user); + return user.runAs( + 
(PrivilegedExceptionAction)() -> + (Connection) constructor.newInstance(conf, pool, user)); } catch (Exception e) { throw new IOException(e); } @@ -243,7 +253,7 @@ public class ConnectionFactory { public static CompletableFuture createAsyncConnection(Configuration conf) { User user; try { - user = UserProvider.instantiate(conf).getCurrent(); + user = AuthUtil.loginClient(conf); } catch (IOException e) { CompletableFuture future = new CompletableFuture<>(); future.completeExceptionally(e); @@ -269,7 +279,7 @@ public class ConnectionFactory { * @throws IOException */ public static CompletableFuture createAsyncConnection(Configuration conf, - User user) { + final User user) { CompletableFuture future = new CompletableFuture<>(); AsyncRegistry registry = AsyncRegistryFactory.getRegistry(conf); registry.getClusterId().whenComplete((clusterId, error) -> { @@ -284,7 +294,10 @@ public class ConnectionFactory { Class clazz = conf.getClass(HBASE_CLIENT_ASYNC_CONNECTION_IMPL, AsyncConnectionImpl.class, AsyncConnection.class); try { - future.complete(ReflectionUtils.newInstance(clazz, conf, registry, clusterId, user)); + future.complete( + user.runAs((PrivilegedExceptionAction)() -> + ReflectionUtils.newInstance(clazz, conf, registry, clusterId, user)) + ); } catch (Exception e) { future.completeExceptionally(e); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java index f78005f836f..7e07dafa7e7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java @@ -46,7 +46,9 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.AuthUtil; import 
org.apache.hadoop.hbase.CallQueueTooBigException; +import org.apache.hadoop.hbase.ChoreService; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; @@ -76,6 +78,7 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.ipc.RemoteException; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; @@ -217,6 +220,8 @@ class ConnectionImplementation implements ClusterConnection, Closeable { /** lock guards against multiple threads trying to query the meta region at the same time */ private final ReentrantLock userRegionLock = new ReentrantLock(); + private ChoreService authService; + /** * constructor * @param conf Configuration object @@ -225,6 +230,9 @@ class ConnectionImplementation implements ClusterConnection, Closeable { ExecutorService pool, User user) throws IOException { this.conf = conf; this.user = user; + if (user != null && user.isLoginFromKeytab()) { + spawnRenewalChore(user.getUGI()); + } this.batchPool = pool; this.connectionConfig = new ConnectionConfiguration(conf); this.closed = false; @@ -314,6 +322,11 @@ class ConnectionImplementation implements ClusterConnection, Closeable { } } + private void spawnRenewalChore(final UserGroupInformation user) { + authService = new ChoreService("Relogin service"); + authService.scheduleChore(AuthUtil.getAuthRenewalChore(user)); + } + /** * @param useMetaReplicas */ @@ -1934,6 +1947,9 @@ class ConnectionImplementation implements ClusterConnection, Closeable { if (rpcClient != null) { rpcClient.close(); } + if (authService != null) { + authService.shutdown(); + } } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java index 5880b8c33b6..78da55d0ae5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.net.UnknownHostException; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.util.DNS; import org.apache.hadoop.hbase.util.Strings; @@ -65,45 +66,165 @@ import org.slf4j.LoggerFactory; * * See the "Running Canary in a Kerberos-enabled Cluster" section of the HBase Reference Guide for * an example of configuring a user of this Auth Chore to run on a secure cluster. + *
+ * 
+ * This class will be internal use only from 2.2.0 version, and will transparently work + * for kerberized applications. For more, please refer + * Client-side Configuration for Secure Operation + * + * @deprecated since 2.2.0, to be removed in hbase-3.0.0. */ +@Deprecated @InterfaceAudience.Public -public class AuthUtil { +public final class AuthUtil { + // TODO: Mark this class InterfaceAudience.Private from 3.0.0 private static final Logger LOG = LoggerFactory.getLogger(AuthUtil.class); /** Prefix character to denote group names */ private static final String GROUP_PREFIX = "@"; + /** Client keytab file */ + public static final String HBASE_CLIENT_KEYTAB_FILE = "hbase.client.keytab.file"; + + /** Client principal */ + public static final String HBASE_CLIENT_KERBEROS_PRINCIPAL = "hbase.client.keytab.principal"; + private AuthUtil() { super(); } + /** + * For kerberized cluster, return login user (from kinit or from keytab if specified). + * For non-kerberized cluster, return system user. + * @param conf configuration file + * @return user + * @throws IOException login exception + */ + @InterfaceAudience.Private + public static User loginClient(Configuration conf) throws IOException { + UserProvider provider = UserProvider.instantiate(conf); + User user = provider.getCurrent(); + boolean securityOn = provider.isHBaseSecurityEnabled() && provider.isHadoopSecurityEnabled(); + + if (securityOn) { + boolean fromKeytab = provider.shouldLoginFromKeytab(); + if (user.getUGI().hasKerberosCredentials()) { + // There's already a login user. + // But we should avoid misusing credentials, which is a dangerous security issue, + // so here check whether user specified a keytab and a principal: + // 1. Yes, check if user principal match. + // a. match, just return. + // b. mismatch, login using keytab. + // 2. No, user may login through kinit, this is the old way, also just return. + if (fromKeytab) { + return checkPrincipalMatch(conf, user.getUGI().getUserName()) ?
user : + loginFromKeytabAndReturnUser(provider); + } + return user; + } else if (fromKeytab) { + // Kerberos is on and client specify a keytab and principal, but client doesn't login yet. + return loginFromKeytabAndReturnUser(provider); + } + } + return user; + } + + private static boolean checkPrincipalMatch(Configuration conf, String loginUserName) { + String configuredUserName = conf.get(HBASE_CLIENT_KERBEROS_PRINCIPAL); + boolean match = configuredUserName.equals(loginUserName); + if (!match) { + LOG.warn("Trying to login with a different user: {}, existed user is {}.", + configuredUserName, loginUserName); + } + return match; + } + + private static User loginFromKeytabAndReturnUser(UserProvider provider) throws IOException { + try { + provider.login(HBASE_CLIENT_KEYTAB_FILE, HBASE_CLIENT_KERBEROS_PRINCIPAL); + } catch (IOException ioe) { + LOG.error("Error while trying to login as user {} through {}, with message: {}.", + HBASE_CLIENT_KERBEROS_PRINCIPAL, HBASE_CLIENT_KEYTAB_FILE, + ioe.getMessage()); + throw ioe; + } + return provider.getCurrent(); + } + + /** + * For kerberized cluster, return login user (from kinit or from keytab). + * Principal should be the following format: name/fully.qualified.domain.name@REALM. + * For non-kerberized cluster, return system user. + *

+ * It is NOT recommended to use this method unless you're sure what you're doing; it is for canary only. + * Please use User#loginClient. + * @param conf configuration file + * @return user + * @throws IOException login exception + */ + private static User loginClientAsService(Configuration conf) throws IOException { + UserProvider provider = UserProvider.instantiate(conf); + if (provider.isHBaseSecurityEnabled() && provider.isHadoopSecurityEnabled()) { + try { + if (provider.shouldLoginFromKeytab()) { + String host = Strings.domainNamePointerToHostName(DNS.getDefaultHost( + conf.get("hbase.client.dns.interface", "default"), + conf.get("hbase.client.dns.nameserver", "default"))); + provider.login(HBASE_CLIENT_KEYTAB_FILE, HBASE_CLIENT_KERBEROS_PRINCIPAL, host); + } + } catch (UnknownHostException e) { + LOG.error("Error resolving host name: " + e.getMessage(), e); + throw e; + } catch (IOException e) { + LOG.error("Error while trying to perform the initial login: " + e.getMessage(), e); + throw e; + } + } + return provider.getCurrent(); + } + + /** + * Checks if security is enabled and if so, launches chore for refreshing kerberos ticket. + * @return a ScheduledChore for renewals. + */ + @InterfaceAudience.Private + public static ScheduledChore getAuthRenewalChore(final UserGroupInformation user) { + if (!user.hasKerberosCredentials()) { + return null; + } + + Stoppable stoppable = createDummyStoppable(); + // if you're in debug mode this is useful to avoid getting spammed by the getTGT() + // you can increase this, keeping in mind that the default refresh window is 0.8 + // e.g.
5min tgt * 0.8 = 4min refresh so interval is better be way less than 1min + final int CHECK_TGT_INTERVAL = 30 * 1000; // 30sec + return new ScheduledChore("RefreshCredentials", stoppable, CHECK_TGT_INTERVAL) { + @Override + protected void chore() { + try { + user.checkTGTAndReloginFromKeytab(); + } catch (IOException e) { + LOG.error("Got exception while trying to refresh credentials: " + e.getMessage(), e); + } + } + }; + } + /** * Checks if security is enabled and if so, launches chore for refreshing kerberos ticket. * @param conf the hbase service configuration * @return a ScheduledChore for renewals, if needed, and null otherwise. + * @deprecated Deprecated since 2.2.0, this method will be internal use only after 3.0.0. */ + @Deprecated public static ScheduledChore getAuthChore(Configuration conf) throws IOException { - UserProvider userProvider = UserProvider.instantiate(conf); - // login the principal (if using secure Hadoop) - boolean securityEnabled = - userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled(); - if (!securityEnabled) return null; - String host = null; - try { - host = Strings.domainNamePointerToHostName(DNS.getDefaultHost( - conf.get("hbase.client.dns.interface", "default"), - conf.get("hbase.client.dns.nameserver", "default"))); - userProvider.login("hbase.client.keytab.file", "hbase.client.kerberos.principal", host); - } catch (UnknownHostException e) { - LOG.error("Error resolving host name: " + e.getMessage(), e); - throw e; - } catch (IOException e) { - LOG.error("Error while trying to perform the initial login: " + e.getMessage(), e); - throw e; - } + // TODO: Mark this method InterfaceAudience.Private from 3.0.0 + User user = loginClientAsService(conf); + return getAuthRenewalChore(user.getUGI()); + } - final UserGroupInformation ugi = userProvider.getCurrent().getUGI(); - Stoppable stoppable = new Stoppable() { + private static Stoppable createDummyStoppable() { + return new Stoppable() { private volatile 
boolean isStopped = false; @Override @@ -116,25 +237,6 @@ public class AuthUtil { return isStopped; } }; - - // if you're in debug mode this is useful to avoid getting spammed by the getTGT() - // you can increase this, keeping in mind that the default refresh window is 0.8 - // e.g. 5min tgt * 0.8 = 4min refresh so interval is better be way less than 1min - final int CHECK_TGT_INTERVAL = 30 * 1000; // 30sec - - ScheduledChore refreshCredentials = - new ScheduledChore("RefreshCredentials", stoppable, CHECK_TGT_INTERVAL) { - @Override - protected void chore() { - try { - ugi.checkTGTAndReloginFromKeytab(); - } catch (IOException e) { - LOG.error("Got exception while trying to refresh credentials: " + e.getMessage(), e); - } - } - }; - - return refreshCredentials; } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java index af6d442c4fc..733a658ad4c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java @@ -27,9 +27,11 @@ import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.concurrent.ExecutionException; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.util.Methods; import org.apache.hadoop.security.Groups; import org.apache.hadoop.security.SecurityUtil; @@ -136,6 +138,13 @@ public abstract class User { ugi.addToken(token); } + /** + * @return true if user credentials are obtained from keytab. + */ + public boolean isLoginFromKeytab() { + return ugi.isFromKeytab(); + } + @Override public boolean equals(Object o) { if (this == o) { @@ -231,6 +240,16 @@ public abstract class User { SecureHadoopUser.login(conf, fileConfKey, principalConfKey, localhost); } + /** + * Login with the given keytab and principal. 
+ * @param keytabLocation path of keytab + * @param pricipalName login principal + * @throws IOException underlying exception from UserGroupInformation.loginUserFromKeytab + */ + public static void login(String keytabLocation, String pricipalName) throws IOException { + SecureHadoopUser.login(keytabLocation, pricipalName); + } + /** * Returns whether or not Kerberos authentication is configured for Hadoop. * For non-secure Hadoop, this always returns false. @@ -250,6 +269,21 @@ public abstract class User { return "kerberos".equalsIgnoreCase(conf.get(HBASE_SECURITY_CONF_KEY)); } + /** + * In secure environment, if a user specified his keytab and principal, + * a hbase client will try to login with them. Otherwise, hbase client will try to obtain + * ticket(through kinit) from system. + * @param conf configuration file + * @return true if keytab and principal are configured + */ + public static boolean shouldLoginFromKeytab(Configuration conf) { + Optional keytab = + Optional.ofNullable(conf.get(AuthUtil.HBASE_CLIENT_KEYTAB_FILE)); + Optional principal = + Optional.ofNullable(conf.get(AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL)); + return keytab.isPresent() && principal.isPresent(); + } + /* Concrete implementations */ /** @@ -345,6 +379,19 @@ public abstract class User { } } + /** + * Login through configured keytab and pricipal. + * @param keytabLocation location of keytab + * @param principalName principal in keytab + * @throws IOException exception from UserGroupInformation.loginUserFromKeytab + */ + public static void login(String keytabLocation, String principalName) + throws IOException { + if (isSecurityEnabled()) { + UserGroupInformation.loginUserFromKeytab(principalName, keytabLocation); + } + } + /** * Returns the result of {@code UserGroupInformation.isSecurityEnabled()}. 
*/ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java index 0f7d4284c8c..17796ee56d5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java @@ -160,6 +160,15 @@ public class UserProvider extends BaseConfigurable { return User.isSecurityEnabled(); } + /** + * In secure environment, if a user specified his keytab and principal, + * a hbase client will try to login with them. Otherwise, hbase client will try to obtain + * ticket(through kinit) from system. + */ + public boolean shouldLoginFromKeytab() { + return User.shouldLoginFromKeytab(this.getConf()); + } + /** * @return the current user within the current execution context * @throws IOException if the user cannot be loaded @@ -182,7 +191,8 @@ public class UserProvider extends BaseConfigurable { /** * Log in the current process using the given configuration keys for the credential file and login - * principal. + * principal. It is for SPN(Service Principal Name) login. SPN should be this format, + * servicename/fully.qualified.domain.name@REALM. *

* This is only applicable when running on secure Hadoop -- see * org.apache.hadoop.security.SecurityUtil#login(Configuration,String,String,String). On regular @@ -197,4 +207,15 @@ public class UserProvider extends BaseConfigurable { throws IOException { User.login(getConf(), fileConfKey, principalConfKey, localhost); } + + /** + * Login with given keytab and principal. This can be used for both SPN(Service Principal Name) + * and UPN(User Principal Name) which format should be clientname@REALM. + * @param fileConfKey config name for client keytab + * @param principalConfKey config name for client principal + * @throws IOException underlying exception from UserGroupInformation.loginUserFromKeytab + */ + public void login(String fileConfKey, String principalConfKey) throws IOException { + User.login(getConf().get(fileConfKey), getConf().get(principalConfKey)); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java index b946e7458a0..4ce931f4473 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.security; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; @@ -36,6 +37,8 @@ public class HBaseKerberosUtils { public static final String KRB_PRINCIPAL = "hbase.regionserver.kerberos.principal"; public static final String MASTER_KRB_PRINCIPAL = "hbase.master.kerberos.principal"; public static final String KRB_KEYTAB_FILE = "hbase.regionserver.keytab.file"; + public static final String CLIENT_PRINCIPAL = AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL; + 
public static final String CLIENT_KEYTAB = AuthUtil.HBASE_CLIENT_KEYTAB_FILE; public static boolean isKerberosPropertySetted() { String krbPrincipal = System.getProperty(KRB_PRINCIPAL); @@ -54,6 +57,14 @@ public class HBaseKerberosUtils { setSystemProperty(KRB_KEYTAB_FILE, keytabFile); } + public static void setClientPrincipalForTesting(String clientPrincipal) { + setSystemProperty(CLIENT_PRINCIPAL, clientPrincipal); + } + + public static void setClientKeytabForTesting(String clientKeytab) { + setSystemProperty(CLIENT_KEYTAB, clientKeytab); + } + public static void setSystemProperty(String propertyName, String propertyValue) { System.setProperty(propertyName, propertyValue); } @@ -66,6 +77,14 @@ public class HBaseKerberosUtils { return System.getProperty(KRB_PRINCIPAL); } + public static String getClientPrincipalForTesting() { + return System.getProperty(CLIENT_PRINCIPAL); + } + + public static String getClientKeytabForTesting() { + return System.getProperty(CLIENT_KEYTAB); + } + public static Configuration getConfigurationWoPrincipal() { Configuration conf = HBaseConfiguration.create(); conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java index b69c5d9d6ba..d240f91786b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java @@ -17,17 +17,21 @@ */ package org.apache.hadoop.hbase.security; -import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getConfigurationWoPrincipal; +import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getClientKeytabForTesting; +import static 
org.apache.hadoop.hbase.security.HBaseKerberosUtils.getClientPrincipalForTesting; import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getKeytabFileForTesting; import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getPrincipalForTesting; import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getSecuredConfiguration; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; + import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.testclassification.SecurityTests; @@ -57,12 +61,18 @@ public class TestUsersOperationsWithSecureHadoop { private static String PRINCIPAL; + private static String CLIENT_NAME; + @BeforeClass public static void setUp() throws Exception { KDC = TEST_UTIL.setupMiniKdc(KEYTAB_FILE); PRINCIPAL = "hbase/" + HOST; - KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL); + CLIENT_NAME = "foo"; + KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, CLIENT_NAME); HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm()); + HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath()); + HBaseKerberosUtils.setClientPrincipalForTesting(CLIENT_NAME + "@" + KDC.getRealm()); + HBaseKerberosUtils.setClientKeytabForTesting(KEYTAB_FILE.getAbsolutePath()); } @AfterClass @@ -84,13 +94,8 @@ public class TestUsersOperationsWithSecureHadoop { */ @Test public void testUserLoginInSecureHadoop() throws Exception { - UserGroupInformation defaultLogin = UserGroupInformation.getLoginUser(); - Configuration conf = getConfigurationWoPrincipal(); - User.login(conf, HBaseKerberosUtils.KRB_KEYTAB_FILE, HBaseKerberosUtils.KRB_PRINCIPAL, - "localhost"); - - UserGroupInformation failLogin = 
UserGroupInformation.getLoginUser(); - assertTrue("ugi should be the same in case fail login", defaultLogin.equals(failLogin)); + // Default login is system user. + UserGroupInformation defaultLogin = UserGroupInformation.getCurrentUser(); String nnKeyTab = getKeytabFileForTesting(); String dnPrincipal = getPrincipalForTesting(); @@ -98,7 +103,7 @@ public class TestUsersOperationsWithSecureHadoop { assertNotNull("KerberosKeytab was not specified", nnKeyTab); assertNotNull("KerberosPrincipal was not specified", dnPrincipal); - conf = getSecuredConfiguration(); + Configuration conf = getSecuredConfiguration(); UserGroupInformation.setConfiguration(conf); User.login(conf, HBaseKerberosUtils.KRB_KEYTAB_FILE, HBaseKerberosUtils.KRB_PRINCIPAL, @@ -107,4 +112,40 @@ public class TestUsersOperationsWithSecureHadoop { assertFalse("ugi should be different in in case success login", defaultLogin.equals(successLogin)); } + + @Test + public void testLoginWithUserKeytabAndPrincipal() throws Exception { + String clientKeytab = getClientKeytabForTesting(); + String clientPrincipal = getClientPrincipalForTesting(); + assertNotNull("Path for client keytab is not specified.", clientKeytab); + assertNotNull("Client principal is not specified.", clientPrincipal); + + Configuration conf = getSecuredConfiguration(); + conf.set(AuthUtil.HBASE_CLIENT_KEYTAB_FILE, clientKeytab); + conf.set(AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL, clientPrincipal); + UserGroupInformation.setConfiguration(conf); + + UserProvider provider = UserProvider.instantiate(conf); + assertTrue("Client principal or keytab is empty", provider.shouldLoginFromKeytab()); + + provider.login(AuthUtil.HBASE_CLIENT_KEYTAB_FILE, AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL); + User loginUser = provider.getCurrent(); + assertEquals(CLIENT_NAME, loginUser.getShortName()); + assertEquals(getClientPrincipalForTesting(), loginUser.getName()); + } + + @Test + public void testAuthUtilLogin() throws Exception { + String clientKeytab = 
getClientKeytabForTesting(); + String clientPrincipal = getClientPrincipalForTesting(); + Configuration conf = getSecuredConfiguration(); + conf.set(AuthUtil.HBASE_CLIENT_KEYTAB_FILE, clientKeytab); + conf.set(AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL, clientPrincipal); + UserGroupInformation.setConfiguration(conf); + + User user = AuthUtil.loginClient(conf); + assertTrue(user.isLoginFromKeytab()); + assertEquals(CLIENT_NAME, user.getShortName()); + assertEquals(getClientPrincipalForTesting(), user.getName()); + } } diff --git a/src/main/asciidoc/_chapters/security.adoc b/src/main/asciidoc/_chapters/security.adoc index dae6c539523..1afc131cdb3 100644 --- a/src/main/asciidoc/_chapters/security.adoc +++ b/src/main/asciidoc/_chapters/security.adoc @@ -179,7 +179,25 @@ Add the following to the `hbase-site.xml` file on every client: ---- -The client environment must be logged in to Kerberos from KDC or keytab via the `kinit` command before communication with the HBase cluster will be possible. +Before version 2.2.0, the client environment must be logged in to Kerberos from KDC or keytab via the `kinit` command before communication with the HBase cluster will be possible. + +Since 2.2.0, a client can specify the following configurations in `hbase-site.xml`: +[source,xml] +---- + + hbase.client.keytab.file + /local/path/to/client/keytab + + + + hbase.client.keytab.principal + foo@EXAMPLE.COM + +---- +Then the application can automatically perform login and credential renewal without user intervention. + +This is an optional feature. A client upgrading to 2.2.0 can keep the login and credential renewal logic it already implemented on an older version, as long as `hbase.client.keytab.file` +and `hbase.client.keytab.principal` are left unset. Be advised that if the `hbase.security.authentication` in the client- and server-side site files do not match, the client will not be able to communicate with the cluster.