HBASE-24443 Refactor TestCustomSaslAuthenticationProvider (#1790)

Signed-off-by: Guanghao Zhang <zghao@apache.org>
Duo Zhang 2020-05-28 20:44:31 +08:00 committed by GitHub
parent 79d56499ca
commit f06248ef84
3 changed files with 193 additions and 141 deletions
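
The refactor splits the old parameterized test into an abstract base class plus one concrete, categorized test class per RPC server implementation. A condensed sketch of the resulting layout, for orientation only (names are taken from the diff below; imports, the class rule, and most method bodies are elided, and each class lives in its own source file):

// Base class: parameterized only on the RPC client implementation; the RPC
// server implementation is fixed per concrete subclass via startCluster().
public abstract class CustomSaslAuthenticationProviderTestBase {

  @Parameters
  public static Collection<Object[]> parameters() {
    return Arrays.asList(new Object[] { BlockingRpcClient.class.getName() },
      new Object[] { NettyRpcClient.class.getName() });
  }

  @Parameter
  public String rpcClientImpl;

  protected static void startCluster(String rpcServerImpl) throws Exception {
    // starts the secured mini cluster with the chosen RPC server implementation
  }
}

// Concrete test bound to one RPC server implementation.
@RunWith(Parameterized.class)
@Category({ MediumTests.class, SecurityTests.class })
public class TestCustomSaslAuthenticationProviderNettyRpcServer
  extends CustomSaslAuthenticationProviderTestBase {

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    startCluster(NettyRpcServer.class.getName());
  }
}

The SimpleRpcServer variant in the third changed file follows the same pattern.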


@@ -30,7 +30,6 @@ import java.io.File;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
@@ -52,7 +51,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.LocalHBaseCluster;
@@ -72,10 +70,8 @@ import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.exceptions.MasterRegistryFetchException;
 import org.apache.hadoop.hbase.ipc.BlockingRpcClient;
 import org.apache.hadoop.hbase.ipc.NettyRpcClient;
-import org.apache.hadoop.hbase.ipc.NettyRpcServer;
 import org.apache.hadoop.hbase.ipc.RpcClientFactory;
 import org.apache.hadoop.hbase.ipc.RpcServerFactory;
-import org.apache.hadoop.hbase.ipc.SimpleRpcServer;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
 import org.apache.hadoop.hbase.security.SaslUtil;
@@ -83,8 +79,6 @@ import org.apache.hadoop.hbase.security.SecurityInfo;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.token.SecureTestCluster;
 import org.apache.hadoop.hbase.security.token.TokenProvider;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.testclassification.SecurityTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
 import org.apache.hadoop.hbase.util.Pair;
@@ -100,13 +94,10 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameter;
+import org.junit.runners.Parameterized.Parameters;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -116,48 +107,30 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation;
 /**
  * Tests the pluggable authentication framework with SASL using a contrived authentication system.
- *
- * This tests holds a "user database" in memory as a hashmap. Clients provide their password
- * in the client Hadoop configuration. The servers validate this password via the "user database".
+ * This tests holds a "user database" in memory as a hashmap. Clients provide their password in the
+ * client Hadoop configuration. The servers validate this password via the "user database".
  */
-@RunWith(Parameterized.class)
-@Category({MediumTests.class, SecurityTests.class})
-public class TestCustomSaslAuthenticationProvider {
-  private static final Logger LOG = LoggerFactory.getLogger(
-      TestCustomSaslAuthenticationProvider.class);
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-      HBaseClassTestRule.forClass(TestCustomSaslAuthenticationProvider.class);
-  private static final Map<String,String> USER_DATABASE = createUserDatabase();
+public abstract class CustomSaslAuthenticationProviderTestBase {
+  private static final Logger LOG =
+    LoggerFactory.getLogger(CustomSaslAuthenticationProviderTestBase.class);
+  private static final Map<String, String> USER_DATABASE = createUserDatabase();
   private static final String USER1_PASSWORD = "foobarbaz";
   private static final String USER2_PASSWORD = "bazbarfoo";
-  @Parameterized.Parameters(name = "{index}: rpcClientImpl={0}, rpcServerImpl={1}")
+  @Parameters
   public static Collection<Object[]> parameters() {
-    List<Object[]> params = new ArrayList<>();
-    List<String> rpcClientImpls = Arrays.asList(
-        BlockingRpcClient.class.getName(), NettyRpcClient.class.getName());
-    List<String> rpcServerImpls = Arrays.asList(
-        SimpleRpcServer.class.getName(), NettyRpcServer.class.getName());
-    for (String rpcClientImpl : rpcClientImpls) {
-      for (String rpcServerImpl : rpcServerImpls) {
-        params.add(new Object[] { rpcClientImpl, rpcServerImpl });
-      }
-    }
-    return params;
+    return Arrays.asList(new Object[] { BlockingRpcClient.class.getName() },
+      new Object[] { NettyRpcClient.class.getName() });
   }
-  @Parameterized.Parameter(0)
+  @Parameter
   public String rpcClientImpl;
-  @Parameterized.Parameter(1)
-  public String rpcServerImpl;
-  private static Map<String,String> createUserDatabase() {
-    Map<String,String> db = new ConcurrentHashMap<>();
+  private static Map<String, String> createUserDatabase() {
+    Map<String, String> db = new ConcurrentHashMap<>();
     db.put("user1", USER1_PASSWORD);
     db.put("user2", USER2_PASSWORD);
     return db;
@@ -172,14 +145,15 @@ public class TestCustomSaslAuthenticationProvider {
   }
   /**
-   * A custom token identifier for our custom auth'n method. Unique from the TokenIdentifier
-   * used for delegation tokens.
+   * A custom token identifier for our custom auth'n method. Unique from the TokenIdentifier used
+   * for delegation tokens.
    */
   public static class PasswordAuthTokenIdentifier extends TokenIdentifier {
     public static final Text PASSWORD_AUTH_TOKEN = new Text("HBASE_PASSWORD_TEST_TOKEN");
     private String username;
-    public PasswordAuthTokenIdentifier() {}
+    public PasswordAuthTokenIdentifier() {
+    }
     public PasswordAuthTokenIdentifier(String username) {
       this.username = username;
@@ -209,29 +183,29 @@ public class TestCustomSaslAuthenticationProvider {
     }
   }
-  public static Token<? extends TokenIdentifier> createPasswordToken(
-      String username, String password, String clusterId) {
+  public static Token<? extends TokenIdentifier> createPasswordToken(String username,
+    String password, String clusterId) {
     PasswordAuthTokenIdentifier id = new PasswordAuthTokenIdentifier(username);
-    Token<? extends TokenIdentifier> token = new Token<>(id.getBytes(), Bytes.toBytes(password),
-        id.getKind(), new Text(clusterId));
+    Token<? extends TokenIdentifier> token =
+      new Token<>(id.getBytes(), Bytes.toBytes(password), id.getKind(), new Text(clusterId));
     return token;
   }
   /**
-   * Client provider that finds custom Token in the user's UGI and authenticates with the server
-   * via DIGEST-MD5 using that password.
+   * Client provider that finds custom Token in the user's UGI and authenticates with the server via
+   * DIGEST-MD5 using that password.
    */
   public static class InMemoryClientProvider extends AbstractSaslClientAuthenticationProvider {
     public static final String MECHANISM = "DIGEST-MD5";
-    public static final SaslAuthMethod SASL_AUTH_METHOD = new SaslAuthMethod(
-        "IN_MEMORY", (byte)42, MECHANISM, AuthenticationMethod.TOKEN);
+    public static final SaslAuthMethod SASL_AUTH_METHOD =
+      new SaslAuthMethod("IN_MEMORY", (byte) 42, MECHANISM, AuthenticationMethod.TOKEN);
     @Override
     public SaslClient createClient(Configuration conf, InetAddress serverAddr,
       SecurityInfo securityInfo, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
       Map<String, String> saslProps) throws IOException {
       return Sasl.createSaslClient(new String[] { MECHANISM }, null, null,
         SaslUtil.SASL_DEFAULT_REALM, saslProps, new InMemoryClientProviderCallbackHandler(token));
     }
     public Optional<Token<? extends TokenIdentifier>> findToken(User user) {
@@ -253,11 +227,12 @@ public class TestCustomSaslAuthenticationProvider {
     }
     /**
-     * Sasl CallbackHandler which extracts information from our custom token and places
-     * it into the Sasl objects.
+     * Sasl CallbackHandler which extracts information from our custom token and places it into the
+     * Sasl objects.
      */
     public class InMemoryClientProviderCallbackHandler implements CallbackHandler {
       private final Token<? extends TokenIdentifier> token;
       public InMemoryClientProviderCallbackHandler(Token<? extends TokenIdentifier> token) {
         this.token = token;
       }
@@ -302,21 +277,21 @@ public class TestCustomSaslAuthenticationProvider {
    * Server provider which validates credentials from an in-memory database.
    */
   public static class InMemoryServerProvider extends InMemoryClientProvider
     implements SaslServerAuthenticationProvider {
     @Override
-    public AttemptingUserProvidingSaslServer createServer(
-        SecretManager<TokenIdentifier> secretManager,
-        Map<String, String> saslProps) throws IOException {
+    public AttemptingUserProvidingSaslServer
+      createServer(SecretManager<TokenIdentifier> secretManager, Map<String, String> saslProps)
+        throws IOException {
       return new AttemptingUserProvidingSaslServer(
         Sasl.createSaslServer(getSaslAuthMethod().getSaslMechanism(), null,
           SaslUtil.SASL_DEFAULT_REALM, saslProps, new InMemoryServerProviderCallbackHandler()),
         () -> null);
     }
     /**
-     * Pulls the correct password for the user who started the SASL handshake so that SASL
-     * can validate that the user provided the right password.
+     * Pulls the correct password for the user who started the SASL handshake so that SASL can
+     * validate that the user provided the right password.
      */
     private class InMemoryServerProviderCallbackHandler implements CallbackHandler {
@@ -344,11 +319,11 @@ public class TestCustomSaslAuthenticationProvider {
         try {
           id.readFields(new DataInputStream(new ByteArrayInputStream(encodedId)));
         } catch (IOException e) {
-          throw (InvalidToken) new InvalidToken(
-              "Can't de-serialize tokenIdentifier").initCause(e);
+          throw (InvalidToken) new InvalidToken("Can't de-serialize tokenIdentifier")
+            .initCause(e);
         }
-        char[] actualPassword = SaslUtil.encodePassword(
-            Bytes.toBytes(getPassword(id.getUser().getUserName())));
+        char[] actualPassword =
+          SaslUtil.encodePassword(Bytes.toBytes(getPassword(id.getUser().getUserName())));
         pc.setPassword(actualPassword);
       }
       if (ac != null) {
@@ -373,7 +348,7 @@ public class TestCustomSaslAuthenticationProvider {
     @Override
     public UserGroupInformation getAuthorizedUgi(String authzId,
-        SecretManager<TokenIdentifier> secretManager) throws IOException {
+      SecretManager<TokenIdentifier> secretManager) throws IOException {
       UserGroupInformation authorizedUgi;
       byte[] encodedId = SaslUtil.decodeIdentifier(authzId);
       PasswordAuthTokenIdentifier tokenId = new PasswordAuthTokenIdentifier();
@@ -384,8 +359,7 @@ public class TestCustomSaslAuthenticationProvider {
       }
       authorizedUgi = tokenId.getUser();
       if (authorizedUgi == null) {
-        throw new AccessDeniedException(
-            "Can't retrieve username from tokenIdentifier.");
+        throw new AccessDeniedException("Can't retrieve username from tokenIdentifier.");
       }
       authorizedUgi.addTokenIdentifier(tokenId);
       authorizedUgi.setAuthenticationMethod(getSaslAuthMethod().getAuthMethod());
@@ -394,30 +368,28 @@ public class TestCustomSaslAuthenticationProvider {
   }
   /**
-   * Custom provider which can select our custom provider, amongst other tokens which
-   * may be available.
+   * Custom provider which can select our custom provider, amongst other tokens which may be
+   * available.
    */
   public static class InMemoryProviderSelector extends BuiltInProviderSelector {
     private InMemoryClientProvider inMemoryProvider;
     @Override
     public void configure(Configuration conf,
       Collection<SaslClientAuthenticationProvider> providers) {
       super.configure(conf, providers);
-      Optional<SaslClientAuthenticationProvider> o = providers.stream()
-          .filter((p) -> p instanceof InMemoryClientProvider)
-          .findAny();
-      inMemoryProvider = (InMemoryClientProvider) o.orElseThrow(
-          () -> new RuntimeException("InMemoryClientProvider not found in available providers: "
-              + providers));
+      Optional<SaslClientAuthenticationProvider> o =
+        providers.stream().filter((p) -> p instanceof InMemoryClientProvider).findAny();
+      inMemoryProvider = (InMemoryClientProvider) o.orElseThrow(() -> new RuntimeException(
+        "InMemoryClientProvider not found in available providers: " + providers));
     }
     @Override
-    public Pair<SaslClientAuthenticationProvider, Token<? extends TokenIdentifier>> selectProvider(
-        String clusterId, User user) {
+    public Pair<SaslClientAuthenticationProvider, Token<? extends TokenIdentifier>>
+      selectProvider(String clusterId, User user) {
       Pair<SaslClientAuthenticationProvider, Token<? extends TokenIdentifier>> superPair =
         super.selectProvider(clusterId, user);
       Optional<Token<? extends TokenIdentifier>> optional = inMemoryProvider.findToken(user);
       if (optional.isPresent()) {
@@ -430,21 +402,21 @@ public class TestCustomSaslAuthenticationProvider {
     }
   }
-  static void createBaseCluster(HBaseTestingUtility util, File keytabFile,
-      MiniKdc kdc) throws Exception {
+  private static void createBaseCluster(HBaseTestingUtility util, File keytabFile, MiniKdc kdc)
+    throws Exception {
     String servicePrincipal = "hbase/localhost";
     String spnegoPrincipal = "HTTP/localhost";
     kdc.createPrincipal(keytabFile, servicePrincipal);
     util.startMiniZKCluster();
     HBaseKerberosUtils.setSecuredConfiguration(util.getConfiguration(),
       servicePrincipal + "@" + kdc.getRealm(), spnegoPrincipal + "@" + kdc.getRealm());
     HBaseKerberosUtils.setSSLConfiguration(util, SecureTestCluster.class);
     util.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
       TokenProvider.class.getName());
     util.startMiniDFSCluster(1);
-    Path rootdir = util.getDataTestDirOnTestFS("TestGenerateDelegationToken");
+    Path rootdir = util.getDataTestDirOnTestFS("TestCustomSaslAuthenticationProvider");
     CommonFSUtils.setRootDir(util.getConfiguration(), rootdir);
   }
@@ -453,75 +425,65 @@ public class TestCustomSaslAuthenticationProvider {
   private static LocalHBaseCluster CLUSTER;
   private static File KEYTAB_FILE;
-  @BeforeClass
-  public static void setupCluster() throws Exception {
-    KEYTAB_FILE = new File(
-        UTIL.getDataTestDir("keytab").toUri().getPath());
+  protected static void startCluster(String rpcServerImpl) throws Exception {
+    KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
     final MiniKdc kdc = UTIL.setupMiniKdc(KEYTAB_FILE);
     // Adds our test impls instead of creating service loader entries which
     // might inadvertently get them loaded on a real cluster.
     CONF.setStrings(SaslClientAuthenticationProviders.EXTRA_PROVIDERS_KEY,
       InMemoryClientProvider.class.getName());
     CONF.setStrings(SaslServerAuthenticationProviders.EXTRA_PROVIDERS_KEY,
       InMemoryServerProvider.class.getName());
     CONF.set(SaslClientAuthenticationProviders.SELECTOR_KEY,
       InMemoryProviderSelector.class.getName());
     createBaseCluster(UTIL, KEYTAB_FILE, kdc);
-  }
-  @Before
-  public void setUpBeforeTest() throws Exception {
-    CONF.unset(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY);
-    CONF.set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY, rpcClientImpl);
     CONF.set(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY, rpcServerImpl);
-    if (rpcClientImpl.equals(BlockingRpcClient.class.getName())) {
-      // Set the connection registry to ZKConnectionRegistry since hedging is not supported on
-      // blocking rpc clients.
-      CONF.set(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY,
-          HConstants.ZK_CONNECTION_REGISTRY_CLASS);
-    }
     CLUSTER = new LocalHBaseCluster(CONF, 1);
     CLUSTER.startup();
-    createTable();
   }
   @AfterClass
-  public static void teardownCluster() throws Exception {
+  public static void shutdownCluster() throws Exception {
     if (CLUSTER != null) {
       CLUSTER.shutdown();
       CLUSTER = null;
     }
-    UTIL.shutdownMiniDFSCluster();
     UTIL.shutdownMiniZKCluster();
-    UTIL.cleanupTestDir();
   }
+  @Before
+  public void setUp() throws Exception {
+    createTable();
+  }
   @After
-  public void shutDownCluster() throws IOException {
-    if (CLUSTER != null) {
-      UTIL.deleteTable(name.getTableName());
-      CLUSTER.shutdown();
-    }
+  public void tearDown() throws IOException {
+    UTIL.deleteTable(name.getTableName());
   }
   @Rule
   public TableNameTestRule name = new TableNameTestRule();
-  TableName tableName;
-  String clusterId;
-  public void createTable() throws Exception {
+  private TableName tableName;
+  private String clusterId;
+  private void createTable() throws Exception {
     tableName = name.getTableName();
     // Create a table and write a record as the service user (hbase)
-    UserGroupInformation serviceUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
-        "hbase/localhost", KEYTAB_FILE.getAbsolutePath());
+    UserGroupInformation serviceUgi = UserGroupInformation
+      .loginUserFromKeytabAndReturnUGI("hbase/localhost", KEYTAB_FILE.getAbsolutePath());
     clusterId = serviceUgi.doAs(new PrivilegedExceptionAction<String>() {
-      @Override public String run() throws Exception {
+      @Override
+      public String run() throws Exception {
         try (Connection conn = ConnectionFactory.createConnection(CONF);
           Admin admin = conn.getAdmin();) {
-          admin.createTable(TableDescriptorBuilder
-              .newBuilder(tableName)
-              .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1"))
-              .build());
+          admin.createTable(TableDescriptorBuilder.newBuilder(tableName)
+            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1")).build());
           UTIL.waitTableAvailable(tableName);
@@ -535,21 +497,25 @@ public class TestCustomSaslAuthenticationProvider {
         }
       }
     });
     assertNotNull(clusterId);
   }
+  private Configuration getClientConf() {
+    Configuration conf = new Configuration(CONF);
+    conf.set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY, rpcClientImpl);
+    return conf;
+  }
   @Test
   public void testPositiveAuthentication() throws Exception {
     // Validate that we can read that record back out as the user with our custom auth'n
-    final Configuration clientConf = new Configuration(CONF);
-    UserGroupInformation user1 = UserGroupInformation.createUserForTesting(
-        "user1", new String[0]);
+    UserGroupInformation user1 = UserGroupInformation.createUserForTesting("user1", new String[0]);
     user1.addToken(createPasswordToken("user1", USER1_PASSWORD, clusterId));
     user1.doAs(new PrivilegedExceptionAction<Void>() {
-      @Override public Void run() throws Exception {
-        try (Connection conn = ConnectionFactory.createConnection(clientConf);
-            Table t = conn.getTable(tableName)) {
+      @Override
+      public Void run() throws Exception {
+        try (Connection conn = ConnectionFactory.createConnection(getClientConf());
+          Table t = conn.getTable(tableName)) {
           Result r = t.get(new Get(Bytes.toBytes("r1")));
           assertNotNull(r);
           assertFalse("Should have read a non-empty Result", r.isEmpty());
@@ -565,20 +531,20 @@ public class TestCustomSaslAuthenticationProvider {
   @Test
   public void testNegativeAuthentication() throws Exception {
     // Validate that we can read that record back out as the user with our custom auth'n
-    final Configuration clientConf = new Configuration(CONF);
-    clientConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 3);
-    UserGroupInformation user1 = UserGroupInformation.createUserForTesting(
-        "user1", new String[0]);
+    UserGroupInformation user1 = UserGroupInformation.createUserForTesting("user1", new String[0]);
     user1.addToken(createPasswordToken("user1", "definitely not the password", clusterId));
     user1.doAs(new PrivilegedExceptionAction<Void>() {
-      @Override public Void run() throws Exception {
+      @Override
+      public Void run() throws Exception {
+        Configuration clientConf = getClientConf();
+        clientConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
         // Depending on the registry in use, the following code can throw exceptions at different
         // places. Master registry fails at the createConnection() step because the RPC to the
         // master fails with sasl auth. With ZK registry, connection creation succeeds (since there
         // is no RPC to HBase services involved) but the subsequent get() fails. The root cause
         // should still be a SaslException in both the cases.
         try (Connection conn = ConnectionFactory.createConnection(clientConf);
           Table t = conn.getTable(tableName)) {
           t.get(new Get(Bytes.toBytes("r1")));
           fail("Should not successfully authenticate with HBase");
         } catch (MasterRegistryFetchException mfe) {
@@ -588,8 +554,8 @@ public class TestCustomSaslAuthenticationProvider {
         assertTrue(re.getMessage(), re.getMessage().contains("SaslException"));
       } catch (Exception e) {
         // Any other exception is unexpected.
-        fail("Unexpected exception caught, was expecting a authentication error: "
-            + Throwables.getStackTraceAsString(e));
+        fail("Unexpected exception caught, was expecting a authentication error: " +
+          Throwables.getStackTraceAsString(e));
       }
       return null;
     }


@@ -0,0 +1,43 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.security.provider;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.ipc.NettyRpcServer;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
@Category({ MediumTests.class, SecurityTests.class })
public class TestCustomSaslAuthenticationProviderNettyRpcServer
extends CustomSaslAuthenticationProviderTestBase {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestCustomSaslAuthenticationProviderNettyRpcServer.class);
@BeforeClass
public static void setUpBeforeClass() throws Exception {
startCluster(NettyRpcServer.class.getName());
}
}


@@ -0,0 +1,43 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.security.provider;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.ipc.SimpleRpcServer;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
@Category({ MediumTests.class, SecurityTests.class })
public class TestCustomSaslAuthenticationProviderSimpleRpcServer
extends CustomSaslAuthenticationProviderTestBase {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestCustomSaslAuthenticationProviderSimpleRpcServer.class);
@BeforeClass
public static void setUpBeforeClass() throws Exception {
startCluster(SimpleRpcServer.class.getName());
}
}