HBASE-11449 IntegrationTestIngestWithACL fails to use different users after HBASE-10810 (Ram)
Ramkrishna 2014-07-03 12:03:59 +05:30
parent b79d6bf729
commit 80f0a522f9
5 changed files with 13 additions and 20 deletions

IntegrationTestIngestWithACL.java

@@ -58,6 +58,7 @@ public class IntegrationTestIngestWithACL extends IntegrationTestIngest {
     conf.setInt(HFile.FORMAT_VERSION_KEY, 3);
     conf.set("hbase.coprocessor.master.classes", AccessController.class.getName());
     conf.set("hbase.coprocessor.region.classes", AccessController.class.getName());
+    conf.setBoolean("hbase.security.access.early_out", false);
     // conf.set("hbase.superuser", "admin");
     super.setUpCluster();
   }
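A note on the added flag, which is my reading rather than anything stated in the commit: with the AccessController coprocessor loaded, hbase.security.access.early_out=true lets a read be rejected outright when the user holds no table- or column-family-level grant, while false pushes the check down to per-cell ACLs, so a user can still see individual cells stamped for them. That is the mode this ingest test depends on. A minimal sketch of stamping a cell-level ACL with the 0.98-era client API; the table name "t1" and user "user1" are placeholders, not values from the test:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.security.access.Permission;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CellAclSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "t1"); // placeholder table name
        try {
          Put put = new Put(Bytes.toBytes("row1"));
          put.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
          // Cell-level ACL: only "user1" may read this cell. With
          // early_out=true, a user lacking any table/CF grant would be
          // rejected before this per-cell ACL is ever consulted.
          put.setACL("user1", new Permission(Permission.Action.READ));
          table.put(put);
        } finally {
          table.close();
        }
      }
    }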

LoadTestTool.java

@@ -206,7 +206,7 @@ public class LoadTestTool extends AbstractHBaseTool {
   private String superUser;
-  private String userNames = "user1, user2, user3, user4";
+  private String userNames;
   //This file is used to read authentication information in secure clusters.
   private String authnFileName;
@@ -559,7 +559,7 @@ public class LoadTestTool extends AbstractHBaseTool {
          minColsPerKey, maxColsPerKey, COLUMN_FAMILY);
     }
-    if (User.isHBaseSecurityEnabled(conf) && userOwner != null) {
+    if (userOwner != null) {
       LOG.info("Granting permissions for user " + userOwner.getShortName());
       AccessControlProtos.Permission.Action[] actions = {
           AccessControlProtos.Permission.Action.ADMIN, AccessControlProtos.Permission.Action.CREATE,
@@ -576,20 +576,10 @@ public class LoadTestTool extends AbstractHBaseTool {
       // This will be comma separated list of expressions.
       String users[] = userNames.split(",");
       User user = null;
+      for (String userStr : users) {
         if (User.isHBaseSecurityEnabled(conf)) {
-        for (String userStr : users) {
           user = User.create(loginAndReturnUGI(conf, userStr));
-          LOG.info("Granting READ permission for the user " + user.getShortName());
-          AccessControlProtos.Permission.Action[] actions = { AccessControlProtos.Permission.Action.READ };
-          try {
-            AccessControlClient.grant(conf, tableName, user.getShortName(), null, null, actions);
-          } catch (Throwable e) {
-            LOG.fatal("Error in granting READ permission for the user " + user.getShortName(), e);
-            return EXIT_FAILURE;
-          }
-        }
         } else {
-        for (String userStr : users) {
           user = User.createUserForTesting(conf, userStr, new String[0]);
         }
       }
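For context (my reading, not stated in the commit): the blanket per-user READ grant is dropped from this loop, which now only materializes the User handles; the surviving grant logic is the userOwner block above, which after this change runs even when Kerberos security is off, since AccessController can be active under simple auth too. Should a test still want a table-wide READ grant, the deleted call's 0.98-era shape was as follows; "t1" and "user1" are placeholders:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
    import org.apache.hadoop.hbase.security.access.AccessControlClient;

    public class GrantSketch {
      public static void main(String[] args) throws Throwable {
        Configuration conf = HBaseConfiguration.create();
        TableName tableName = TableName.valueOf("t1"); // placeholder table name
        // A null family and qualifier mean a table-wide grant. grant() goes
        // through the AccessController coprocessor and is declared to throw
        // Throwable, which is why the deleted code caught Throwable.
        AccessControlClient.grant(conf, tableName, "user1", null, null,
            AccessControlProtos.Permission.Action.READ);
      }
    }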

MultiThreadedReaderWithACL.java

@@ -26,6 +26,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.security.User;
@@ -99,7 +100,7 @@ public class MultiThreadedReaderWithACL extends MultiThreadedReader {
         int specialPermCellInsertionFactor = Integer.parseInt(dataGenerator.getArgs()[2]);
         int mod = ((int) keyToRead % userNames.length);
         if (userVsTable.get(userNames[mod]) == null) {
-          localTable = connection.getTable(tableName);
+          localTable = new HTable(conf, tableName);
           userVsTable.put(userNames[mod], localTable);
           result = localTable.get(get);
         } else {
@@ -107,7 +108,6 @@ public class MultiThreadedReaderWithACL extends MultiThreadedReader {
           result = localTable.get(get);
         }
         boolean isNullExpected = ((((int) keyToRead % specialPermCellInsertionFactor)) == 0);
-        LOG.info("Read happening from ACL " + isNullExpected);
         long end = System.nanoTime();
         verifyResultsAndUpdateMetrics(verify, get, end - start, result, localTable, isNullExpected);
       } catch (IOException e) {
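The connection.getTable-to-new-HTable switch in these ACL helpers is, as far as I can tell, the heart of the fix: a table handle from the shared HConnection performs RPCs as whichever principal created that connection, whereas an HTable constructed inside the acting user's context opens its own connection as that user. A minimal sketch of the pattern, with placeholder table and user names:

    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.security.User;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ReadAsUserSketch {
      public static void main(String[] args) throws Exception {
        final Configuration conf = HBaseConfiguration.create();
        final TableName tableName = TableName.valueOf("t1"); // placeholder
        User user = User.createUserForTesting(conf, "user1", new String[0]);
        user.runAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            // Constructed inside runAs, so its RPCs carry "user1".
            HTableInterface table = new HTable(conf, tableName);
            try {
              Result result = table.get(new Get(Bytes.toBytes("row1")));
              // With early_out=false, a cell this user was not granted
              // simply comes back absent instead of failing the read.
              System.out.println("empty result: " + result.isEmpty());
            } finally {
              table.close();
            }
            return null;
          }
        });
      }
    }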

MultiThreadedUpdaterWithACL.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
@@ -116,7 +117,7 @@ public class MultiThreadedUpdaterWithACL extends MultiThreadedUpdater {
       try {
         int mod = ((int) rowKeyBase % userNames.length);
         if (userVsTable.get(userNames[mod]) == null) {
-          localTable = connection.getTable(tableName);
+          localTable = new HTable(conf, tableName);
           userVsTable.put(userNames[mod], localTable);
           res = localTable.get(get);
         } else {
@@ -225,7 +226,7 @@ public class MultiThreadedUpdaterWithACL extends MultiThreadedUpdater {
       public Object run() throws Exception {
         try {
           if (table == null) {
-            table = connection.getTable(tableName);
+            table = new HTable(conf, tableName);
           }
           if (m instanceof Increment) {
             table.increment((Increment) m);

MultiThreadedWriterWithACL.java

@@ -26,6 +26,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
@@ -125,7 +126,7 @@ public class MultiThreadedWriterWithACL extends MultiThreadedWriter {
       public Object run() throws Exception {
         try {
           if (table == null) {
-            table = connection.getTable(tableName);
+            table = new HTable(conf, tableName);
           }
           table.put(put);
         } catch (IOException e) {
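The updater and writer share the same lazy shape: each PrivilegedExceptionAction caches its table on first use, so every worker ends up holding one table opened as its assigned user. A compact sketch of that shape; the class and field names are illustrative, not the test's actual ones:

    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;

    // One action per worker; the table is created lazily inside run() so it
    // is bound to whichever user executes the action via User.runAs().
    class WriteAsUserAction implements PrivilegedExceptionAction<Object> {
      private final Configuration conf;
      private final TableName tableName;
      private final Put put;
      private HTableInterface table; // reused across calls by the same user

      WriteAsUserAction(Configuration conf, TableName tableName, Put put) {
        this.conf = conf;
        this.tableName = tableName;
        this.put = put;
      }

      @Override
      public Object run() throws Exception {
        if (table == null) {
          table = new HTable(conf, tableName); // connection opened as the runAs user
        }
        table.put(put);
        return null;
      }
    }

    // Usage: user.runAs(new WriteAsUserAction(conf, tableName, put));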