HBASE-13755 Provide single super user check implementation (Mikhail Antonov)

Srikanth Srungarapu 2015-06-11 20:51:05 -07:00
parent 8cef99e506
commit 7595bdfb1a
19 changed files with 217 additions and 260 deletions

View File

@ -39,6 +39,9 @@ import org.apache.hadoop.security.UserGroupInformation;
public class AuthUtil {
  private static final Log LOG = LogFactory.getLog(AuthUtil.class);
+  /** Prefix character to denote group names */
+  public static final String GROUP_PREFIX = "@";
  private AuthUtil() {
    super();
  }
@ -100,4 +103,32 @@ public class AuthUtil {
    return refreshCredentials;
  }
+  /**
+   * Returns whether or not the given name should be interpreted as a group
+   * principal. Currently this simply checks if the name starts with the
+   * special group prefix character ("@").
+   */
+  public static boolean isGroupPrincipal(String name) {
+    return name != null && name.startsWith(GROUP_PREFIX);
+  }
+  /**
+   * Returns the actual name for a group principal (stripped of the
+   * group prefix).
+   */
+  public static String getGroupName(String aclKey) {
+    if (!isGroupPrincipal(aclKey)) {
+      return aclKey;
+    }
+    return aclKey.substring(GROUP_PREFIX.length());
+  }
+  /**
+   * Returns the group entry with the group prefix for a group principal.
+   */
+  public static String toGroupEntry(String name) {
+    return GROUP_PREFIX + name;
+  }
}
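
The helpers above centralize the "@"-prefix handling that several modules previously duplicated. A quick illustration of how they compose (illustration only, not part of the commit; "admins" and "alice" are made-up names):

  AuthUtil.toGroupEntry("admins");        // "@admins"
  AuthUtil.isGroupPrincipal("@admins");   // true
  AuthUtil.isGroupPrincipal("alice");     // false
  AuthUtil.getGroupName("@admins");       // "admins"
  AuthUtil.getGroupName("alice");         // "alice" (non-group names pass through unchanged)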

View File

@ -0,0 +1,103 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.security;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Keeps lists of superusers and super groups loaded from HBase configuration,
* checks if certain user is regarded as superuser.
*/
@InterfaceAudience.Private
public final class Superusers {
private static final Log LOG = LogFactory.getLog(Superusers.class);
/** Configuration key for superusers */
public static final String SUPERUSER_CONF_KEY = "hbase.superuser";
private static List<String> superUsers;
private static List<String> superGroups;
private Superusers(){}
/**
* Should be called only once to pre-load list of super users and super
* groups from Configuration. This operation is idempotent.
* @param conf configuration to load users from
* @throws IOException if unable to initialize lists of superusers or super groups
* @throws IllegalStateException if current user is null
*/
public static void initialize(Configuration conf) throws IOException {
superUsers = new ArrayList<>();
superGroups = new ArrayList<>();
User user = User.getCurrent();
if (user == null) {
throw new IllegalStateException("Unable to obtain the current user, "
+ "authorization checks for internal operations will not work correctly!");
}
if (LOG.isTraceEnabled()) {
LOG.trace("Current user name is " + user.getShortName());
}
String currentUser = user.getShortName();
String[] superUserList = conf.getStrings(SUPERUSER_CONF_KEY, new String[0]);
for (String name : superUserList) {
if (AuthUtil.isGroupPrincipal(name)) {
superGroups.add(AuthUtil.getGroupName(name));
} else {
superUsers.add(name);
}
}
superUsers.add(currentUser);
}
/**
* @return true if current user is a super user (whether as user running process,
* declared as individual superuser or member of supergroup), false otherwise.
* @param user to check
* @throws IllegalStateException if lists of superusers/super groups
* haven't been initialized properly
*/
public static boolean isSuperUser(User user) {
if (superUsers == null) {
throw new IllegalStateException("Super users/super groups lists"
+ " haven't been initialized properly.");
}
if (superUsers.contains(user.getShortName())) {
return true;
}
for (String group : user.getGroupNames()) {
if (superGroups.contains(group)) {
return true;
}
}
return false;
}
}
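
A minimal usage sketch of the new Superusers API (illustration only, not part of this commit; the user and group names are invented and exception handling is omitted):

  Configuration conf = HBaseConfiguration.create();
  // Plain names are individual superusers; the "@" prefix (AuthUtil.GROUP_PREFIX) marks a super group.
  conf.set(Superusers.SUPERUSER_CONF_KEY, "hbase_admin,@hbase_admins");
  // Load the lists once at startup; in this commit HRegionServer does this in its constructor.
  Superusers.initialize(conf);
  // Afterwards any caller can ask whether a User is a superuser, e.g. to route it to ADMIN_QOS.
  boolean admin = Superusers.isSuperUser(User.getCurrent());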

View File

@ -228,7 +228,8 @@ public abstract class User {
   */
  public static User createUserForTesting(Configuration conf,
      String name, String[] groups) {
-    return SecureHadoopUser.createUserForTesting(conf, name, groups);
+    User userForTesting = SecureHadoopUser.createUserForTesting(conf, name, groups);
+    return userForTesting;
  }
  /**

View File

@ -17,11 +17,8 @@
 */
package org.apache.hadoop.hbase.regionserver;
-import java.io.IOException;
import java.lang.reflect.Method;
import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
@ -52,9 +49,8 @@ import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.Message;
import com.google.protobuf.TextFormat;
+import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.security.visibility.VisibilityUtils;
-import org.apache.hadoop.hbase.util.Pair;
/**
 * Reads special method annotations and table names to figure a priority for use by QoS facility in
@ -110,11 +106,6 @@ class AnnotationReadingPriorityFunction implements PriorityFunction {
  private final float scanVirtualTimeWeight;
-  // lists of super users and super groups, used to route rpc calls made by
-  // superusers through high-priority (ADMIN_QOS) thread pool.
-  // made protected for tests
-  protected final HashSet<String> superUsers;
-  protected final HashSet<String> superGroups;
  /**
   * Calls {@link #AnnotationReadingPriorityFunction(RSRpcServices, Class)} using the result of
   * {@code rpcServices#getClass()}
@ -168,16 +159,6 @@ class AnnotationReadingPriorityFunction implements PriorityFunction {
    Configuration conf = rpcServices.getConfiguration();
    scanVirtualTimeWeight = conf.getFloat(SCAN_VTIME_WEIGHT_CONF_KEY, 1.0f);
-    try {
-      // TODO Usage of VisibilityUtils API to be avoided with HBASE-13755
-      Pair<List<String>, List<String>> pair = VisibilityUtils.getSystemAndSuperUsers(rpcServices
-          .getConfiguration());
-      superUsers = new HashSet<>(pair.getFirst());
-      superGroups = new HashSet<>(pair.getSecond());
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
  }
  private String capitalize(final String s) {
@ -203,8 +184,15 @@ class AnnotationReadingPriorityFunction implements PriorityFunction {
    }
    // all requests executed by super users have high QoS
-    if (isExecutedBySuperUser(user)) {
-      return HConstants.ADMIN_QOS;
+    try {
+      if (Superusers.isSuperUser(user)) {
+        return HConstants.ADMIN_QOS;
+      }
+    } catch (IllegalStateException ex) {
+      // Not good throwing an exception out of here, a runtime anyways. Let the query go into the
+      // server and have it throw the exception if still an issue. Just mark it normal priority.
+      if (LOG.isTraceEnabled()) LOG.trace("Marking normal priority after getting exception=" + ex);
+      return HConstants.NORMAL_QOS;
    }
    if (param == null) {
@ -306,24 +294,4 @@ class AnnotationReadingPriorityFunction implements PriorityFunction {
  void setRegionServer(final HRegionServer hrs) {
    this.rpcServices = hrs.getRSRpcServices();
  }
-  /**
-   * @param user user running request
-   * @return true if user is super user, false otherwise
-   */
-  private boolean isExecutedBySuperUser(User user) {
-    if (superUsers.contains(user.getShortName())) {
-      return true;
-    }
-    String[] groups = user.getGroupNames();
-    if (groups != null) {
-      for (String group : groups) {
-        if (superGroups.contains(group)) {
-          return true;
-        }
-      }
-    }
-    return false;
-  }
}

View File

@ -138,6 +138,7 @@ import org.apache.hadoop.hbase.regionserver.handler.RegionReplicaFlushHandler;
import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationLoad;
+import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.trace.SpanReceiverHost;
import org.apache.hadoop.hbase.util.Addressing;
@ -500,6 +501,7 @@ public class HRegionServer extends HasThread implements
    HFile.checkHFileVersion(this.conf);
    checkCodecs(this.conf);
    this.userProvider = UserProvider.instantiate(conf);
+    Superusers.initialize(conf);
    FSUtils.setupShortCircuitRead(this.conf);
    // Disable usage of meta replicas in the regionserver
    this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);

View File

@ -34,6 +34,7 @@ import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HColumnDescriptor;
@ -114,10 +115,6 @@ public class AccessControlLists {
   * Delimiter to separate user, column family, and qualifier in
   * _acl_ table info: column keys */
  public static final char ACL_KEY_DELIMITER = ',';
-  /** Prefix character to denote group names */
-  public static final String GROUP_PREFIX = "@";
-  /** Configuration key for superusers */
-  public static final String SUPERUSER_CONF_KEY = "hbase.superuser";
  private static final Log LOG = LogFactory.getLog(AccessControlLists.class);
@ -619,34 +616,6 @@ public class AccessControlLists {
    }
  }
-  /**
-   * Returns whether or not the given name should be interpreted as a group
-   * principal. Currently this simply checks if the name starts with the
-   * special group prefix character ("@").
-   */
-  public static boolean isGroupPrincipal(String name) {
-    return name != null && name.startsWith(GROUP_PREFIX);
-  }
-  /**
-   * Returns the actual name for a group principal (stripped of the
-   * group prefix).
-   */
-  public static String getGroupName(String aclKey) {
-    if (!isGroupPrincipal(aclKey)) {
-      return aclKey;
-    }
-    return aclKey.substring(GROUP_PREFIX.length());
-  }
-  /**
-   * Returns the group entry with the group prefix for a group principal.
-   */
-  public static String toGroupEntry(String name) {
-    return GROUP_PREFIX + name;
-  }
  public static boolean isNamespaceEntry(String entryName) {
    return entryName.charAt(0) == NAMESPACE_PREFIX;
  }
@ -705,7 +674,7 @@ public class AccessControlLists {
      String groupNames[] = user.getGroupNames();
      if (groupNames != null) {
        for (String group : groupNames) {
-          List<Permission> groupPerms = kvPerms.get(GROUP_PREFIX + group);
+          List<Permission> groupPerms = kvPerms.get(AuthUtil.toGroupEntry(group));
          if (results != null) {
            results.addAll(groupPerms);
          }

View File

@ -96,6 +96,7 @@ import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.security.AccessDeniedException;
+import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.access.Permission.Action;
@ -181,9 +182,6 @@ public class AccessController extends BaseMasterAndRegionObserver
  /** Provider for mapping principal names to Users */
  private UserProvider userProvider;
-  /** The list of users with superuser authority */
-  private List<String> superusers;
  /** if we are active, usually true, only not true if "hbase.security.authorization"
    has been set to false in site configuration */
  boolean authorizationEnabled;
@ -891,7 +889,7 @@ public class AccessController extends BaseMasterAndRegionObserver
      return;
    }
    // Superusers are allowed to store cells unconditionally.
-    if (superusers.contains(user.getShortName())) {
+    if (Superusers.isSuperUser(user)) {
      m.setAttribute(TAG_CHECK_PASSED, TRUE);
      return;
    }
@ -955,11 +953,6 @@ public class AccessController extends BaseMasterAndRegionObserver
    // set the user-provider.
    this.userProvider = UserProvider.instantiate(env.getConfiguration());
-    // set up the list of users with superuser privilege
-    User user = userProvider.getCurrent();
-    superusers = Lists.asList(user.getShortName(),
-      conf.getStrings(AccessControlLists.SUPERUSER_CONF_KEY, new String[0]));
    // If zk is null or IOException while obtaining auth manager,
    // throw RuntimeException so that the coprocessor is unloaded.
    if (zk != null) {
@ -1358,7 +1351,7 @@ public class AccessController extends BaseMasterAndRegionObserver
    } else {
      HRegionInfo regionInfo = region.getRegionInfo();
      if (regionInfo.getTable().isSystemTable()) {
-        isSystemOrSuperUser(regionEnv.getConfiguration());
+        checkSystemOrSuperUser();
      } else {
        requirePermission("preOpen", Action.ADMIN);
      }
@ -2404,20 +2397,15 @@ public class AccessController extends BaseMasterAndRegionObserver
requirePermission("preClose", Action.ADMIN); requirePermission("preClose", Action.ADMIN);
} }
private void isSystemOrSuperUser(Configuration conf) throws IOException { private void checkSystemOrSuperUser() throws IOException {
// No need to check if we're not going to throw // No need to check if we're not going to throw
if (!authorizationEnabled) { if (!authorizationEnabled) {
return; return;
} }
User user = userProvider.getCurrent();
if (user == null) {
throw new IOException("Unable to obtain the current user, " +
"authorization checks for internal operations will not work correctly!");
}
User activeUser = getActiveUser(); User activeUser = getActiveUser();
if (!(superusers.contains(activeUser.getShortName()))) { if (!Superusers.isSuperUser(activeUser)) {
throw new AccessDeniedException("User '" + (user != null ? user.getShortName() : "null") + throw new AccessDeniedException("User '" + (activeUser != null ?
"is not system or super user."); activeUser.getShortName() : "null") + "is not system or super user.");
} }
} }

View File

@ -27,11 +27,13 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.Bytes;
@ -79,13 +81,13 @@ public class TableAuthManager {
  /**
   * Returns a combined map of user and group permissions, with group names prefixed by
-   * {@link AccessControlLists#GROUP_PREFIX}.
+   * {@link AuthUtil#GROUP_PREFIX}.
   */
  public ListMultimap<String,T> getAllPermissions() {
    ListMultimap<String,T> tmp = ArrayListMultimap.create();
    tmp.putAll(userCache);
    for (String group : groupCache.keySet()) {
-      tmp.putAll(AccessControlLists.GROUP_PREFIX + group, groupCache.get(group));
+      tmp.putAll(AuthUtil.toGroupEntry(group), groupCache.get(group));
    }
    return tmp;
  }
@ -139,11 +141,11 @@ public class TableAuthManager {
    // the system user is always included
    List<String> superusers = Lists.asList(currentUser, conf.getStrings(
-      AccessControlLists.SUPERUSER_CONF_KEY, new String[0]));
+      Superusers.SUPERUSER_CONF_KEY, new String[0]));
    if (superusers != null) {
      for (String name : superusers) {
-        if (AccessControlLists.isGroupPrincipal(name)) {
-          newCache.putGroup(AccessControlLists.getGroupName(name),
+        if (AuthUtil.isGroupPrincipal(name)) {
+          newCache.putGroup(AuthUtil.getGroupName(name),
            new Permission(Permission.Action.values()));
        } else {
          newCache.putUser(name, new Permission(Permission.Action.values()));
@ -205,8 +207,8 @@ public class TableAuthManager {
    try {
      newCache = initGlobal(conf);
      for (Map.Entry<String,TablePermission> entry : userPerms.entries()) {
-        if (AccessControlLists.isGroupPrincipal(entry.getKey())) {
-          newCache.putGroup(AccessControlLists.getGroupName(entry.getKey()),
+        if (AuthUtil.isGroupPrincipal(entry.getKey())) {
+          newCache.putGroup(AuthUtil.getGroupName(entry.getKey()),
            new Permission(entry.getValue().getActions()));
        } else {
          newCache.putUser(entry.getKey(), new Permission(entry.getValue().getActions()));
@ -233,8 +235,8 @@ public class TableAuthManager {
    PermissionCache<TablePermission> newTablePerms = new PermissionCache<TablePermission>();
    for (Map.Entry<String,TablePermission> entry : tablePerms.entries()) {
-      if (AccessControlLists.isGroupPrincipal(entry.getKey())) {
-        newTablePerms.putGroup(AccessControlLists.getGroupName(entry.getKey()), entry.getValue());
+      if (AuthUtil.isGroupPrincipal(entry.getKey())) {
+        newTablePerms.putGroup(AuthUtil.getGroupName(entry.getKey()), entry.getValue());
      } else {
        newTablePerms.putUser(entry.getKey(), entry.getValue());
      }
@ -257,8 +259,8 @@ public class TableAuthManager {
    PermissionCache<TablePermission> newTablePerms = new PermissionCache<TablePermission>();
    for (Map.Entry<String, TablePermission> entry : tablePerms.entries()) {
-      if (AccessControlLists.isGroupPrincipal(entry.getKey())) {
-        newTablePerms.putGroup(AccessControlLists.getGroupName(entry.getKey()), entry.getValue());
+      if (AuthUtil.isGroupPrincipal(entry.getKey())) {
+        newTablePerms.putGroup(AuthUtil.getGroupName(entry.getKey()), entry.getValue());
      } else {
        newTablePerms.putUser(entry.getKey(), entry.getValue());
      }

View File

@ -42,6 +42,7 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
@ -59,8 +60,8 @@ import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.regionserver.OperationStatus;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.security.access.AccessControlLists;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
@ -80,8 +81,6 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
  private Region labelsRegion;
  private VisibilityLabelsCache labelsCache;
  private List<ScanLabelGenerator> scanLabelGenerators;
-  private List<String> superUsers;
-  private List<String> superGroups;
  static {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
@ -118,10 +117,6 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
      throw ioe;
    }
    this.scanLabelGenerators = VisibilityUtils.getScanLabelGenerators(this.conf);
-    Pair<List<String>, List<String>> superUsersAndGroups =
-        VisibilityUtils.getSystemAndSuperUsers(this.conf);
-    this.superUsers = superUsersAndGroups.getFirst();
-    this.superGroups = superUsersAndGroups.getSecond();
    if (e.getRegion().getRegionInfo().getTable().equals(LABELS_TABLE_NAME)) {
      this.labelsRegion = e.getRegion();
      Pair<Map<String, Integer>, Map<String, List<Integer>>> labelsAndUserAuths =
@ -266,8 +261,8 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
    assert labelsRegion != null;
    OperationStatus[] finalOpStatus = new OperationStatus[authLabels.size()];
    List<String> currentAuths;
-    if (AccessControlLists.isGroupPrincipal(Bytes.toString(user))) {
-      String group = AccessControlLists.getGroupName(Bytes.toString(user));
+    if (AuthUtil.isGroupPrincipal(Bytes.toString(user))) {
+      String group = AuthUtil.getGroupName(Bytes.toString(user));
      currentAuths = this.getGroupAuths(new String[]{group}, true);
    }
    else {
@ -308,7 +303,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
  private boolean mutateLabelsRegion(List<Mutation> mutations, OperationStatus[] finalOpStatus)
      throws IOException {
    OperationStatus[] opStatus = this.labelsRegion.batchMutate(mutations
        .toArray(new Mutation[mutations.size()]), HConstants.NO_NONCE, HConstants.NO_NONCE);
    int i = 0;
    boolean updateZk = false;
    for (OperationStatus status : opStatus) {
@ -369,7 +364,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
    Scan s = new Scan();
    if (groups != null && groups.length > 0) {
      for (String group : groups) {
-        s.addColumn(LABELS_TABLE_FAMILY, Bytes.toBytes(AccessControlLists.toGroupEntry(group)));
+        s.addColumn(LABELS_TABLE_FAMILY, Bytes.toBytes(AuthUtil.toGroupEntry(group)));
      }
    }
    Filter filter = VisibilityUtils.createVisibilityLabelFilter(this.labelsRegion,
@ -539,7 +534,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
  @Override
  public boolean havingSystemAuth(User user) throws IOException {
    // A super user has 'system' auth.
-    if (isSystemOrSuperUser(user)) {
+    if (Superusers.isSuperUser(user)) {
      return true;
    }
    // A user can also be explicitly granted 'system' auth.
@ -557,21 +552,6 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
    return auths.contains(SYSTEM_LABEL);
  }
-  private boolean isSystemOrSuperUser(User user) throws IOException {
-    if (this.superUsers.contains(user.getShortName())) {
-      return true;
-    }
-    String[] groups = user.getGroupNames();
-    if (groups != null && groups.length > 0) {
-      for (String group : groups) {
-        if (this.superGroups.contains(group)) {
-          return true;
-        }
-      }
-    }
-    return false;
-  }
  @Override
  public boolean matchVisibility(List<Tag> putVisTags, Byte putTagsFormat, List<Tag> deleteVisTags,
      Byte deleteTagsFormat) throws IOException {

View File

@ -34,6 +34,7 @@ import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
@ -97,8 +98,8 @@ import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.security.AccessDeniedException;
+import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.security.access.AccessControlLists;
import org.apache.hadoop.hbase.security.access.AccessController;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
@ -133,8 +134,6 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements
  private Map<InternalScanner,String> scannerOwners =
      new MapMaker().weakKeys().makeMap();
-  private List<String> superUsers;
-  private List<String> superGroups;
  private VisibilityLabelService visibilityLabelService;
  /** if we are active, usually true, only not true if "hbase.security.authorization"
@ -173,10 +172,6 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements
      visibilityLabelService = VisibilityLabelServiceManager.getInstance()
          .getVisibilityLabelService(this.conf);
    }
-    Pair<List<String>, List<String>> superUsersAndGroups =
-        VisibilityUtils.getSystemAndSuperUsers(this.conf);
-    this.superUsers = superUsersAndGroups.getFirst();
-    this.superGroups = superUsersAndGroups.getSecond();
  }
  @Override
@ -687,19 +682,7 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements
  }
  private boolean isSystemOrSuperUser() throws IOException {
-    User activeUser = VisibilityUtils.getActiveUser();
-    if (this.superUsers.contains(activeUser.getShortName())) {
-      return true;
-    }
-    String[] groups = activeUser.getGroupNames();
-    if (groups != null && groups.length > 0) {
-      for (String group : groups) {
-        if (this.superGroups.contains(group)) {
-          return true;
-        }
-      }
-    }
-    return false;
+    return Superusers.isSuperUser(VisibilityUtils.getActiveUser());
  }
  @Override
@ -934,8 +917,8 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements
            + (requestingUser != null ? requestingUser.getShortName() : "null")
            + "' is not authorized to perform this action.");
      }
-      if (AccessControlLists.isGroupPrincipal(Bytes.toString(user))) {
-        String group = AccessControlLists.getGroupName(Bytes.toString(user));
+      if (AuthUtil.isGroupPrincipal(Bytes.toString(user))) {
+        String group = AuthUtil.getGroupName(Bytes.toString(user));
        labels = this.visibilityLabelService.getGroupAuths(new String[]{group}, false);
      }
      else {

View File

@ -30,12 +30,12 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel;
-import org.apache.hadoop.hbase.security.access.AccessControlLists;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
@ -145,8 +145,8 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
    this.groupAuths.clear();
    for (UserAuthorizations userAuths : multiUserAuths.getUserAuthsList()) {
      String user = Bytes.toString(userAuths.getUser().toByteArray());
-      if (AccessControlLists.isGroupPrincipal(user)) {
-        this.groupAuths.put(AccessControlLists.getGroupName(user),
+      if (AuthUtil.isGroupPrincipal(user)) {
+        this.groupAuths.put(AuthUtil.getGroupName(user),
          new HashSet<Integer>(userAuths.getAuthList()));
      } else {
        this.userAuths.put(user, new HashSet<Integer>(userAuths.getAuthList()));

View File

@ -53,7 +53,6 @@ import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.Visibil
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.security.access.AccessControlLists;
import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode;
@ -61,7 +60,6 @@ import org.apache.hadoop.hbase.security.visibility.expression.Operator;
import org.apache.hadoop.hbase.util.ByteRange;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
import org.apache.hadoop.util.ReflectionUtils;
@ -102,38 +100,6 @@ public class VisibilityUtils {
    return ProtobufUtil.prependPBMagic(visReqBuilder.build().toByteArray());
  }
-  /**
-   * Get the super users and groups defined in the configuration.
-   * The user running the hbase server is always included.
-   * @param conf
-   * @return Pair of super user list and super group list.
-   * @throws IOException
-   */
-  public static Pair<List<String>, List<String>> getSystemAndSuperUsers(Configuration conf)
-      throws IOException {
-    ArrayList<String> superUsers = new ArrayList<String>();
-    ArrayList<String> superGroups = new ArrayList<String>();
-    User user = User.getCurrent();
-    if (user == null) {
-      throw new IOException("Unable to obtain the current user, "
-          + "authorization checks for internal operations will not work correctly!");
-    }
-    if (LOG.isTraceEnabled()) {
-      LOG.trace("Current user name is " + user.getShortName());
-    }
-    String currentUser = user.getShortName();
-    String[] superUserList = conf.getStrings(AccessControlLists.SUPERUSER_CONF_KEY, new String[0]);
-    for (String name : superUserList) {
-      if (AccessControlLists.isGroupPrincipal(name)) {
-        superGroups.add(AccessControlLists.getGroupName(name));
-      } else {
-        superUsers.add(name);
-      }
-    }
-    superUsers.add(currentUser);
-    return new Pair<List<String>, List<String>>(superUsers, superGroups);
-  }
  /**
   * Creates the user auth data to be written to zookeeper.
   * @param userAuths

View File

@ -24,6 +24,7 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
+import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@ -122,13 +123,15 @@ public class TestPriorityRpc {
    PriorityFunction qosFunc = regionServer.rpcServices.getPriority();
    //test superusers
-    ((AnnotationReadingPriorityFunction) qosFunc).superUsers.add("samplesuperuser");
+    regionServer.conf.set(Superusers.SUPERUSER_CONF_KEY, "samplesuperuser");
+    Superusers.initialize(regionServer.conf);
    assertEquals(HConstants.ADMIN_QOS, qosFunc.getPriority(header, null,
      User.createUserForTesting(regionServer.conf, "samplesuperuser",
        new String[]{"somegroup"})));
    //test supergroups
-    ((AnnotationReadingPriorityFunction) qosFunc).superGroups.add("samplesupergroup");
+    regionServer.conf.set(Superusers.SUPERUSER_CONF_KEY, "@samplesupergroup");
+    Superusers.initialize(regionServer.conf);
    assertEquals(HConstants.ADMIN_QOS, qosFunc.getPriority(header, null,
      User.createUserForTesting(regionServer.conf, "regularuser",
        new String[]{"samplesupergroup"})));

View File

@ -1,4 +1,3 @@
-package org.apache.hadoop.hbase.regionserver;
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
@ -16,19 +15,23 @@ package org.apache.hadoop.hbase.regionserver;
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
+package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.when;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.security.Superusers;
+import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos;
-import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
@ -40,6 +43,8 @@ import org.mockito.Mockito;
import com.google.protobuf.Message;
+import java.io.IOException;
/**
 * Basic test that qos function is sort of working; i.e. a change in method naming style
 * over in pb doesn't break it.
@ -66,8 +71,9 @@ public class TestQosFunction {
  }
  @Test
-  public void testRegionInTransition() {
+  public void testRegionInTransition() throws IOException {
    Configuration conf = HBaseConfiguration.create();
+    Superusers.initialize(conf);
    RSRpcServices rpcServices = Mockito.mock(RSRpcServices.class);
    when(rpcServices.getConfiguration()).thenReturn(conf);

View File

@ -719,10 +719,6 @@ public class SecureTestUtil {
    return AccessControlLists.NAMESPACE_PREFIX + namespace;
  }
-  public static String convertToGroup(String group) {
-    return AccessControlLists.GROUP_PREFIX + group;
-  }
  public static void checkGlobalPerms(HBaseTestingUtility testUtil, Permission.Action... actions)
      throws IOException {
    Permission[] perms = new Permission[actions.length];

View File

@ -17,6 +17,7 @@
 */
package org.apache.hadoop.hbase.security.access;
+import static org.apache.hadoop.hbase.AuthUtil.toGroupEntry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@ -284,10 +285,10 @@ public class TestAccessController extends SecureTestUtil {
      TEST_TABLE, TEST_FAMILY,
      null, Permission.Action.ADMIN, Permission.Action.CREATE);
-    grantGlobal(TEST_UTIL, convertToGroup(GROUP_ADMIN), Permission.Action.ADMIN);
-    grantGlobal(TEST_UTIL, convertToGroup(GROUP_CREATE), Permission.Action.CREATE);
-    grantGlobal(TEST_UTIL, convertToGroup(GROUP_READ), Permission.Action.READ);
-    grantGlobal(TEST_UTIL, convertToGroup(GROUP_WRITE), Permission.Action.WRITE);
+    grantGlobal(TEST_UTIL, toGroupEntry(GROUP_ADMIN), Permission.Action.ADMIN);
+    grantGlobal(TEST_UTIL, toGroupEntry(GROUP_CREATE), Permission.Action.CREATE);
+    grantGlobal(TEST_UTIL, toGroupEntry(GROUP_READ), Permission.Action.READ);
+    grantGlobal(TEST_UTIL, toGroupEntry(GROUP_WRITE), Permission.Action.WRITE);
    assertEquals(5, AccessControlLists.getTablePermissions(conf, TEST_TABLE).size());
    try {

View File

@ -17,6 +17,7 @@
 */
package org.apache.hadoop.hbase.security.access;
+import static org.apache.hadoop.hbase.AuthUtil.toGroupEntry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@ -109,7 +110,7 @@ public class TestAccessController2 extends SecureTestUtil {
    // Wait for the ACL table to become available
    TEST_UTIL.waitUntilAllRegionsAssigned(AccessControlLists.ACL_TABLE_NAME);
-    TESTGROUP_1_NAME = convertToGroup(TESTGROUP_1);
+    TESTGROUP_1_NAME = toGroupEntry(TESTGROUP_1);
    TESTGROUP1_USER1 =
      User.createUserForTesting(conf, "testgroup1_user1", new String[] { TESTGROUP_1 });
    TESTGROUP2_USER1 =

View File

@ -17,6 +17,7 @@
 */
package org.apache.hadoop.hbase.security.access;
+import static org.apache.hadoop.hbase.AuthUtil.toGroupEntry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@ -164,10 +165,10 @@ public class TestNamespaceCommands extends SecureTestUtil {
    grantOnNamespace(UTIL, USER_NS_ADMIN.getShortName(), TEST_NAMESPACE2, Permission.Action.ADMIN);
-    grantGlobal(UTIL, convertToGroup(GROUP_ADMIN), Permission.Action.ADMIN);
-    grantGlobal(UTIL, convertToGroup(GROUP_CREATE), Permission.Action.CREATE);
-    grantGlobal(UTIL, convertToGroup(GROUP_READ), Permission.Action.READ);
-    grantGlobal(UTIL, convertToGroup(GROUP_WRITE), Permission.Action.WRITE);
+    grantGlobal(UTIL, toGroupEntry(GROUP_ADMIN), Permission.Action.ADMIN);
+    grantGlobal(UTIL, toGroupEntry(GROUP_CREATE), Permission.Action.CREATE);
+    grantGlobal(UTIL, toGroupEntry(GROUP_READ), Permission.Action.READ);
+    grantGlobal(UTIL, toGroupEntry(GROUP_WRITE), Permission.Action.WRITE);
  }
  @AfterClass

View File

@ -35,6 +35,7 @@ import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
@ -51,16 +52,14 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.OperationStatus;
import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.security.access.AccessControlLists;
import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.Operator;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.common.collect.Lists;
/**
 * This is a VisibilityLabelService where labels in Mutation's visibility
 * expression will be persisted as Strings itself rather than ordinals in
@ -82,8 +81,6 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
  private Configuration conf;
  private Region labelsRegion;
  private List<ScanLabelGenerator> scanLabelGenerators;
-  private List<String> superUsers;
-  private List<String> superGroups;
  @Override
  public OperationStatus[] addLabels(List<byte[]> labels) throws IOException {
@ -118,8 +115,8 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
    assert labelsRegion != null;
    OperationStatus[] finalOpStatus = new OperationStatus[authLabels.size()];
    List<String> currentAuths;
-    if (AccessControlLists.isGroupPrincipal(Bytes.toString(user))) {
-      String group = AccessControlLists.getGroupName(Bytes.toString(user));
+    if (AuthUtil.isGroupPrincipal(Bytes.toString(user))) {
+      String group = AuthUtil.getGroupName(Bytes.toString(user));
      currentAuths = this.getGroupAuths(new String[]{group}, true);
    }
    else {
@ -190,7 +187,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
    List<String> auths = new ArrayList<String>();
    if (groups != null && groups.length > 0) {
      for (String group : groups) {
-        Get get = new Get(Bytes.toBytes(AccessControlLists.toGroupEntry(group)));
+        Get get = new Get(Bytes.toBytes(AuthUtil.toGroupEntry(group)));
        List<Cell> cells = null;
        if (labelsRegion == null) {
          Table table = null;
@ -393,55 +390,14 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
  @Override
  public void init(RegionCoprocessorEnvironment e) throws IOException {
    this.scanLabelGenerators = VisibilityUtils.getScanLabelGenerators(this.conf);
-    initSystemAndSuperUsers();
    if (e.getRegion().getRegionInfo().getTable().equals(LABELS_TABLE_NAME)) {
      this.labelsRegion = e.getRegion();
    }
  }
-  private void initSystemAndSuperUsers() throws IOException {
-    this.superUsers = new ArrayList<String>();
-    this.superGroups = new ArrayList<String>();
-    User user = User.getCurrent();
-    if (user == null) {
-      throw new IOException("Unable to obtain the current user, "
-          + "authorization checks for internal operations will not work correctly!");
-    }
-    if (LOG.isTraceEnabled()) {
-      LOG.trace("Current user name is " + user.getShortName());
-    }
-    String currentUser = user.getShortName();
-    List<String> superUserList = Lists.asList(currentUser,
-        this.conf.getStrings(AccessControlLists.SUPERUSER_CONF_KEY, new String[0]));
-    if (superUserList != null) {
-      for (String name : superUserList) {
-        if (AccessControlLists.isGroupPrincipal(name)) {
-          this.superGroups.add(AccessControlLists.getGroupName(name));
-        } else {
-          this.superUsers.add(name);
-        }
-      }
-    };
-  }
-  protected boolean isSystemOrSuperUser(User user) throws IOException {
-    if (this.superUsers.contains(user.getShortName())) {
-      return true;
-    }
-    String[] groups = user.getGroupNames();
-    if (groups != null) {
-      for (String group : groups) {
-        if (this.superGroups.contains(group)) {
-          return true;
-        }
-      }
-    }
-    return false;
-  }
  @Override
  public boolean havingSystemAuth(User user) throws IOException {
-    if (isSystemOrSuperUser(user)) {
+    if (Superusers.isSuperUser(user)) {
      return true;
    }
    Set<String> auths = new HashSet<String>();