HBASE-11318 Classes in security subpackages missing @InterfaceAudience annotations (Jonathan Hsieh and Andrew Purtell)

Andrew Purtell 2014-08-04 11:13:58 -07:00
parent ec5a859b92
commit 2084272d74
29 changed files with 60 additions and 2 deletions
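
The change itself is mechanical. Each file below gains one of the two annotation patterns in this sketch (an illustrative example, not a file from the commit): Private marks HBase-internal code that downstream projects should not depend on, while Public plus an InterfaceStability level marks supported API with an explicit compatibility promise.

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

// Hypothetical classes illustrating the two patterns applied below.
@InterfaceAudience.Private
class SomeInternalHelper {
  // internal code; may change or disappear without notice
}

@InterfaceAudience.Public
@InterfaceStability.Evolving
class SomeUserFacingApi {
  // public API; compatibility may still change between minor releases
}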

org/apache/hadoop/hbase/security/AuthMethod.java

@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase.security;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.DataInput;
@@ -26,6 +27,7 @@ import java.io.DataOutput;
import java.io.IOException;
/** Authentication method */
@InterfaceAudience.Private
public enum AuthMethod {
SIMPLE((byte) 80, "", UserGroupInformation.AuthenticationMethod.SIMPLE),
KERBEROS((byte) 81, "GSSAPI", UserGroupInformation.AuthenticationMethod.KERBEROS),

org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.SaslInputStream;
@@ -52,6 +53,7 @@ import com.google.common.annotations.VisibleForTesting;
* A utility class that encapsulates SASL logic for RPC client.
* Copied from <code>org.apache.hadoop.security</code>
*/
@InterfaceAudience.Private
public class HBaseSaslRpcClient {
public static final Log LOG = LogFactory.getLog(HBaseSaslRpcClient.class);

org/apache/hadoop/hbase/security/SaslStatus.java

@@ -19,6 +19,9 @@
package org.apache.hadoop.hbase.security;
import org.apache.hadoop.classification.InterfaceAudience;
@InterfaceAudience.Private
public enum SaslStatus {
SUCCESS (0),
ERROR (1);

org/apache/hadoop/hbase/security/SaslUtil.java

@@ -19,12 +19,14 @@
package org.apache.hadoop.hbase.security;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.classification.InterfaceAudience;
import java.util.Map;
import java.util.TreeMap;
import javax.security.sasl.Sasl;
@InterfaceAudience.Private
public class SaslUtil {
public static final String SASL_DEFAULT_REALM = "default";
public static final Map<String, String> SASL_PROPS =
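
For context, a minimal sketch (an assumption about intent, not the committed code) of the kind of default SASL property map a utility like this builds; the Sasl.QOP key selects authentication-only, integrity, or confidentiality protection for the RPC connection.

import java.util.Map;
import java.util.TreeMap;
import javax.security.sasl.Sasl;

public class SaslPropsSketch {
  public static void main(String[] args) {
    Map<String, String> props = new TreeMap<String, String>();
    props.put(Sasl.QOP, "auth");          // "auth-int"/"auth-conf" would add integrity/privacy
    props.put(Sasl.SERVER_AUTH, "true");  // require mutual authentication
    System.out.println(props);
  }
}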

org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java

@@ -18,10 +18,12 @@
*/
package org.apache.hadoop.hbase.security;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public class SecureBulkLoadUtil {
private final static String BULKLOAD_STAGING_DIR = "hbase.bulkload.staging.dir";
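
A hedged sketch of how that key is typically consumed; the method name here is illustrative, not this class's actual API.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class BulkLoadStagingSketch {
  private final static String BULKLOAD_STAGING_DIR = "hbase.bulkload.staging.dir";

  // Resolve the staging directory used to hand off HFiles during a
  // secure bulk load; a real implementation would supply a default.
  public static Path getStagingPath(Configuration conf) {
    return new Path(conf.get(BULKLOAD_STAGING_DIR));
  }
}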

org/apache/hadoop/hbase/security/SecurityInfo.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
@@ -29,6 +30,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos;
/**
* Maps RPC protocol interfaces to required configuration
*/
@InterfaceAudience.Private
public class SecurityInfo {
/** Maps RPC service names to authentication information */
private static ConcurrentMap<String,SecurityInfo> infos = new ConcurrentHashMap<String,SecurityInfo>();

org/apache/hadoop/hbase/security/access/Permission.java

@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.security.access;
import com.google.common.collect.Maps;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.VersionedWritable;
@@ -36,6 +38,8 @@ import java.util.Map;
*
* @see TablePermission
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class Permission extends VersionedWritable {
protected static final byte VERSION = 0;
public enum Action {

org/apache/hadoop/hbase/security/access/TablePermission.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.access;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
@@ -34,6 +35,7 @@ import java.io.IOException;
* given table. If the family property is <code>null</code>, it implies
* full table access.
*/
@InterfaceAudience.Private
public class TablePermission extends Permission {
private static Log LOG = LogFactory.getLog(TablePermission.class);

org/apache/hadoop/hbase/security/access/UserPermission.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.access;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
@@ -31,6 +32,7 @@ import java.io.IOException;
* Represents an authorization for access over the given table, column family
* plus qualifier, for the given user.
*/
@InterfaceAudience.Private
public class UserPermission extends TablePermission {
private static Log LOG = LogFactory.getLog(UserPermission.class);

org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java

@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase.security.token;
import com.google.protobuf.ByteString;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
@@ -31,6 +32,7 @@ import java.io.IOException;
/**
* Represents the identity information stored in an HBase authentication token.
*/
@InterfaceAudience.Private
public class AuthenticationTokenIdentifier extends TokenIdentifier {
public static final Text AUTH_TOKEN_TYPE = new Text("HBASE_AUTH_TOKEN");

org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.token;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -27,6 +28,7 @@ import org.apache.hadoop.security.token.TokenSelector;
import java.util.Collection;
@InterfaceAudience.Private
public class AuthenticationTokenSelector
implements TokenSelector<AuthenticationTokenIdentifier> {
private static Log LOG = LogFactory.getLog(AuthenticationTokenSelector.class);
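
A minimal sketch of the selection convention, assumed from Hadoop's TokenSelector contract rather than copied from this class: scan the supplied credentials for a token whose kind is the HBase token type and whose service matches the target cluster.

import java.util.Collection;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenSelectionSketch {
  private static final Text AUTH_TOKEN_TYPE = new Text("HBASE_AUTH_TOKEN");

  @SuppressWarnings("unchecked")
  public static Token<AuthenticationTokenIdentifier> select(Text service,
      Collection<Token<? extends TokenIdentifier>> tokens) {
    for (Token<? extends TokenIdentifier> token : tokens) {
      if (AUTH_TOKEN_TYPE.equals(token.getKind()) && service.equals(token.getService())) {
        return (Token<AuthenticationTokenIdentifier>) token;
      }
    }
    return null; // no matching HBase token in this user's credentials
  }
}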

org/apache/hadoop/hbase/security/visibility/Authorizations.java

@@ -29,7 +29,7 @@ import org.apache.hadoop.classification.InterfaceStability;
* current scan/get can access.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
+@InterfaceStability.Evolving
public class Authorizations {
private List<String> labels;
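
This is the one non-mechanical change in the commit: Authorizations stays Public, but its stability is downgraded from Stable to Evolving. Typical client-side use, as a sketch against the 0.98-era API:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.security.visibility.Authorizations;

public class AuthorizationsSketch {
  // The labels attached to a Scan bound which visibility-protected
  // cells the request is allowed to see.
  public static Scan scanWithAuths() {
    Scan scan = new Scan();
    scan.setAuthorizations(new Authorizations("admin", "audit"));
    return scan;
  }
}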

org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java

@@ -27,7 +27,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
*/
@InterfaceAudience.Private
public class VisibilityLabelsValidator {
// We follow Accumulo parity for valid visibility labels.
private static final boolean[] validAuthChars = new boolean[256];
public static final String regex = "[A-Za-z_\\-\\:\\/\\.0-9]+";
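
The published regex can be exercised directly: only ASCII alphanumerics plus '_', '-', ':', '/', and '.' are legal in a label.

import java.util.regex.Pattern;

public class LabelRegexSketch {
  private static final Pattern VALID_LABEL = Pattern.compile("[A-Za-z_\\-\\:\\/\\.0-9]+");

  public static void main(String[] args) {
    System.out.println(VALID_LABEL.matcher("secret").matches());     // true
    System.out.println(VALID_LABEL.matcher("top secret").matches()); // false: space is not allowed
  }
}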

org/apache/hadoop/hbase/security/UserProvider.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.security;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.BaseConfigurable;
import org.apache.hadoop.security.UserGroupInformation;
@@ -27,6 +28,8 @@ import org.apache.hadoop.util.ReflectionUtils;
/**
* Provide an instance of a user. Allows custom {@link User} creation.
*/
@InterfaceAudience.Private
public class UserProvider extends BaseConfigurable {
private static final String USER_PROVIDER_CONF_KEY = "hbase.client.userprovider.class";
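
A sketch of the pluggable-provider pattern this conf key enables; the factory method below is illustrative, not necessarily the class's real entry point. The configured implementation is instantiated reflectively, defaulting to UserProvider itself.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

public class UserProviderSketch {
  public static UserProvider instantiate(Configuration conf) {
    Class<? extends UserProvider> clazz = conf.getClass(
        "hbase.client.userprovider.class", UserProvider.class, UserProvider.class);
    return ReflectionUtils.newInstance(clazz, conf);
  }
}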

org/apache/hadoop/hbase/security/HBasePolicyProvider.java

@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.security;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
@@ -31,6 +32,7 @@ import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
* Implementation of secure Hadoop policy provider for mapping
* protocol interfaces to hbase-policy.xml entries.
*/
@InterfaceAudience.Private
public class HBasePolicyProvider extends PolicyProvider {
protected final static Service[] services = {
new Service("security.client.protocol.acl", ClientService.BlockingInterface.class),

org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java

@@ -32,6 +32,7 @@ import javax.security.sasl.RealmCallback;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
@@ -43,6 +44,7 @@ import org.apache.hadoop.security.token.SecretManager.InvalidToken;
/**
* A utility class for dealing with SASL on RPC server
*/
@InterfaceAudience.Private
public class HBaseSaslRpcServer {
public static final Log LOG = LogFactory.getLog(HBaseSaslRpcServer.class);

org/apache/hadoop/hbase/security/SecurityUtil.java

@@ -19,11 +19,13 @@
package org.apache.hadoop.hbase.security;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Security related generic utility methods.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class SecurityUtil {
/**

org/apache/hadoop/hbase/security/access/AccessControlFilter.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.security.access;
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
@@ -46,6 +47,7 @@ import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
* access succeeds, then there is no need to impose the overhead of this filter.
* </p>
*/
@InterfaceAudience.Private
class AccessControlFilter extends FilterBase {
public static enum Strategy {

org/apache/hadoop/hbase/security/access/AccessControlLists.java

@@ -33,6 +33,7 @@ import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -94,6 +95,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
* org.apache.hadoop.hbase.security.access.TablePermission.Action enum.
* </p>
*/
@InterfaceAudience.Private
public class AccessControlLists {
/** Internal storage table for access control lists */
public static final TableName ACL_TABLE_NAME =

org/apache/hadoop/hbase/security/access/AccessController.java

@@ -29,6 +29,7 @@ import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
@@ -144,6 +145,7 @@ import com.google.protobuf.Service;
* commands.
* </p>
*/
@InterfaceAudience.Private
public class AccessController extends BaseRegionObserver
implements MasterObserver, RegionServerObserver,
AccessControlService.Interface, CoprocessorService, EndpointObserver {

org/apache/hadoop/hbase/security/access/TableAuthManager.java

@@ -26,6 +26,7 @@ import java.util.concurrent.ConcurrentSkipListMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.TableName;
@@ -43,6 +44,7 @@ import com.google.common.collect.Lists;
/**
* Performs authorization checks for a given user's assigned permissions
*/
@InterfaceAudience.Private
public class TableAuthManager {
private static class PermissionCache<T extends Permission> {
/** Cache of user permissions */

org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.access;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
@@ -40,6 +41,7 @@ import java.util.List;
* instances on all other cluster hosts watch the znodes for updates, which
* trigger updates in the {@link TableAuthManager} permission cache.
*/
@InterfaceAudience.Private
public class ZKPermissionWatcher extends ZooKeeperListener {
private static Log LOG = LogFactory.getLog(ZKPermissionWatcher.class);
// parent node for permissions lists

org/apache/hadoop/hbase/security/token/AuthenticationKey.java

@@ -25,6 +25,7 @@ import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
@@ -33,6 +34,7 @@ import org.apache.hadoop.io.WritableUtils;
* Represents a secret key used for signing and verifying authentication tokens
* by {@link AuthenticationTokenSecretManager}.
*/
@InterfaceAudience.Private
public class AuthenticationKey implements Writable {
private int id;
private long expirationDate;

org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java

@@ -27,6 +27,7 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.util.Bytes;
@@ -54,6 +55,7 @@ import org.apache.zookeeper.KeeperException;
* are no longer needed (as any tokens using them have expired).
* </p>
*/
@InterfaceAudience.Private
public class AuthenticationTokenSecretManager
extends SecretManager<AuthenticationTokenIdentifier> {

org/apache/hadoop/hbase/security/token/TokenProvider.java

@@ -25,6 +25,7 @@ import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
@@ -46,6 +47,7 @@ import org.apache.hadoop.security.token.Token;
* Provides a service for obtaining authentication tokens via the
* {@link AuthenticationProtos} AuthenticationService coprocessor service.
*/
@InterfaceAudience.Private
public class TokenProvider implements AuthenticationProtos.AuthenticationService.Interface,
Coprocessor, CoprocessorService {

org/apache/hadoop/hbase/security/token/TokenUtil.java

@@ -25,6 +25,7 @@ import java.security.PrivilegedExceptionAction;
import com.google.protobuf.ServiceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -41,6 +42,7 @@ import org.apache.hadoop.security.token.Token;
/**
* Utility methods for obtaining authentication tokens.
*/
@InterfaceAudience.Private
public class TokenUtil {
private static Log LOG = LogFactory.getLog(TokenUtil.class);
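
A hedged usage sketch, assuming the 0.98-era static obtainToken(Configuration) helper existed with this signature: a Kerberos-authenticated client asks the TokenProvider coprocessor for a delegation token that later requests (for example MapReduce tasks) can present via digest authentication.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.security.token.Token;

public class TokenUtilSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Assumed API; the exact helper and signature may differ.
    Token<AuthenticationTokenIdentifier> token = TokenUtil.obtainToken(conf);
    System.out.println("token kind: " + token.getKind());
  }
}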

org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java

@@ -23,6 +23,7 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
@@ -33,6 +34,7 @@ import org.apache.zookeeper.KeeperException;
/**
* Synchronizes token encryption keys across cluster nodes.
*/
@InterfaceAudience.Private
public class ZKSecretWatcher extends ZooKeeperListener {
private static final String DEFAULT_ROOT_NODE = "tokenauth";
private static final String DEFAULT_KEYS_PARENT = "keys";

org/apache/hadoop/hbase/security/visibility/expression/Operator.java

@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.hbase.security.visibility.expression;
import org.apache.hadoop.classification.InterfaceAudience;
@InterfaceAudience.Private
public enum Operator {
AND('&'), OR('|'), NOT('!');
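
These operator characters surface in cell visibility expressions. A sketch against the 0.98-era client API (assumed here): a boolean label expression combining labels with &, |, and ! guards who can read a cell.

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

public class VisibilityExpressionSketch {
  public static Put guardedPut() {
    Put put = new Put(Bytes.toBytes("row1"));
    put.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    // Readable only by users holding (secret or topsecret) and not probationary.
    put.setCellVisibility(new CellVisibility("( secret | topsecret ) & !probationary"));
    return put;
  }
}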

org/apache/hadoop/hbase/security/HBaseKerberosUtils.java

@@ -17,12 +17,14 @@
*/
package org.apache.hadoop.hbase.security;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.HBaseConfiguration;
import com.google.common.base.Strings;
@InterfaceAudience.Private
class HBaseKerberosUtils {
public static final String KRB_PRINCIPAL = "hbase.regionserver.kerberos.principal";
public static final String KRB_KEYTAB_FILE = "hbase.regionserver.keytab.file";