HDFS-3724. add InterfaceAudience annotations to HttpFS classes and making inner enum static. (tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1368308 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Alejandro Abdelnur 2012-08-01 23:14:08 +00:00
parent fe17d871d0
commit 08e8966217
56 changed files with 165 additions and 4 deletions

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.client;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.DelegationTokenRenewer;
@ -68,6 +69,7 @@
* <p/>
* This implementation allows a user to access HDFS over HTTP via a HttpFSServer server.
*/
@InterfaceAudience.Private
public class HttpFSFileSystem extends FileSystem
implements DelegationTokenRenewer.Renewable {
@ -160,7 +162,8 @@ public static FILE_TYPE getType(FileStatus fileStatus) {
private static final String HTTP_POST = "POST";
private static final String HTTP_DELETE = "DELETE";
public enum Operation {
@InterfaceAudience.Private
public static enum Operation {
OPEN(HTTP_GET), GETFILESTATUS(HTTP_GET), LISTSTATUS(HTTP_GET),
GETHOMEDIRECTORY(HTTP_GET), GETCONTENTSUMMARY(HTTP_GET),
GETFILECHECKSUM(HTTP_GET), GETFILEBLOCKLOCATIONS(HTTP_GET),

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.client;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
@ -43,6 +44,7 @@
* A <code>KerberosAuthenticator</code> subclass that falls back to
* {@link HttpFSPseudoAuthenticator}.
*/
@InterfaceAudience.Private
public class HttpFSKerberosAuthenticator extends KerberosAuthenticator {
/**
@ -71,6 +73,7 @@ protected Authenticator getFallBackAuthenticator() {
/**
* DelegationToken operations.
*/
@InterfaceAudience.Private
public static enum DelegationTokenOperation {
GETDELEGATIONTOKEN(HTTP_GET, true),
GETDELEGATIONTOKENS(HTTP_GET, true),

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.client;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
@ -27,6 +28,7 @@
* A <code>PseudoAuthenticator</code> subclass that uses FileSystemAccess's
* <code>UserGroupInformation</code> to obtain the client user name (the UGI's login user).
*/
@InterfaceAudience.Private
public class HttpFSPseudoAuthenticator extends PseudoAuthenticator {
/**

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.client;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.Path;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
@ -35,6 +36,7 @@
/**
* Utility methods used by HttpFS classes.
*/
@InterfaceAudience.Private
public class HttpFSUtils {
public static final String SERVICE_NAME = "/webhdfs";

View File

@ -19,6 +19,7 @@
package org.apache.hadoop.fs.http.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import javax.servlet.Filter;
@ -37,6 +38,7 @@
* Filter that enforces the content-type to be application/octet-stream for
* POST and PUT requests.
*/
@InterfaceAudience.Private
public class CheckUploadContentTypeFilter implements Filter {
private static final Set<String> UPLOAD_OPERATIONS = new HashSet<String>();

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
@ -40,6 +41,7 @@
/**
* FileSystem operation executors used by {@link HttpFSServer}.
*/
@InterfaceAudience.Private
public class FSOperations {
@SuppressWarnings({"unchecked", "deprecation"})
@ -160,6 +162,7 @@ private static JSONObject toJSON(String name, Object value) {
/**
* Executor that performs an append FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSAppend implements FileSystemAccess.FileSystemExecutor<Void> {
private InputStream is;
private Path path;
@ -198,6 +201,7 @@ public Void execute(FileSystem fs) throws IOException {
/**
* Executor that performs a content-summary FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSContentSummary implements FileSystemAccess.FileSystemExecutor<Map> {
private Path path;
@ -230,6 +234,7 @@ public Map execute(FileSystem fs) throws IOException {
/**
* Executor that performs a create FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSCreate implements FileSystemAccess.FileSystemExecutor<Void> {
private InputStream is;
private Path path;
@ -288,6 +293,7 @@ public Void execute(FileSystem fs) throws IOException {
/**
* Executor that performs a delete FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSDelete implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
private boolean recursive;
@ -324,6 +330,7 @@ public JSONObject execute(FileSystem fs) throws IOException {
/**
* Executor that performs a file-checksum FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSFileChecksum implements FileSystemAccess.FileSystemExecutor<Map> {
private Path path;
@ -356,6 +363,7 @@ public Map execute(FileSystem fs) throws IOException {
/**
* Executor that performs a file-status FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSFileStatus implements FileSystemAccess.FileSystemExecutor<Map> {
private Path path;
@ -388,6 +396,7 @@ public Map execute(FileSystem fs) throws IOException {
/**
* Executor that performs a home-dir FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSHomeDir implements FileSystemAccess.FileSystemExecutor<JSONObject> {
/**
@ -413,6 +422,7 @@ public JSONObject execute(FileSystem fs) throws IOException {
/**
* Executor that performs a list-status FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSListStatus implements FileSystemAccess.FileSystemExecutor<Map>, PathFilter {
private Path path;
private PathFilter filter;
@ -456,6 +466,7 @@ public boolean accept(Path path) {
/**
* Executor that performs a mkdirs FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSMkdirs implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
@ -494,6 +505,7 @@ public JSONObject execute(FileSystem fs) throws IOException {
/**
* Executor that performs an open FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSOpen implements FileSystemAccess.FileSystemExecutor<InputStream> {
private Path path;
@ -526,6 +538,7 @@ public InputStream execute(FileSystem fs) throws IOException {
/**
* Executor that performs a rename FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSRename implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
private Path toPath;
@ -562,6 +575,7 @@ public JSONObject execute(FileSystem fs) throws IOException {
/**
* Executor that performs a set-owner FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSSetOwner implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
private String owner;
@ -600,6 +614,7 @@ public Void execute(FileSystem fs) throws IOException {
/**
* Executor that performs a set-permission FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSSetPermission implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
@ -637,6 +652,7 @@ public Void execute(FileSystem fs) throws IOException {
/**
* Executor that performs a set-replication FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSSetReplication implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
private short replication;
@ -676,6 +692,7 @@ public JSONObject execute(FileSystem fs) throws IOException {
/**
* Executor that performs a set-times FileSystemAccess file system operation.
*/
@InterfaceAudience.Private
public static class FSSetTimes implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
private long mTime;

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import javax.servlet.FilterConfig;
@ -30,6 +31,7 @@
* Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its configuration
* from HttpFSServer's server configuration.
*/
@InterfaceAudience.Private
public class HttpFSAuthenticationFilter extends AuthenticationFilter {
private static final String CONF_PREFIX = "httpfs.authentication.";

View File

@ -19,6 +19,7 @@
package org.apache.hadoop.fs.http.server;
import com.sun.jersey.api.container.ContainerException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.service.FileSystemAccessException;
import org.apache.hadoop.lib.wsrs.ExceptionProvider;
import org.slf4j.Logger;
@ -35,6 +36,7 @@
* exceptions to HTTP status codes.
*/
@Provider
@InterfaceAudience.Private
public class HttpFSExceptionProvider extends ExceptionProvider {
private static Logger AUDIT_LOG = LoggerFactory.getLogger("httpfsaudit");
private static Logger LOG = LoggerFactory.getLogger(HttpFSExceptionProvider.class);

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator.DelegationTokenOperation;
@ -52,6 +53,7 @@
* If no delegation token is present in the request it delegates to the
* {@link KerberosAuthenticationHandler}
*/
@InterfaceAudience.Private
public class HttpFSKerberosAuthenticationHandler
extends KerberosAuthenticationHandler {

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem.Operation;
import org.apache.hadoop.lib.wsrs.BooleanParam;
@ -38,6 +39,7 @@
* HttpFS ParametersProvider.
*/
@Provider
@InterfaceAudience.Private
public class HttpFSParametersProvider extends ParametersProvider {
private static final Map<Enum, Class<Param<?>>[]> PARAMS_DEF =
@ -85,6 +87,7 @@ public HttpFSParametersProvider() {
/**
* Class for access-time parameter.
*/
@InterfaceAudience.Private
public static class AccessTimeParam extends LongParam {
/**
@ -102,6 +105,7 @@ public AccessTimeParam() {
/**
* Class for block-size parameter.
*/
@InterfaceAudience.Private
public static class BlockSizeParam extends LongParam {
/**
@ -120,6 +124,7 @@ public BlockSizeParam() {
/**
* Class for data parameter.
*/
@InterfaceAudience.Private
public static class DataParam extends BooleanParam {
/**
@ -138,6 +143,7 @@ public DataParam() {
/**
* Class for operation parameter.
*/
@InterfaceAudience.Private
public static class OperationParam extends EnumParam<HttpFSFileSystem.Operation> {
/**
@ -156,6 +162,7 @@ public OperationParam(String operation) {
/**
* Class for delete's recursive parameter.
*/
@InterfaceAudience.Private
public static class RecursiveParam extends BooleanParam {
/**
@ -174,6 +181,7 @@ public RecursiveParam() {
/**
* Class for do-as parameter.
*/
@InterfaceAudience.Private
public static class DoAsParam extends StringParam {
/**
@ -208,6 +216,7 @@ public String parseParam(String str) {
/**
* Class for filter parameter.
*/
@InterfaceAudience.Private
public static class FilterParam extends StringParam {
/**
@ -227,6 +236,7 @@ public FilterParam() {
/**
* Class for group parameter.
*/
@InterfaceAudience.Private
public static class GroupParam extends StringParam {
/**
@ -246,6 +256,7 @@ public GroupParam() {
/**
* Class for len parameter.
*/
@InterfaceAudience.Private
public static class LenParam extends LongParam {
/**
@ -264,6 +275,7 @@ public LenParam() {
/**
* Class for modified-time parameter.
*/
@InterfaceAudience.Private
public static class ModifiedTimeParam extends LongParam {
/**
@ -282,6 +294,7 @@ public ModifiedTimeParam() {
/**
* Class for offset parameter.
*/
@InterfaceAudience.Private
public static class OffsetParam extends LongParam {
/**
@ -300,6 +313,7 @@ public OffsetParam() {
/**
* Class for overwrite parameter.
*/
@InterfaceAudience.Private
public static class OverwriteParam extends BooleanParam {
/**
@ -318,6 +332,7 @@ public OverwriteParam() {
/**
* Class for owner parameter.
*/
@InterfaceAudience.Private
public static class OwnerParam extends StringParam {
/**
@ -337,6 +352,7 @@ public OwnerParam() {
/**
* Class for permission parameter.
*/
@InterfaceAudience.Private
public static class PermissionParam extends ShortParam {
/**
@ -357,6 +373,7 @@ public PermissionParam() {
/**
* Class for replication parameter.
*/
@InterfaceAudience.Private
public static class ReplicationParam extends ShortParam {
/**
@ -375,6 +392,7 @@ public ReplicationParam() {
/**
* Class for to-path parameter.
*/
@InterfaceAudience.Private
public static class DestinationParam extends StringParam {
/**

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.service.FileSystemAccess;
import org.apache.hadoop.lib.servlet.FileSystemReleaseFilter;
@ -25,6 +26,7 @@
* Filter that releases FileSystemAccess filesystem instances upon HTTP request
* completion.
*/
@InterfaceAudience.Private
public class HttpFSReleaseFilter extends FileSystemReleaseFilter {
/**

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
@ -82,6 +83,7 @@
* different operations.
*/
@Path(HttpFSFileSystem.SERVICE_VERSION)
@InterfaceAudience.Private
public class HttpFSServer {
private static Logger AUDIT_LOG = LoggerFactory.getLogger("httpfsaudit");

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.lib.server.ServerException;
@ -39,6 +40,7 @@
* All the configuration is loaded from configuration properties prefixed
* with <code>httpfs.</code>.
*/
@InterfaceAudience.Private
public class HttpFSServerWebApp extends ServerWebApp {
private static final Logger LOG =
LoggerFactory.getLogger(HttpFSServerWebApp.class);

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.lang;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.util.Check;
import java.util.concurrent.Callable;
@ -26,6 +27,7 @@
* Adapter class that allows <code>Runnable</code>s and <code>Callable</code>s to
* be treated as the other.
*/
@InterfaceAudience.Private
public class RunnableCallable implements Callable<Void>, Runnable {
private Runnable runnable;
private Callable<?> callable;

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.lang;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.util.Check;
import java.text.MessageFormat;
@ -26,6 +27,7 @@
* Generic exception that requires error codes and uses a message
* template from the error code.
*/
@InterfaceAudience.Private
public class XException extends Exception {
/**

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.util.ConfigurationUtils;
@ -26,6 +27,7 @@
/**
* Convenience class implementing the {@link Service} interface.
*/
@InterfaceAudience.Private
public abstract class BaseService implements Service {
private String prefix;
private Server server;

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.util.Check;
import org.apache.hadoop.lib.util.ConfigurationUtils;
@ -76,6 +77,7 @@
* post-initialized (this enables late/conditional service bindings).
* <p/>
*/
@InterfaceAudience.Private
public class Server {
private Logger log;
@ -97,7 +99,8 @@ public class Server {
/**
* Enumeration that defines the server status.
*/
public enum Status {
@InterfaceAudience.Private
public static enum Status {
UNDEF(false, false),
BOOTING(false, true),
HALTED(true, true),

View File

@ -18,16 +18,19 @@
package org.apache.hadoop.lib.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
/**
* Exception thrown by the {@link Server} class.
*/
@InterfaceAudience.Private
public class ServerException extends XException {
/**
* Error codes use by the {@link Server} class.
*/
@InterfaceAudience.Private
public static enum ERROR implements XException.ERROR {
S01("Dir [{0}] does not exist"),
S02("[{0}] is not a directory"),

View File

@ -18,9 +18,12 @@
package org.apache.hadoop.lib.server;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Service interface for components to be managed by the {@link Server} class.
*/
@InterfaceAudience.Private
public interface Service {
/**

View File

@ -18,11 +18,13 @@
package org.apache.hadoop.lib.server;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
/**
* Exception thrown by {@link Service} implementations.
*/
@InterfaceAudience.Private
public class ServiceException extends ServerException {
/**

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.lib.service;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
@ -24,6 +25,7 @@
/**
* HttpFS <code>DelegationTokenIdentifier</code> implementation.
*/
@InterfaceAudience.Private
public class DelegationTokenIdentifier
extends AbstractDelegationTokenIdentifier {

View File

@ -17,12 +17,14 @@
*/
package org.apache.hadoop.lib.service;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
/**
* Service interface to manage HttpFS delegation tokens.
*/
@InterfaceAudience.Private
public interface DelegationTokenManager {
/**

View File

@ -17,11 +17,13 @@
*/
package org.apache.hadoop.lib.service;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
/**
* Exception thrown by the {@link DelegationTokenManager} service implementation.
*/
@InterfaceAudience.Private
public class DelegationTokenManagerException extends XException {
public enum ERROR implements XException.ERROR {

View File

@ -18,11 +18,13 @@
package org.apache.hadoop.lib.service;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import java.io.IOException;
@InterfaceAudience.Private
public interface FileSystemAccess {
public interface FileSystemExecutor<T> {

View File

@ -18,8 +18,10 @@
package org.apache.hadoop.lib.service;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
@InterfaceAudience.Private
public class FileSystemAccessException extends XException {
public enum ERROR implements XException.ERROR {

View File

@ -18,9 +18,12 @@
package org.apache.hadoop.lib.service;
import org.apache.hadoop.classification.InterfaceAudience;
import java.io.IOException;
import java.util.List;
@InterfaceAudience.Private
public interface Groups {
public List<String> getGroups(String user) throws IOException;

View File

@ -18,8 +18,11 @@
package org.apache.hadoop.lib.service;
import org.apache.hadoop.classification.InterfaceAudience;
import java.util.Map;
@InterfaceAudience.Private
public interface Instrumentation {
public interface Cron {

View File

@ -18,9 +18,12 @@
package org.apache.hadoop.lib.service;
import org.apache.hadoop.classification.InterfaceAudience;
import java.io.IOException;
import java.security.AccessControlException;
@InterfaceAudience.Private
public interface ProxyUser {
public void validate(String proxyUser, String proxyHost, String doAsUser) throws IOException, AccessControlException;

View File

@ -18,9 +18,12 @@
package org.apache.hadoop.lib.service;
import org.apache.hadoop.classification.InterfaceAudience;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
@InterfaceAudience.Private
public interface Scheduler {
public abstract void schedule(Callable<?> callable, long delay, long interval, TimeUnit unit);

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.hadoop;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
@ -47,6 +48,7 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
@InterfaceAudience.Private
public class FileSystemAccessService extends BaseService implements FileSystemAccess {
private static final Logger LOG = LoggerFactory.getLogger(FileSystemAccessService.class);

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.instrumentation;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.ServiceException;
import org.apache.hadoop.lib.service.Instrumentation;
@ -39,6 +40,7 @@
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
@InterfaceAudience.Private
public class InstrumentationService extends BaseService implements Instrumentation {
public static final String PREFIX = "instrumentation";
public static final String CONF_TIMERS_SIZE = "timers.size";

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.scheduler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.RunnableCallable;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.Server;
@ -35,6 +36,7 @@
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
@InterfaceAudience.Private
public class SchedulerService extends BaseService implements Scheduler {
private static final Logger LOG = LoggerFactory.getLogger(SchedulerService.class);

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.lib.service.security;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.lib.server.BaseService;
@ -37,6 +38,7 @@
/**
* DelegationTokenManager service implementation.
*/
@InterfaceAudience.Private
public class DelegationTokenManagerService extends BaseService
implements DelegationTokenManager {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.security;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.ServiceException;
@ -27,6 +28,7 @@
import java.io.IOException;
import java.util.List;
@InterfaceAudience.Private
public class GroupsService extends BaseService implements Groups {
private static final String PREFIX = "groups";

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.security;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.ServiceException;
@ -38,10 +39,12 @@
import java.util.Map;
import java.util.Set;
@InterfaceAudience.Private
public class ProxyUserService extends BaseService implements ProxyUser {
private static Logger LOG = LoggerFactory.getLogger(ProxyUserService.class);
public enum ERROR implements XException.ERROR {
@InterfaceAudience.Private
public static enum ERROR implements XException.ERROR {
PRXU01("Could not normalize host name [{0}], {1}"),
PRXU02("Missing [{0}] property");

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.servlet;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.lib.service.FileSystemAccess;
@ -37,6 +38,7 @@
* is streaming out HDFS data and the corresponding filesystem
* instance has to be closed after the streaming completes.
*/
@InterfaceAudience.Private
public abstract class FileSystemReleaseFilter implements Filter {
private static final ThreadLocal<FileSystem> FILE_SYSTEM_TL = new ThreadLocal<FileSystem>();

View File

@ -19,6 +19,8 @@
package org.apache.hadoop.lib.servlet;
import org.apache.hadoop.classification.InterfaceAudience;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
@ -31,6 +33,7 @@
/**
* Filter that resolves the requester hostname.
*/
@InterfaceAudience.Private
public class HostnameFilter implements Filter {
static final ThreadLocal<String> HOSTNAME_TL = new ThreadLocal<String>();

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.servlet;
import org.apache.hadoop.classification.InterfaceAudience;
import org.slf4j.MDC;
import javax.servlet.Filter;
@ -42,6 +43,7 @@
* <li>path: the path of the request URL</li>
* </ul>
*/
@InterfaceAudience.Private
public class MDCFilter implements Filter {
/**

View File

@ -19,6 +19,7 @@
package org.apache.hadoop.lib.servlet;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.server.Server;
import org.apache.hadoop.lib.server.ServerException;
@ -34,6 +35,7 @@
* {@link Server} subclass that implements <code>ServletContextListener</code>
* and uses its lifecycle to start and stop the server.
*/
@InterfaceAudience.Private
public abstract class ServerWebApp extends Server implements ServletContextListener {
private static final String HOME_DIR = ".home.dir";

View File

@ -18,6 +18,8 @@
package org.apache.hadoop.lib.util;
import org.apache.hadoop.classification.InterfaceAudience;
import java.text.MessageFormat;
import java.util.List;
import java.util.regex.Pattern;
@ -27,6 +29,7 @@
* <p/>
* Commonly used for method argument preconditions.
*/
@InterfaceAudience.Private
public class Check {
/**

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
@ -37,6 +38,7 @@
/**
* Configuration utilities.
*/
@InterfaceAudience.Private
public abstract class ConfigurationUtils {
/**

View File

@ -18,8 +18,11 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
import java.text.MessageFormat;
@InterfaceAudience.Private
public abstract class BooleanParam extends Param<Boolean> {
public BooleanParam(String name, Boolean defaultValue) {

View File

@ -18,6 +18,9 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
@InterfaceAudience.Private
public abstract class ByteParam extends Param<Byte> {
public ByteParam(String name, Byte defaultValue) {

View File

@ -18,10 +18,12 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.util.StringUtils;
import java.util.Arrays;
@InterfaceAudience.Private
public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
Class<E> klass;

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -28,6 +29,7 @@
import java.util.LinkedHashMap;
import java.util.Map;
@InterfaceAudience.Private
public class ExceptionProvider implements ExceptionMapper<Throwable> {
private static Logger LOG = LoggerFactory.getLogger(ExceptionProvider.class);

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.IOUtils;
import javax.ws.rs.core.StreamingOutput;
@ -25,6 +26,7 @@
import java.io.InputStream;
import java.io.OutputStream;
@InterfaceAudience.Private
public class InputStreamEntity implements StreamingOutput {
private InputStream is;
private long offset;

View File

@ -18,6 +18,9 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
@InterfaceAudience.Private
public abstract class IntegerParam extends Param<Integer> {
public IntegerParam(String name, Integer defaultValue) {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
import org.json.simple.JSONObject;
import javax.ws.rs.Produces;
@ -36,6 +37,7 @@
@Provider
@Produces(MediaType.APPLICATION_JSON)
@InterfaceAudience.Private
public class JSONMapProvider implements MessageBodyWriter<Map> {
private static final String ENTER = System.getProperty("line.separator");

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
import org.json.simple.JSONStreamAware;
import javax.ws.rs.Produces;
@ -35,6 +36,7 @@
@Provider
@Produces(MediaType.APPLICATION_JSON)
@InterfaceAudience.Private
public class JSONProvider implements MessageBodyWriter<JSONStreamAware> {
private static final String ENTER = System.getProperty("line.separator");

View File

@ -18,6 +18,9 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
@InterfaceAudience.Private
public abstract class LongParam extends Param<Long> {
public LongParam(String name, Long defaultValue) {

View File

@ -18,10 +18,11 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.lib.util.Check;
import org.apache.hadoop.classification.InterfaceAudience;
import java.text.MessageFormat;
@InterfaceAudience.Private
public abstract class Param<T> {
private String name;
protected T value;

View File

@ -17,6 +17,8 @@
*/
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
import java.util.Map;
/**
@ -24,6 +26,7 @@
* <p/>
* Instances are created by the {@link ParametersProvider} class.
*/
@InterfaceAudience.Private
public class Parameters {
private Map<String, Param<?>> params;

View File

@ -24,6 +24,7 @@
import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable;
import com.sun.jersey.spi.inject.Injectable;
import com.sun.jersey.spi.inject.InjectableProvider;
import org.apache.hadoop.classification.InterfaceAudience;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MultivaluedMap;
@ -36,6 +37,7 @@
* Jersey provider that parses the request parameters based on the
* given parameter definition.
*/
@InterfaceAudience.Private
public class ParametersProvider
extends AbstractHttpContextInjectable<Parameters>
implements InjectableProvider<Context, Type> {

View File

@ -18,6 +18,9 @@
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
@InterfaceAudience.Private
public abstract class ShortParam extends Param<Short> {
private int radix;

View File

@ -17,9 +17,12 @@
*/
package org.apache.hadoop.lib.wsrs;
import org.apache.hadoop.classification.InterfaceAudience;
import java.text.MessageFormat;
import java.util.regex.Pattern;
@InterfaceAudience.Private
public abstract class StringParam extends Param<String> {
private Pattern pattern;

View File

@ -24,6 +24,7 @@
import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable;
import com.sun.jersey.spi.inject.Injectable;
import com.sun.jersey.spi.inject.InjectableProvider;
import org.apache.hadoop.classification.InterfaceAudience;
import org.slf4j.MDC;
import javax.ws.rs.core.Context;
@ -33,6 +34,7 @@
import java.util.regex.Pattern;
@Provider
@InterfaceAudience.Private
public class UserProvider extends AbstractHttpContextInjectable<Principal> implements
InjectableProvider<Context, Type> {