HBASE-18687 Add @since 2.0.0 to new classes

parent f74cf679ec
commit 3e1c598d8e
@@ -56,6 +56,7 @@ import org.apache.hadoop.hbase.util.Pair;
 /**
  * The asynchronous meta table accessor. Used to read/write region and assignment information store
  * in <code>hbase:meta</code>.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 public class AsyncMetaTableAccessor {

@@ -52,6 +52,7 @@ import com.google.protobuf.RpcChannel;
  * <p>
  * This feature is still under development, so marked as IA.Private. Will change to public when
  * done. Use it with caution.
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public interface AsyncAdmin {

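For context, a minimal sketch of how a caller might reach this asynchronous admin API. It assumes ConnectionFactory.createAsyncConnection, AsyncConnection.getAdmin, and AsyncAdmin.tableExists as they exist in the 2.0.0 line; the table name "test" is only a placeholder.

    import java.util.concurrent.CompletableFuture;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.AsyncAdmin;
    import org.apache.hadoop.hbase.client.AsyncConnection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class AsyncAdminSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // createAsyncConnection completes asynchronously; get() blocks only here for brevity
        try (AsyncConnection conn = ConnectionFactory.createAsyncConnection(conf).get()) {
          AsyncAdmin admin = conn.getAdmin();
          // Every AsyncAdmin call returns a CompletableFuture instead of blocking
          CompletableFuture<Boolean> exists = admin.tableExists(TableName.valueOf("test"));
          exists.thenAccept(b -> System.out.println("table exists: " + b)).join();
        }
      }
    }
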
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
  * For creating {@link AsyncAdmin}. The implementation should have default configurations set before
  * returning the builder to user. So users are free to only set the configs they care about to
  * create a new AsyncAdmin instance.
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public interface AsyncAdminBuilder {

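A sketch of the builder usage described above: only the settings the caller cares about are overridden before build(). The getAdminBuilder, setOperationTimeout, and setMaxAttempts names reflect the 2.0.0-era builder API as I understand it and should be treated as assumptions.

    import java.util.concurrent.TimeUnit;

    import org.apache.hadoop.hbase.client.AsyncAdmin;
    import org.apache.hadoop.hbase.client.AsyncConnection;

    public class AsyncAdminBuilderSketch {
      static AsyncAdmin buildAdmin(AsyncConnection conn) {
        // Defaults are already set by the implementation; override only what matters here
        return conn.getAdminBuilder()
            .setOperationTimeout(30, TimeUnit.SECONDS)
            .setMaxAttempts(5)
            .build();
      }
    }
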
@@ -26,6 +26,9 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;

+/**
+ * @since 2.0.0
+ */
 @InterfaceAudience.Private
 public class AsyncAdminRequestRetryingCaller<T> extends AsyncRpcRetryingCaller<T> {

@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;

 /**
  * The asynchronous version of Connection.
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public interface AsyncConnection extends Closeable {

@@ -58,6 +58,7 @@ import com.google.protobuf.RpcChannel;

 /**
  * The implementation of AsyncAdmin.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 public class AsyncHBaseAdmin implements AsyncAdmin {

@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterServ

 /**
  * Retry caller for a request call to master.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 public class AsyncMasterRequestRpcRetryingCaller<T> extends AsyncRpcRetryingCaller<T> {

@@ -28,7 +28,8 @@ import java.util.List;
  * 1) If AsyncProcess is set to track errors globally, and not per call (for HTable puts),
  * then errors and failed operations in this object will reflect global errors.
  * 2) If submit call is made with needResults false, results will not be saved.
- * */
+ * @since 2.0.0
+ */
 @InterfaceAudience.Private
 public interface AsyncRequestFuture {
   public boolean hasError();

@@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRespon

 /**
  * Factory to create an AsyncRpcRetryCaller.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 class AsyncRpcRetryingCallerFactory {

@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
  * The implementation should make sure that user can do everything they want to the returned
  * {@code CompletableFuture} without breaking anything. Usually the implementation will require user
  * to provide a {@code ExecutorService}.
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public interface AsyncTable extends AsyncTableBase {

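As the Javadoc notes, this flavor of the table asks the user for an ExecutorService so callbacks on the returned CompletableFuture can safely run arbitrary code. A sketch assuming getTable(TableName, ExecutorService) as in the API at the time of this commit:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.AsyncConnection;
    import org.apache.hadoop.hbase.client.AsyncTable;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AsyncTableSketch {
      static void readRow(AsyncConnection conn) {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        // The pool runs the callbacks, so user code in thenAccept cannot stall an internal thread
        AsyncTable table = conn.getTable(TableName.valueOf("test"), pool);
        table.get(new Get(Bytes.toBytes("row1")))
            .thenAccept(result -> System.out.println("cells: " + result.size()))
            .join();
      }
    }
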
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  * <p>
  * Usually the implementation will not throw any exception directly. You need to get the exception
  * from the returned {@link CompletableFuture}.
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public interface AsyncTableBase {

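Because implementations do not throw directly, errors surface through the future. A small sketch of that pattern; the put method on AsyncTableBase is the 2.0.0-era signature and is an assumption here, the rest is plain CompletableFuture usage.

    import org.apache.hadoop.hbase.client.AsyncTableBase;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AsyncErrorHandlingSketch {
      static void writeRow(AsyncTableBase table) {
        Put put = new Put(Bytes.toBytes("row1"))
            .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
        table.put(put).whenComplete((ignored, error) -> {
          // The exception, if any, is delivered here rather than thrown by put()
          if (error != null) {
            error.printStackTrace();
          } else {
            System.out.println("put succeeded");
          }
        });
      }
    }
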
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
  * The implementation should have default configurations set before returning the builder to user.
  * So users are free to only set the configs they care about to create a new
  * AsyncTable/RawAsyncTable instance.
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public interface AsyncTableBuilder<T extends AsyncTableBase> {

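The same only-override-what-you-need idea as the admin builder, sketched against the 2.0.0-era getTableBuilder(TableName, ExecutorService); the setReadRpcTimeout and setMaxAttempts names are assumptions.

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.TimeUnit;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.AsyncConnection;
    import org.apache.hadoop.hbase.client.AsyncTable;

    public class AsyncTableBuilderSketch {
      static AsyncTable buildTable(AsyncConnection conn, ExecutorService pool) {
        // Only the read RPC timeout and retry count are customized; everything else keeps its default
        return conn.getTableBuilder(TableName.valueOf("test"), pool)
            .setReadRpcTimeout(10, TimeUnit.SECONDS)
            .setMaxAttempts(3)
            .build();
      }
    }
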
@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
  * <p>
  * Usually the implementations will not throw any exception directly, you need to get the exception
  * from the returned {@link CompletableFuture}.
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public interface AsyncTableRegionLocator {

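A brief sketch of the same error-via-future contract for the locator, assuming AsyncConnection.getRegionLocator and getRegionLocation(byte[]) from the 2.0.0 line:

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.AsyncConnection;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RegionLocatorSketch {
      static void locate(AsyncConnection conn) {
        conn.getRegionLocator(TableName.valueOf("test"))
            .getRegionLocation(Bytes.toBytes("row1"))
            .whenComplete((location, error) -> {
              // Any lookup failure arrives through the future, not as a thrown exception
              if (error != null) {
                error.printStackTrace();
              } else {
                System.out.println("hosted on " + location.getServerName());
              }
            });
      }
    }
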
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  * <p>
  * If user setBatch(5) and rpc returns 3+5+5+5+3 cells, we should return 5+5+5+5+1 to user. setBatch
  * doesn't mean setAllowPartialResult(true).
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 public class BatchScanResultCache implements ScanResultCache {

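The 3+5+5+5+3 to 5+5+5+5+1 regrouping above is just cell counting. The toy method below is not HBase code; it only illustrates the arithmetic: 21 buffered cells regrouped into batches of 5 leave a final partial group of 1.

    import java.util.ArrayList;
    import java.util.List;

    public class BatchRegroupSketch {
      // Regroup per-RPC cell counts into user-visible batches of size `batch`
      static List<Integer> regroup(int[] rpcCellCounts, int batch) {
        List<Integer> out = new ArrayList<>();
        int buffered = 0;
        for (int count : rpcCellCounts) {
          buffered += count;
          while (buffered >= batch) {
            out.add(batch);
            buffered -= batch;
          }
        }
        if (buffered > 0) {
          out.add(buffered); // trailing partial batch at end of scan
        }
        return out;
      }

      public static void main(String[] args) {
        // Prints [5, 5, 5, 5, 1] for the 3+5+5+5+3 example with setBatch(5)
        System.out.println(regroup(new int[] { 3, 5, 5, 5, 3 }, 5));
      }
    }
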
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  * It is used as input when creating a table or adding a column.
  *
  * To construct a new instance, use the {@link ColumnFamilyDescriptorBuilder} methods
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public interface ColumnFamilyDescriptor {

@@ -41,6 +41,9 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.PrettyPrinter;
 import org.apache.hadoop.hbase.util.PrettyPrinter.Unit;

+/**
+ * @since 2.0.0
+ */
 @InterfaceAudience.Public
 public class ColumnFamilyDescriptorBuilder {
   // For future backward compatibility

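As the ColumnFamilyDescriptor Javadoc above says, instances come from this builder. A minimal sketch, assuming the newBuilder, setMaxVersions, and build methods of the 2.0.0 builder:

    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ColumnFamilySketch {
      static ColumnFamilyDescriptor infoFamily() {
        // The builder fills in defaults; only the family name and version count are set here
        return ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("info"))
            .setMaxVersions(3)
            .build();
      }
    }
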
@@ -255,6 +255,7 @@ import org.apache.hadoop.hbase.util.Pair;

 /**
  * The implementation of AsyncAdmin.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 public class RawAsyncHBaseAdmin implements AsyncAdmin {

@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
  * method. The {@link RawScanResultConsumer} exposes the implementation details of a scan(heartbeat)
  * so it is not suitable for a normal user. If it is still the only difference after we implement
  * most features of AsyncTable, we can think about merge these two interfaces.
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public interface RawAsyncTable extends AsyncTableBase {

@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
  * HBase in background while you process the returned data, you need to move the processing work to
  * another thread to make the {@code onNext} call return immediately. And please do NOT do any time
  * consuming tasks in all methods below unless you know what you are doing.
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public interface RawScanResultConsumer {

@@ -47,6 +47,9 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.util.Bytes;

+/**
+ * @since 2.0.0
+ */
 @InterfaceAudience.Public
 public class TableDescriptorBuilder {
   public static final Log LOG = LogFactory.getLog(TableDescriptorBuilder.class);

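And the table-level counterpart, assuming TableDescriptorBuilder.newBuilder plus the addColumnFamily name in use around the time of this commit (later HBase versions renamed some of these setters):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TableDescriptorSketch {
      static TableDescriptor testTable() {
        return TableDescriptorBuilder.newBuilder(TableName.valueOf("test"))
            // One column family named "info" with builder defaults for everything else
            .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("info")).build())
            .build();
      }
    }
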
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.util.Pair;

 /**
  * Netty client for the requests and responses.
+ * @since 2.0.0
  */
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
 public class NettyRpcClient extends AbstractRpcClient<NettyRpcConnection> {

@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.util.Pair;
  * As hadoop Configuration can not pass an Object directly, we need to find a way to pass the
  * EventLoopGroup to {@code AsyncRpcClient} if we want to use a single {@code EventLoopGroup} for
  * the whole process.
+ * @since 2.0.0
  */
 @InterfaceAudience.Public
 public class NettyRpcClientConfigHelper {

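A sketch of wiring a process-wide event loop into the client configuration. The setEventLoopConfig signature is my assumption from the 2.0.0 line, and depending on the HBase version the Netty types may need to be the shaded variants bundled with HBase rather than plain io.netty.

    import io.netty.channel.nio.NioEventLoopGroup;
    import io.netty.channel.socket.nio.NioSocketChannel;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.ipc.NettyRpcClientConfigHelper;

    public class EventLoopConfigSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Reuse one event loop group for every connection created from this configuration
        NioEventLoopGroup group = new NioEventLoopGroup(1);
        NettyRpcClientConfigHelper.setEventLoopConfig(conf, group, NioSocketChannel.class);
        // ... then pass `conf` to ConnectionFactory.createAsyncConnection(conf) as usual ...
      }
    }
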
@@ -65,6 +65,7 @@ import org.apache.hadoop.security.UserGroupInformation;
  * <p>
  * Most operations are executed in handlers. Netty handler is always executed in the same
  * thread(EventLoop) so no lock is needed.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 class NettyRpcConnection extends RpcConnection {

@@ -48,6 +48,7 @@ import org.apache.hadoop.ipc.RemoteException;

 /**
  * The netty rpc handler.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 class NettyRpcDuplexHandler extends ChannelDuplexHandler {

@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;

 /**
  * Implement logic to deal with the rpc connection header.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 public class NettyHBaseRpcConnectionHeaderHandler extends SimpleChannelInboundHandler<ByteBuf> {

@@ -32,6 +32,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;

 /**
  * Implement SASL logic for netty rpc client.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 public class NettyHBaseSaslRpcClient extends AbstractHBaseSaslRpcClient {

@@ -36,6 +36,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;

 /**
  * Implement SASL logic for netty rpc client.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 public class NettyHBaseSaslRpcClientHandler extends SimpleChannelInboundHandler<ByteBuf> {

@@ -57,6 +57,7 @@ import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;

 /**
  * An RPC server with Netty4 implementation.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 public class NettyRpcServer extends RpcServer {

@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;

 /**
  * Handle connection preamble.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 class NettyRpcServerPreambleHandler extends SimpleChannelInboundHandler<ByteBuf> {

@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;

 /**
  * Decoder for rpc request.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 class NettyRpcServerRequestDecoder extends ChannelInboundHandlerAdapter {

@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;

 /**
  * Encoder for {@link RpcResponse}.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 class NettyRpcServerResponseEncoder extends ChannelOutboundHandlerAdapter {

@@ -33,6 +33,7 @@ import org.apache.htrace.TraceInfo;
 /**
  * Datastructure that holds all necessary to a method invocation and then afterward, carries the
  * result.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 class NettyServerCall extends ServerCall<NettyServerRpcConnection> {

@@ -38,6 +38,7 @@ import org.apache.htrace.TraceInfo;

 /**
  * RpcConnection implementation for netty rpc server.
+ * @since 2.0.0
  */
 @InterfaceAudience.Private
 class NettyServerRpcConnection extends ServerRpcConnection {

@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.util.Pair;

 /**
  * Helper class for passing netty event loop config to {@link AsyncFSWALProvider}.
+ * @since 2.0.0
  */
 public class NettyAsyncFSWALConfigHelper {
