HBASE-9516 Mark hbase-common classes missing @InterfaceAudience annotation as Private

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1522693 13f79535-47bb-0310-9956-ffa450edef68
Jonathan Hsieh 2013-09-12 18:35:41 +00:00
parent 80b4a529fd
commit 07fd6b6a04
21 changed files with 61 additions and 0 deletions
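For readers scanning the diff: `@InterfaceAudience.Private` declares a class internal to HBase, with no compatibility promises to downstream users; every file below gains the import plus the annotation. A minimal sketch of the pattern, assuming a hypothetical class name (`ExampleUtil` is not part of this commit):

```java
// Hypothetical illustration of the pattern this commit applies uniformly.
import org.apache.hadoop.classification.InterfaceAudience;

// Private audience: internal implementation detail that may change or be
// removed in any release; downstream code should not depend on it.
@InterfaceAudience.Private
public class ExampleUtil {
}
```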


@@ -17,12 +17,15 @@
  */
 package org.apache.hadoop.hbase;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Implementer can return a CellScanner over its Cell content.
  * Class name is ugly but mimicing java.util.Iterable only we are about the dumber
  * CellScanner rather than say Iterator<Cell>. See CellScanner class comment for why we go
  * dumber than java.util.Iterator.
  */
+@InterfaceAudience.Private
 public interface CellScannable {
   /**
    * @return A CellScanner over the contained {@link Cell}s

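A hedged usage sketch (not part of the change): consumers obtain a CellScanner from a CellScannable and walk it with the advance()/current() pair; the `dumpCells` helper below is hypothetical.

```java
import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;

public class CellScannableSketch {
  // Hypothetical helper: iterate every Cell behind a CellScannable.
  static void dumpCells(CellScannable scannable) throws IOException {
    CellScanner scanner = scannable.cellScanner();
    while (scanner.advance()) {      // move to the next Cell; false at the end
      Cell cell = scanner.current(); // the Cell at the current position
      // ... inspect the cell ...
    }
  }
}
```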

@@ -23,8 +23,13 @@ import java.io.InputStream;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 
+/**
+ * TODO javadoc
+ */
+@InterfaceAudience.Private
 public abstract class BaseDecoder implements Codec.Decoder {
   protected static final Log LOG = LogFactory.getLog(BaseDecoder.class);
   protected final InputStream in;


@@ -20,8 +20,14 @@ package org.apache.hadoop.hbase.codec;
 import java.io.IOException;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 import org.apache.hadoop.hbase.Cell;
 
+/**
+ * TODO javadoc
+ */
+@InterfaceAudience.Private
 public abstract class BaseEncoder implements Codec.Encoder {
   protected final OutputStream out;
   // This encoder is 'done' once flush has been called.


@@ -22,6 +22,7 @@ import java.io.InputStream;
 import java.io.OutputStream;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.util.Bytes;

@@ -30,6 +31,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  * Basic Cell codec that just writes out all the individual elements of a Cell. Uses ints
  * delimiting all lengths. Profligate. Needs tune up.
  */
+@InterfaceAudience.Private
 public class CellCodec implements Codec {
   static class CellEncoder extends BaseEncoder {
     CellEncoder(final OutputStream out) {


@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec;
 import java.io.InputStream;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.io.CellOutputStream;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;

@@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
  * and without presuming an hfile context. Intent is an Interface that will work for hfile and
  * rpc.
  */
+@InterfaceAudience.Private
 public interface Codec {
   // TODO: interfacing with {@link DataBlockEncoder}
   /**


@@ -18,12 +18,14 @@
 package org.apache.hadoop.hbase.codec;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HBaseIOException;
 
 /**
  * Thrown when problems in the codec whether setup or context.
  */
 @SuppressWarnings("serial")
+@InterfaceAudience.Private
 public class CodecException extends HBaseIOException {
   public CodecException() {
     super();


@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;

@@ -42,6 +43,7 @@ import org.apache.hadoop.hbase.KeyValueUtil;
  * KeyValue2 backing array
  * </pre>
  */
+@InterfaceAudience.Private
 public class KeyValueCodec implements Codec {
   public static class KeyValueEncoder extends BaseEncoder {
     public KeyValueEncoder(final OutputStream out) {

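As a hedged round-trip sketch of the Codec contract these classes implement (the byte-array plumbing and wrapper class are illustrative, not from the commit):

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.KeyValueCodec;

public class KeyValueCodecRoundTrip {
  // Encode Cells as length-delimited KeyValue bytes.
  static byte[] encode(Iterable<Cell> cells) throws IOException {
    Codec codec = new KeyValueCodec();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Codec.Encoder encoder = codec.getEncoder(out);
    for (Cell cell : cells) {
      encoder.write(cell);
    }
    encoder.flush(); // the encoder is 'done' once flush has been called
    return out.toByteArray();
  }

  // Decode them back; a Decoder is consumed like a CellScanner.
  static void decode(byte[] bytes) throws IOException {
    Codec codec = new KeyValueCodec();
    Codec.Decoder decoder = codec.getDecoder(new ByteArrayInputStream(bytes));
    while (decoder.advance()) {
      Cell cell = decoder.current();
      // ... use the cell ...
    }
  }
}
```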

@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.encoding;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.compress.Compression;
 
 /**

@@ -27,6 +28,7 @@ import org.apache.hadoop.hbase.io.compress.Compression;
  *
  * @see HFileBlockEncodingContext for encoding
  */
+@InterfaceAudience.Private
 public interface HFileBlockDecodingContext {
 
   /**


@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;

@@ -32,6 +33,7 @@ import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
  * @see HFileBlockDefaultEncodingContext for the default compression context
  *
  */
+@InterfaceAudience.Private
 public class HFileBlockDefaultDecodingContext implements
     HFileBlockDecodingContext {


@@ -22,6 +22,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.BlockType;

@@ -37,6 +38,7 @@ import com.google.common.base.Preconditions;
  * @see HFileBlockDefaultDecodingContext for the decompression part
  *
  */
+@InterfaceAudience.Private
 public class HFileBlockDefaultEncodingContext implements
     HFileBlockEncodingContext {


@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.encoding;
 import java.io.IOException;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.BlockType;

@@ -29,6 +30,7 @@ import org.apache.hadoop.hbase.io.hfile.BlockType;
  * @see HFileBlockDecodingContext for decoding
  *
  */
+@InterfaceAudience.Private
 public interface HFileBlockEncodingContext {
 
   /**


@@ -21,6 +21,13 @@ package org.apache.hadoop.hbase.util;
 import java.util.ArrayList;
 import java.util.Arrays;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * A set of array utility functions that return reasonable values in cases where an array is
+ * allocated or if it is null
+ */
+@InterfaceAudience.Private
 public class ArrayUtils {
 
   public static int length(byte[] a) {


@@ -23,9 +23,12 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Utility methods for dealing with Collections, including treating null collections as empty.
  */
+@InterfaceAudience.Private
 public class CollectionUtils {
 
   private static final List<Object> EMPTY_LIST = Collections.unmodifiableList(


@@ -21,10 +21,13 @@ package org.apache.hadoop.hbase.util;
 import java.util.concurrent.atomic.AtomicLong;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * A simple barrier that can be used by classes that need to wait for some operations to
  * finish before stopping/closing/etc. forever.
  */
+@InterfaceAudience.Private
 public class DrainBarrier {
   /**
    * Contains the number of outstanding operations, as well as flags.

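A hedged sketch of the drain-barrier pattern the javadoc describes; the method names (beginOp/endOp/stopAndDrainOps) are assumed from the class's contract and are not visible in this hunk:

```java
import org.apache.hadoop.hbase.util.DrainBarrier;

public class DrainBarrierSketch {
  private final DrainBarrier barrier = new DrainBarrier();

  // Called on each worker thread.
  void doGuardedWork() {
    if (!barrier.beginOp()) { // assumed: returns false once the barrier stopped
      return;                 // shutting down; skip the operation
    }
    try {
      // ... the operation that must complete before close ...
    } finally {
      barrier.endOp();        // assumed: marks the operation finished
    }
  }

  // Called once on the closing thread.
  void close() throws InterruptedException {
    barrier.stopAndDrainOps(); // assumed: rejects new ops, waits for in-flight
  }
}
```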

@@ -22,9 +22,12 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Utility methods for Iterable including null-safe handlers.
  */
+@InterfaceAudience.Private
 public class IterableUtils {
 
   private static final List<Object> EMPTY_LIST = Collections


@@ -30,6 +30,7 @@ import java.util.concurrent.locks.ReentrantLock;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 
 /**
  * A utility class to manage a set of locks. Each lock is identified by a String which serves

@@ -48,6 +49,7 @@ import org.apache.commons.logging.LogFactory;
  * }
  * </p>
  */
+@InterfaceAudience.Private
 public class KeyLocker<K extends Comparable<? super K>> {
   private static final Log LOG = LogFactory.getLog(KeyLocker.class);

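A hedged sketch of per-key locking with this class; `acquireLock` is assumed from the (truncated) usage example in the javadoc above:

```java
import java.util.concurrent.locks.Lock;

import org.apache.hadoop.hbase.util.KeyLocker;

public class KeyLockerSketch {
  private final KeyLocker<String> locker = new KeyLocker<String>();

  void updateRow(String rowKey) {
    Lock lock = locker.acquireLock(rowKey); // assumed signature
    try {
      // ... work that must be serialized per rowKey ...
    } finally {
      lock.unlock(); // always release the per-key lock
    }
  }
}
```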

@@ -20,6 +20,9 @@ package org.apache.hadoop.hbase.util;
 import java.lang.reflect.InvocationTargetException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
 public class ReflectionUtils {
   @SuppressWarnings("unchecked")
   public static <T> T instantiateWithCustomCtor(String className,


@@ -18,9 +18,12 @@
 package org.apache.hadoop.hbase.util;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Utility class to manage a triple.
  */
+@InterfaceAudience.Private
 public class Triple<A, B, C> {
   private A first;
   private B second;


@@ -18,10 +18,13 @@ package org.apache.hadoop.hbase.util.test;
 import java.util.Set;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * A generator of random data (keys/cfs/columns/values) for load testing.
  * Contains LoadTestKVGenerator as a matter of convenience...
  */
+@InterfaceAudience.Private
 public abstract class LoadTestDataGenerator {
   protected final LoadTestKVGenerator kvGenerator;


@@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.util.test;
 import java.util.Random;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.MD5Hash;

@@ -28,6 +29,7 @@ import org.apache.hadoop.hbase.util.MD5Hash;
  * and generating a pseudo-random sequence of bytes seeded by key, column
  * qualifier, and value size.
  */
+@InterfaceAudience.Private
 public class LoadTestKVGenerator {
 
   /** A random number generator for determining value size */


@@ -24,6 +24,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.io.WritableUtils;

@@ -37,6 +38,7 @@ import com.google.common.primitives.Bytes;
 @edu.umd.cs.findbugs.annotations.SuppressWarnings(
     value="RV_ABSOLUTE_VALUE_OF_RANDOM_INT",
     justification="Should probably fix")
+@InterfaceAudience.Private
 public class RedundantKVGenerator {
   // row settings
   static byte[] DEFAULT_COMMON_PREFIX = new byte[0];