HBASE-9516 Mark hbase-common classes missing @InterfaceAudience annotation as Private

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1522693 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jonathan Hsieh 2013-09-12 18:35:41 +00:00
parent 80b4a529fd
commit 07fd6b6a04
21 changed files with 61 additions and 0 deletions

View File

@@ -17,12 +17,15 @@
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Implementer can return a CellScanner over its Cell content.
* Class name is ugly but mimicking java.util.Iterable, only we are about the dumber
* CellScanner rather than say Iterator<Cell>. See CellScanner class comment for why we go
* dumber than java.util.Iterator.
*/
@InterfaceAudience.Private
public interface CellScannable {
/**
* @return A CellScanner over the contained {@link Cell}s

View File

@@ -23,8 +23,13 @@ import java.io.InputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
/**
* TODO javadoc
*/
@InterfaceAudience.Private
public abstract class BaseDecoder implements Codec.Decoder {
protected static final Log LOG = LogFactory.getLog(BaseDecoder.class);
protected final InputStream in;

View File

@@ -20,8 +20,14 @@ package org.apache.hadoop.hbase.codec;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
/**
* TODO javadoc
*/
@InterfaceAudience.Private
public abstract class BaseEncoder implements Codec.Encoder {
protected final OutputStream out;
// This encoder is 'done' once flush has been called.

View File

@@ -22,6 +22,7 @@ import java.io.InputStream;
import java.io.OutputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;
@@ -30,6 +31,7 @@ import org.apache.hadoop.hbase.util.Bytes;
* Basic Cell codec that just writes out all the individual elements of a Cell. Uses ints
* delimiting all lengths. Profligate. Needs tune up.
*/
@InterfaceAudience.Private
public class CellCodec implements Codec {
static class CellEncoder extends BaseEncoder {
CellEncoder(final OutputStream out) {

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.io.CellOutputStream;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
@@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
* and without presuming an hfile context. Intent is an Interface that will work for hfile and
* rpc.
*/
@InterfaceAudience.Private
public interface Codec {
// TODO: interfacing with {@link DataBlockEncoder}
/**

View File

@@ -18,12 +18,14 @@
package org.apache.hadoop.hbase.codec;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HBaseIOException;
/**
* Thrown when problems in the codec whether setup or context.
*/
@SuppressWarnings("serial")
@InterfaceAudience.Private
public class CodecException extends HBaseIOException {
public CodecException() {
super();

View File

@@ -21,6 +21,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@@ -42,6 +43,7 @@ import org.apache.hadoop.hbase.KeyValueUtil;
* KeyValue2 backing array
* </pre>
*/
@InterfaceAudience.Private
public class KeyValueCodec implements Codec {
public static class KeyValueEncoder extends BaseEncoder {
public KeyValueEncoder(final OutputStream out) {

View File

@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.encoding;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
/**
@@ -27,6 +28,7 @@ import org.apache.hadoop.hbase.io.compress.Compression;
*
* @see HFileBlockEncodingContext for encoding
*/
@InterfaceAudience.Private
public interface HFileBlockDecodingContext {
/**

View File

@@ -22,6 +22,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
@@ -32,6 +33,7 @@ import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
* @see HFileBlockDefaultEncodingContext for the default compression context
*
*/
@InterfaceAudience.Private
public class HFileBlockDefaultDecodingContext implements
HFileBlockDecodingContext {

View File

@@ -22,6 +22,7 @@ import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.hfile.BlockType;
@@ -37,6 +38,7 @@ import com.google.common.base.Preconditions;
* @see HFileBlockDefaultDecodingContext for the decompression part
*
*/
@InterfaceAudience.Private
public class HFileBlockDefaultEncodingContext implements
HFileBlockEncodingContext {

View File

@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.encoding;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.BlockType;
@@ -29,6 +30,7 @@ import org.apache.hadoop.hbase.io.hfile.BlockType;
* @see HFileBlockDecodingContext for decoding
*
*/
@InterfaceAudience.Private
public interface HFileBlockEncodingContext {
/**

View File

@@ -21,6 +21,13 @@ package org.apache.hadoop.hbase.util;
import java.util.ArrayList;
import java.util.Arrays;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A set of array utility functions that return reasonable values in cases where an array is
* allocated or if it is null
*/
@InterfaceAudience.Private
public class ArrayUtils {
public static int length(byte[] a) {

View File

@@ -23,9 +23,12 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Utility methods for dealing with Collections, including treating null collections as empty.
*/
@InterfaceAudience.Private
public class CollectionUtils {
private static final List<Object> EMPTY_LIST = Collections.unmodifiableList(

View File

@@ -21,10 +21,13 @@ package org.apache.hadoop.hbase.util;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A simple barrier that can be used by classes that need to wait for some operations to
* finish before stopping/closing/etc. forever.
*/
@InterfaceAudience.Private
public class DrainBarrier {
/**
* Contains the number of outstanding operations, as well as flags.

View File

@@ -22,9 +22,12 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Utility methods for Iterable including null-safe handlers.
*/
@InterfaceAudience.Private
public class IterableUtils {
private static final List<Object> EMPTY_LIST = Collections

View File

@@ -30,6 +30,7 @@ import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A utility class to manage a set of locks. Each lock is identified by a String which serves
@@ -48,6 +49,7 @@ import org.apache.commons.logging.LogFactory;
* }
* </p>
*/
@InterfaceAudience.Private
public class KeyLocker<K extends Comparable<? super K>> {
private static final Log LOG = LogFactory.getLog(KeyLocker.class);

View File

@@ -20,6 +20,9 @@ package org.apache.hadoop.hbase.util;
import java.lang.reflect.InvocationTargetException;
import org.apache.hadoop.classification.InterfaceAudience;
@InterfaceAudience.Private
public class ReflectionUtils {
@SuppressWarnings("unchecked")
public static <T> T instantiateWithCustomCtor(String className,

View File

@@ -18,9 +18,12 @@
package org.apache.hadoop.hbase.util;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Utility class to manage a triple.
*/
@InterfaceAudience.Private
public class Triple<A, B, C> {
private A first;
private B second;

View File

@@ -18,10 +18,13 @@ package org.apache.hadoop.hbase.util.test;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A generator of random data (keys/cfs/columns/values) for load testing.
* Contains LoadTestKVGenerator as a matter of convenience...
*/
@InterfaceAudience.Private
public abstract class LoadTestDataGenerator {
protected final LoadTestKVGenerator kvGenerator;

View File

@@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.util.test;
import java.util.Random;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.MD5Hash;
@@ -28,6 +29,7 @@ import org.apache.hadoop.hbase.util.MD5Hash;
* and generating a pseudo-random sequence of bytes seeded by key, column
* qualifier, and value size.
*/
@InterfaceAudience.Private
public class LoadTestKVGenerator {
/** A random number generator for determining value size */

View File

@@ -24,6 +24,7 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.io.WritableUtils;
@@ -37,6 +38,7 @@ import com.google.common.primitives.Bytes;
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value="RV_ABSOLUTE_VALUE_OF_RANDOM_INT",
justification="Should probably fix")
@InterfaceAudience.Private
public class RedundantKVGenerator {
// row settings
static byte[] DEFAULT_COMMON_PREFIX = new byte[0];