diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScannable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScannable.java
index c2b895edca8..b0ac70f89cc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScannable.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScannable.java
@@ -17,12 +17,15 @@
*/
package org.apache.hadoop.hbase;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* Implementer can return a CellScanner over its Cell content.
* Class name is ugly but it mimics java.util.Iterable, only we are about the dumber
* CellScanner rather than, say, Iterator. See the CellScanner class comment for why we go
* dumber than java.util.Iterator.
*/
+@InterfaceAudience.Private
public interface CellScannable {
/**
* @return A CellScanner over the contained {@link Cell}s
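
For context, a minimal sketch of how a CellScannable is typically consumed, assuming the advance()/current() contract referenced in the javadoc above; the example class and its dumpCells helper are hypothetical and not part of this patch:

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;

// Hypothetical consumer of a CellScannable.
public class CellScannableExample {
  static void dumpCells(CellScannable scannable) throws IOException {
    CellScanner scanner = scannable.cellScanner(); // scanner over the contained Cells
    while (scanner.advance()) {                    // position on the next Cell; false when exhausted
      Cell cell = scanner.current();
      // work with the current cell here
    }
  }
}
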
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
index e5fc2a51848..281340bc079 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
@@ -23,8 +23,13 @@ import java.io.InputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
+/**
+ * Base implementation for a {@link Codec.Decoder} that reads Cells from an {@link InputStream}.
+ */
+@InterfaceAudience.Private
public abstract class BaseDecoder implements Codec.Decoder {
protected static final Log LOG = LogFactory.getLog(BaseDecoder.class);
protected final InputStream in;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java
index a2430dc3ab8..b24a917e882 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java
@@ -20,8 +20,14 @@ package org.apache.hadoop.hbase.codec;
import java.io.IOException;
import java.io.OutputStream;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
+/**
+ * Base implementation for a {@link Codec.Encoder} that writes Cells to an {@link OutputStream}.
+ */
+
+@InterfaceAudience.Private
public abstract class BaseEncoder implements Codec.Encoder {
protected final OutputStream out;
// This encoder is 'done' once flush has been called.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
index 0f161df3709..af8db090929 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
@@ -22,6 +22,7 @@ import java.io.InputStream;
import java.io.OutputStream;
import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;
@@ -30,6 +31,7 @@ import org.apache.hadoop.hbase.util.Bytes;
* Basic Cell codec that just writes out all the individual elements of a Cell. Uses ints
* delimiting all lengths. Profligate. Needs tune up.
*/
+@InterfaceAudience.Private
public class CellCodec implements Codec {
static class CellEncoder extends BaseEncoder {
CellEncoder(final OutputStream out) {
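
As a sketch of the "ints delimiting all lengths" scheme the javadoc above describes — each element written as a 4-byte length followed by its bytes — the following is a hypothetical helper for illustration only, not CellCodec's actual encoder code:

import java.io.DataOutputStream;
import java.io.IOException;

// Illustration of length-delimited element writing.
public class LengthDelimitedWriteExample {
  static void writeElement(DataOutputStream out, byte[] element) throws IOException {
    out.writeInt(element.length); // int length delimiter
    out.write(element);           // the element bytes themselves
  }
}
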
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
index 5a6b71ac760..d52ef117282 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec;
import java.io.InputStream;
import java.io.OutputStream;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.io.CellOutputStream;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
@@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
* and without presuming an hfile context. The intent is an interface that will work for both
* hfile and rpc.
*/
+@InterfaceAudience.Private
public interface Codec {
// TODO: interfacing with {@link DataBlockEncoder}
/**
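
A rough round-trip sketch of how the interface is meant to be used, assuming the getEncoder(OutputStream)/getDecoder(InputStream) factories together with the CellOutputStream and CellScanner contracts; the example class and roundTrip helper are hypothetical:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.codec.Codec;

// Hypothetical round trip through a Codec: write cells out, then scan them back.
public class CodecRoundTripExample {
  static List<Cell> roundTrip(Codec codec, List<Cell> cells) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Codec.Encoder encoder = codec.getEncoder(baos);
    for (Cell cell : cells) {
      encoder.write(cell);
    }
    encoder.flush(); // the encoder is 'done' once flush has been called

    Codec.Decoder decoder = codec.getDecoder(new ByteArrayInputStream(baos.toByteArray()));
    List<Cell> result = new ArrayList<Cell>();
    while (decoder.advance()) {
      result.add(decoder.current());
    }
    return result;
  }
}
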
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java
index 8124d9e3378..7edcdc49a13 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java
@@ -18,12 +18,14 @@
package org.apache.hadoop.hbase.codec;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HBaseIOException;
/**
* Thrown when there are problems in the codec, whether in setup or in context.
*/
@SuppressWarnings("serial")
+@InterfaceAudience.Private
public class CodecException extends HBaseIOException {
public CodecException() {
super();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
index 0cf2dae857d..f1626c2e374 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
@@ -21,6 +21,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@@ -42,6 +43,7 @@ import org.apache.hadoop.hbase.KeyValueUtil;
* KeyValue2 backing array
*
*/
+@InterfaceAudience.Private
public class KeyValueCodec implements Codec {
public static class KeyValueEncoder extends BaseEncoder {
public KeyValueEncoder(final OutputStream out) {
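
The partially visible javadoc above describes serialized KeyValues laid down back to back with their lengths written ahead of them; a hedged sketch of reading one such length-prefixed entry follows (a hypothetical helper, not the actual KeyValueDecoder):

import java.io.DataInputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.KeyValue;

// Illustration of reading one length-prefixed KeyValue from a stream.
public class LengthPrefixedReadExample {
  static KeyValue readOne(DataInputStream in) throws IOException {
    int length = in.readInt();         // length written ahead of the serialized KeyValue
    byte[] backing = new byte[length]; // becomes the KeyValue's backing array
    in.readFully(backing);
    return new KeyValue(backing, 0, length);
  }
}
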
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java
index 4b9c9e47dbd..75064be9d1e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.encoding;
import java.io.IOException;
import java.nio.ByteBuffer;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
/**
@@ -27,6 +28,7 @@ import org.apache.hadoop.hbase.io.compress.Compression;
*
* @see HFileBlockEncodingContext for encoding
*/
+@InterfaceAudience.Private
public interface HFileBlockDecodingContext {
/**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
index 475fe1751de..81794f391fd 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
@@ -22,6 +22,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
@@ -32,6 +33,7 @@ import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
* @see HFileBlockDefaultEncodingContext for the default compression context
*
*/
+@InterfaceAudience.Private
public class HFileBlockDefaultDecodingContext implements
HFileBlockDecodingContext {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java
index 086e6e43420..43fc82346d6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java
@@ -22,6 +22,7 @@ import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.hfile.BlockType;
@@ -37,6 +38,7 @@ import com.google.common.base.Preconditions;
* @see HFileBlockDefaultDecodingContext for the decompression part
*
*/
+@InterfaceAudience.Private
public class HFileBlockDefaultEncodingContext implements
HFileBlockEncodingContext {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java
index b76d5ce8b66..66a9cfcead2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.encoding;
import java.io.IOException;
import java.io.OutputStream;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.BlockType;
@@ -29,6 +30,7 @@ import org.apache.hadoop.hbase.io.hfile.BlockType;
* @see HFileBlockDecodingContext for decoding
*
*/
+@InterfaceAudience.Private
public interface HFileBlockEncodingContext {
/**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ArrayUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ArrayUtils.java
index eae0049b114..7958ce079c5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ArrayUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ArrayUtils.java
@@ -21,6 +21,13 @@ package org.apache.hadoop.hbase.util;
import java.util.ArrayList;
import java.util.Arrays;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * A set of array utility functions that return reasonable values whether an array is allocated
+ * or null.
+ */
+@InterfaceAudience.Private
public class ArrayUtils {
public static int length(byte[] a) {
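
A minimal sketch of the null-safe convention the javadoc describes, shown here as a standalone illustration rather than the class's full contents:

// Null-safe convention: a null array is treated as empty.
public class NullSafeArrayExample {
  static int length(byte[] a) {
    return a == null ? 0 : a.length;
  }
}
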
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
index d3c32c3ede3..f88aaa707ed 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
@@ -23,9 +23,12 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* Utility methods for dealing with Collections, including treating null collections as empty.
*/
+@InterfaceAudience.Private
public class CollectionUtils {
private static final List<Object> EMPTY_LIST = Collections.unmodifiableList(new ArrayList<Object>(0));
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/KeyLocker.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/KeyLocker.java
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/KeyLocker.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/KeyLocker.java
 */
+@InterfaceAudience.Private
public class KeyLocker<K extends Comparable<? super K>> {
private static final Log LOG = LogFactory.getLog(KeyLocker.class);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
index e7762901c2c..cda54245abb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.hbase.util;
import java.lang.reflect.InvocationTargetException;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
public class ReflectionUtils {
@SuppressWarnings("unchecked")
public static <T> T instantiateWithCustomCtor(String className,
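
For readers unfamiliar with the pattern, a hedged sketch of instantiating a class by name through a chosen constructor — generic Java reflection, not necessarily the exact ReflectionUtils implementation:

import java.lang.reflect.Constructor;

// Look up a class by name, pick the constructor matching the given parameter types,
// and invoke it with the supplied arguments.
public class CustomCtorExample {
  @SuppressWarnings("unchecked")
  static <T> T instantiate(String className, Class<?>[] ctorTypes, Object[] ctorArgs) {
    try {
      Class<? extends T> clazz = (Class<? extends T>) Class.forName(className);
      Constructor<? extends T> ctor = clazz.getDeclaredConstructor(ctorTypes);
      ctor.setAccessible(true);
      return ctor.newInstance(ctorArgs);
    } catch (Exception e) {
      throw new UnsupportedOperationException("Unable to instantiate " + className, e);
    }
  }
}
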
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
index bb7555396b7..755efbe3520 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
@@ -18,9 +18,12 @@
package org.apache.hadoop.hbase.util;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* Utility class to manage a triple.
*/
+@InterfaceAudience.Private
public class Triple<A, B, C> {
private A first;
private B second;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
index e5941d5e1a0..e19ce8e3402 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
@@ -18,10 +18,13 @@ package org.apache.hadoop.hbase.util.test;
import java.util.Set;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* A generator of random data (keys/cfs/columns/values) for load testing.
* Contains LoadTestKVGenerator as a matter of convenience...
*/
+@InterfaceAudience.Private
public abstract class LoadTestDataGenerator {
protected final LoadTestKVGenerator kvGenerator;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
index 6a30ca3c603..e2a75708af5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.util.test;
import java.util.Random;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.MD5Hash;
@@ -28,6 +29,7 @@ import org.apache.hadoop.hbase.util.MD5Hash;
* and generating a pseudo-random sequence of bytes seeded by key, column
* qualifier, and value size.
*/
+@InterfaceAudience.Private
public class LoadTestKVGenerator {
/** A random number generator for determining value size */
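
A hedged sketch of the idea described above — values that look random but are fully determined by key, qualifier, and size — using a hash-derived seed; this is illustrative only and not the class's actual algorithm:

import java.util.Arrays;
import java.util.Random;

// Deterministic "random" values: the same key/qualifier/size always yields the same bytes,
// so a verifier can regenerate the expected value and compare it against what was read back.
public class SeededValueExample {
  static byte[] generateValue(byte[] key, byte[] qualifier, int size) {
    long seed = 31L * Arrays.hashCode(key) + Arrays.hashCode(qualifier);
    byte[] value = new byte[size];
    new Random(seed).nextBytes(value);
    return value;
  }
}
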
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
index fce7283fd2e..48fa3e9961a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
@@ -24,6 +24,7 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.io.WritableUtils;
@@ -37,6 +38,7 @@ import com.google.common.primitives.Bytes;
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value="RV_ABSOLUTE_VALUE_OF_RANDOM_INT",
justification="Should probably fix")
+@InterfaceAudience.Private
public class RedundantKVGenerator {
// row settings
static byte[] DEFAULT_COMMON_PREFIX = new byte[0];