HBASE-9523 Audit of hbase-common @InterfaceAudience.Public apis

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1523411 13f79535-47bb-0310-9956-ffa450edef68
Author: Jonathan Hsieh
Date: 2013-09-15 06:45:14 +00:00
Parent: b9ad5e8413
Commit: 1f969f6155
17 changed files with 30 additions and 43 deletions

View File

@@ -26,6 +26,7 @@ import java.io.OutputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
@@ -100,6 +101,8 @@ public final class Compression {
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value="SE_TRANSIENT_FIELD_NOT_RESTORED",
justification="We are not serializing so doesn't apply (not sure why transient though)")
@InterfaceAudience.Public
@InterfaceStability.Evolving
public static enum Algorithm {
LZO("lzo") {
// Use base type to avoid compile-time dependencies.

View File

@@ -22,6 +22,7 @@ import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -30,7 +31,8 @@ import org.apache.hadoop.hbase.util.Bytes;
* want to add a new algorithm/version, assign it a new id. Announce the new id
* in the HBase mailing list to prevent collisions.
*/
@InterfaceAudience.Private
@InterfaceAudience.Public
@InterfaceStability.Evolving
public enum DataBlockEncoding {
/** Disable data block encoding. */
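
The javadoc above asks for a fresh, announced id per encoding because the numeric id is what ends up persisted with the data; reusing one would make existing files ambiguous. Below is a simplified sketch of that "stable id per enum constant" pattern; the entries and constructor shown are illustrative, not the real DataBlockEncoding source.

// Simplified sketch of an id-carrying enum; ids must never be reused or changed.
public enum EncodingIdSketch {
  NONE((short) 0),
  PREFIX((short) 2),
  DIFF((short) 3);

  private final short id;

  EncodingIdSketch(short id) {
    this.id = id;  // written to disk, so it is a permanent contract
  }

  public short getId() {
    return id;
  }

  /** Resolve an id read back from persisted data. */
  public static EncodingIdSketch fromId(short id) {
    for (EncodingIdSketch e : values()) {
      if (e.id == id) {
        return e;
      }
    }
    throw new IllegalArgumentException("Unknown encoding id: " + id);
  }
}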

View File

@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.util.PositionedByteRange;
* do not terminate the fields list. Built on
* {@link OrderedBytes#encodeBlobVar(PositionedByteRange, byte[], int, int, Order)}.
*/
@InterfaceAudience.Private
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class OrderedBlobVar extends OrderedBytesBase<byte[]> {

View File

@@ -21,13 +21,11 @@ package org.apache.hadoop.hbase.util;
import java.net.InetSocketAddress;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Utility for network addresses, resolving and naming.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@InterfaceAudience.Private
public class Addressing {
public static final String VALID_PORT_REGEX = "[\\d]+";
public static final String HOSTNAME_PORT_SEPARATOR = ":";
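
The two constants visible in this hunk are the building blocks for "hostname:port" handling. The sketch below only shows how they compose; the parse() helper is hypothetical and not necessarily one of Addressing's real methods.

// Illustrative use of the constants above; the helper name is hypothetical.
import java.net.InetSocketAddress;
import org.apache.hadoop.hbase.util.Addressing;

final class HostAndPortSketch {
  static InetSocketAddress parse(String hostAndPort) {
    int idx = hostAndPort.lastIndexOf(Addressing.HOSTNAME_PORT_SEPARATOR);
    if (idx < 0) {
      throw new IllegalArgumentException("Expected <hostname>:<port>, got " + hostAndPort);
    }
    String host = hostAndPort.substring(0, idx);
    String portStr = hostAndPort.substring(idx + 1);
    if (!portStr.matches(Addressing.VALID_PORT_REGEX)) {
      throw new IllegalArgumentException("Invalid port in " + hostAndPort);
    }
    return new InetSocketAddress(host, Integer.parseInt(portStr));
  }
}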

View File

@@ -1254,6 +1254,8 @@ public class Base64 {
* @see Base64
* @since 1.3
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public static class Base64InputStream extends FilterInputStream {
private boolean encode; // Encoding or decoding
private int position; // Current position in the buffer
@@ -1498,6 +1500,8 @@ public class Base64 {
* @see Base64#DONT_BREAK_LINES
* @since 1.3
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public Base64OutputStream(OutputStream out, int options) {
super(out);
this.breakLines = (options & DONT_BREAK_LINES) != DONT_BREAK_LINES;
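
Both nested stream classes wrap another stream and transcode on the fly, with DONT_BREAK_LINES suppressing the 76-character line wrapping. A hedged usage sketch follows; it assumes this Base64 port keeps the ENCODE and DECODE option flags from the public-domain original it derives from.

// Usage sketch only; ENCODE/DECODE flags are assumed from the original Base64 port.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.util.Base64;

public class Base64StreamSketch {
  public static void main(String[] args) throws IOException {
    byte[] raw = "hello hbase".getBytes("UTF-8");

    // Encode while writing: raw bytes in, base64 text out, no line breaks inserted.
    ByteArrayOutputStream encoded = new ByteArrayOutputStream();
    Base64.Base64OutputStream out =
        new Base64.Base64OutputStream(encoded, Base64.ENCODE | Base64.DONT_BREAK_LINES);
    out.write(raw);
    out.close();

    // Decode while reading: base64 text in, raw bytes out.
    Base64.Base64InputStream in = new Base64.Base64InputStream(
        new ByteArrayInputStream(encoded.toByteArray()), Base64.DECODE);
    ByteArrayOutputStream decoded = new ByteArrayOutputStream();
    int b;
    while ((b = in.read()) != -1) {
      decoded.write(b);
    }
    in.close();
    System.out.println(decoded.toString("UTF-8"));  // prints "hello hbase"
  }
}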

View File

@@ -32,7 +32,7 @@ import org.apache.hadoop.util.StringUtils;
* buffers are sequential and could be considered as a large buffer.It supports
* reading/writing data from this large buffer with a position and offset
*/
@InterfaceAudience.Public
@InterfaceAudience.Private
public final class ByteBufferArray {
static final Log LOG = LogFactory.getLog(ByteBufferArray.class);
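
The javadoc describes treating many equal-sized buffers as one large logical buffer addressed by a global position and offset. The sketch below only illustrates the index arithmetic behind that idea (global offset split into buffer index plus offset within the buffer); it is not ByteBufferArray's actual API.

// Conceptual sketch of "segmented buffers viewed as one large buffer".
import java.nio.ByteBuffer;

final class SegmentedBufferSketch {
  private final ByteBuffer[] buffers;
  private final int segmentSize;

  SegmentedBufferSketch(int segmentSize, int segmentCount) {
    this.segmentSize = segmentSize;
    this.buffers = new ByteBuffer[segmentCount];
    for (int i = 0; i < segmentCount; i++) {
      buffers[i] = ByteBuffer.allocate(segmentSize);
    }
  }

  byte get(long globalOffset) {
    int which = (int) (globalOffset / segmentSize);   // which underlying buffer
    int within = (int) (globalOffset % segmentSize);  // offset inside that buffer
    return buffers[which].get(within);
  }

  void put(long globalOffset, byte value) {
    int which = (int) (globalOffset / segmentSize);
    int within = (int) (globalOffset % segmentSize);
    buffers[which].put(within, value);
  }
}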

View File

@@ -137,6 +137,8 @@ public class Bytes {
/**
* Byte array comparator class.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public static class ByteArrayComparator implements RawComparator<byte []> {
/**
* Constructor
@@ -163,6 +165,8 @@ public class Bytes {
// boundaries. Thus semantically, we should treat empty byte array as the smallest value
// while comparing row keys, start keys etc; but as the largest value for comparing
// region boundaries for endKeys.
@InterfaceAudience.Public
@InterfaceStability.Stable
public static class RowEndKeyComparator extends ByteArrayComparator {
@Override
public int compare(byte[] left, byte[] right) {
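
The comment above captures why two comparators exist: an empty byte array must sort lowest when comparing row and start keys, but highest when comparing region end keys, because an empty end key means "end of table". The sketch below reproduces only that ordering rule; it is not the real Bytes.RowEndKeyComparator source.

// Minimal sketch of the end-key ordering rule described in the comment above.
final class EndKeyOrderSketch {
  /** Plain lexicographic order: the empty array is the smallest value. */
  static int compareRows(byte[] left, byte[] right) {
    int len = Math.min(left.length, right.length);
    for (int i = 0; i < len; i++) {
      int diff = (left[i] & 0xff) - (right[i] & 0xff);
      if (diff != 0) {
        return diff;
      }
    }
    return left.length - right.length;
  }

  /** End-key order: the empty array is the largest value ("last region"). */
  static int compareEndKeys(byte[] left, byte[] right) {
    if (left.length == 0 && right.length == 0) return 0;
    if (left.length == 0) return 1;   // empty end key sorts after everything
    if (right.length == 0) return -1;
    return compareRows(left, right);
  }

  public static void main(String[] args) {
    byte[] empty = new byte[0];
    byte[] rowA = "a".getBytes();
    System.out.println(compareRows(empty, rowA) < 0);     // true: empty is smallest
    System.out.println(compareEndKeys(empty, rowA) > 0);  // true: empty is largest
  }
}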

View File

@@ -22,13 +22,11 @@ package org.apache.hadoop.hbase.util;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Class for determining the "size" of a class, an attempt to calculate the
@@ -36,8 +34,7 @@ import org.apache.hadoop.classification.InterfaceStability;
*
* The core of this class is taken from the Derby project
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
@InterfaceAudience.Private
public class ClassSize {
static final Log LOG = LogFactory.getLog(ClassSize.class);

View File

@@ -21,13 +21,11 @@
package org.apache.hadoop.hbase.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Utilities for class manipulation.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
@InterfaceAudience.Private
public class Classes {
/**

View File

@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Default implementation of an environment edge.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@InterfaceAudience.Private
public class DefaultEnvironmentEdge implements EnvironmentEdge {

View File

@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Has some basic interaction with the environment. Alternate implementations
@@ -27,8 +26,7 @@ import org.apache.hadoop.classification.InterfaceStability;
*
* @see EnvironmentEdgeManager
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@InterfaceAudience.Private
public interface EnvironmentEdge {
/**

View File

@@ -19,15 +19,13 @@
package org.apache.hadoop.hbase.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Manages a singleton instance of the environment edge. This class shall
* implement static versions of the interface {@link EnvironmentEdge}, then
* defer to the delegate on invocation.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@InterfaceAudience.Private
public class EnvironmentEdgeManager {
private static volatile EnvironmentEdge delegate = new DefaultEnvironmentEdge();
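
The javadoc describes the manager pattern: static methods mirroring the EnvironmentEdge interface that forward to a swappable delegate, so tests can inject a controllable clock. A minimal sketch of that pattern follows; the currentTime()/injectEdge() names are illustrative, not necessarily the real method names.

// Minimal sketch of the static-facade-over-a-delegate pattern described above.
interface ClockEdge {
  long currentTime();
}

final class ClockEdgeManagerSketch {
  // Production default: read the system clock.
  private static volatile ClockEdge delegate = new ClockEdge() {
    @Override
    public long currentTime() {
      return System.currentTimeMillis();
    }
  };

  private ClockEdgeManagerSketch() {
  }

  /** Static mirror of the interface method, deferring to the delegate. */
  static long currentTime() {
    return delegate.currentTime();
  }

  /** Tests can inject a fixed or manually advanced clock here. */
  static void injectEdge(ClockEdge edge) {
    delegate = edge;
  }
}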

View File

@@ -18,21 +18,18 @@
package org.apache.hadoop.hbase.util;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.RuntimeMXBean;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Method;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
@@ -42,8 +39,7 @@ import org.apache.hadoop.classification.InterfaceStability;
* depending on the runtime (vendor) used.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@InterfaceAudience.Private
public class JVM {
private static final Log LOG = LogFactory.getLog(JVM.class);
private OperatingSystemMXBean osMbean;
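
The OperatingSystemMXBean and reflection imports, together with the "depending on the runtime (vendor) used" remark, point at the common trick of calling vendor-specific MXBean methods reflectively so the code still compiles and runs on JVMs that lack them. A hedged sketch of that trick, not the class's actual code:

// Sketch of reflectively probing a vendor-specific MXBean method.
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.lang.reflect.Method;

final class OpenFdCountSketch {
  static long openFileDescriptorCount() {
    OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
    try {
      // getOpenFileDescriptorCount() only exists on some JVM implementations,
      // so it is looked up at runtime instead of being called directly.
      Method m = os.getClass().getMethod("getOpenFileDescriptorCount");
      m.setAccessible(true);  // the implementation class is not public
      return (Long) m.invoke(os);
    } catch (Exception e) {
      return -1;  // not supported on this runtime/vendor
    }
  }
}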

View File

@@ -27,10 +27,8 @@ import java.lang.reflect.UndeclaredThrowableException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@InterfaceAudience.Public
@InterfaceStability.Stable
@InterfaceAudience.Private
public class Methods {
private static Log LOG = LogFactory.getLog(Methods.class);

View File

@@ -33,7 +33,6 @@ import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* A SortedMap implementation that uses Soft Reference values
@@ -43,8 +42,7 @@ import org.apache.hadoop.classification.InterfaceStability;
* @param <K> key class
* @param <V> value class
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
@InterfaceAudience.Private
public class SoftValueSortedMap<K,V> implements SortedMap<K,V> {
private final SortedMap<K, SoftValue<K,V>> internalMap;
private final ReferenceQueue<V> rq = new ReferenceQueue<V>();
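
The fields visible in this hunk (SoftValue wrappers plus a ReferenceQueue) are the standard soft-reference-value caching pattern: values may be cleared by the garbage collector under memory pressure, and the queue is polled to purge map entries whose values are gone. A generic sketch of that pattern follows; it is not SoftValueSortedMap's actual code.

// Generic sketch of a map whose values are held through SoftReferences.
import java.lang.ref.ReferenceQueue;
import java.lang.ref.SoftReference;
import java.util.TreeMap;

final class SoftValueCacheSketch<K extends Comparable<K>, V> {
  /** SoftReference that remembers its key so cleared entries can be removed. */
  private static final class SoftValue<K, V> extends SoftReference<V> {
    final K key;
    SoftValue(K key, V value, ReferenceQueue<V> queue) {
      super(value, queue);
      this.key = key;
    }
  }

  private final TreeMap<K, SoftValue<K, V>> map = new TreeMap<K, SoftValue<K, V>>();
  private final ReferenceQueue<V> queue = new ReferenceQueue<V>();

  synchronized void put(K key, V value) {
    purgeCleared();
    map.put(key, new SoftValue<K, V>(key, value, queue));
  }

  synchronized V get(K key) {
    purgeCleared();
    SoftValue<K, V> ref = map.get(key);
    return ref == null ? null : ref.get();  // may be null if the GC cleared it
  }

  /** Drop entries whose values the garbage collector has already reclaimed. */
  @SuppressWarnings("unchecked")
  private void purgeCleared() {
    SoftValue<K, V> cleared;
    while ((cleared = (SoftValue<K, V>) queue.poll()) != null) {
      map.remove(cleared.key);
    }
  }
}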

View File

@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Utility for Strings.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
@InterfaceAudience.Private
public class Strings {
public final static String DEFAULT_SEPARATOR = "=";
public final static String DEFAULT_KEYVALUE_SEPARATOR = ", ";

View File

@@ -18,7 +18,6 @@
*/
package org.apache.hadoop.hbase.util;
import java.io.InterruptedIOException;
import java.io.PrintWriter;
import java.lang.Thread.UncaughtExceptionHandler;
import java.util.concurrent.LinkedBlockingQueue;
@@ -30,14 +29,12 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.ReflectionUtils;
/**
* Thread Utility
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
@InterfaceAudience.Private
public class Threads {
protected static final Log LOG = LogFactory.getLog(Threads.class);
private static final AtomicInteger poolNumber = new AtomicInteger(1);
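
The poolNumber counter visible in this hunk is typical of a helper that hands out uniquely named daemon threads for executor pools. A generic sketch of such a thread factory follows; the class and its constructor are illustrative and not necessarily part of the real Threads API.

// Generic sketch of a pool-numbered daemon ThreadFactory.
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;

final class DaemonThreadFactorySketch implements ThreadFactory {
  private static final AtomicInteger poolNumber = new AtomicInteger(1);
  private final AtomicInteger threadNumber = new AtomicInteger(1);
  private final String prefix;

  DaemonThreadFactorySketch(String baseName) {
    this.prefix = baseName + "-pool" + poolNumber.getAndIncrement() + "-t";
  }

  @Override
  public Thread newThread(Runnable r) {
    Thread t = new Thread(r, prefix + threadNumber.getAndIncrement());
    t.setDaemon(true);  // background threads should not keep the JVM alive
    return t;
  }
}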